Source: lib/hls/hls_parser.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.hls.HlsParser');
  7. goog.require('goog.Uri');
  8. goog.require('goog.asserts');
  9. goog.require('shaka.abr.Ewma');
  10. goog.require('shaka.drm.DrmUtils');
  11. goog.require('shaka.drm.FairPlay');
  12. goog.require('shaka.drm.PlayReady');
  13. goog.require('shaka.hls.Attribute');
  14. goog.require('shaka.hls.ManifestTextParser');
  15. goog.require('shaka.hls.Playlist');
  16. goog.require('shaka.hls.PlaylistType');
  17. goog.require('shaka.hls.Tag');
  18. goog.require('shaka.hls.Utils');
  19. goog.require('shaka.log');
  20. goog.require('shaka.media.InitSegmentReference');
  21. goog.require('shaka.media.ManifestParser');
  22. goog.require('shaka.media.PresentationTimeline');
  23. goog.require('shaka.media.QualityObserver');
  24. goog.require('shaka.media.SegmentIndex');
  25. goog.require('shaka.media.SegmentReference');
  26. goog.require('shaka.media.SegmentUtils');
  27. goog.require('shaka.net.DataUriPlugin');
  28. goog.require('shaka.net.NetworkingEngine');
  29. goog.require('shaka.net.NetworkingEngine.PendingRequest');
  30. goog.require('shaka.util.ArrayUtils');
  31. goog.require('shaka.util.BufferUtils');
  32. goog.require('shaka.util.ContentSteeringManager');
  33. goog.require('shaka.util.Error');
  34. goog.require('shaka.util.EventManager');
  35. goog.require('shaka.util.FakeEvent');
  36. goog.require('shaka.util.LanguageUtils');
  37. goog.require('shaka.util.ManifestParserUtils');
  38. goog.require('shaka.util.MimeUtils');
  39. goog.require('shaka.util.Networking');
  40. goog.require('shaka.util.OperationManager');
  41. goog.require('shaka.util.Pssh');
  42. goog.require('shaka.util.Timer');
  43. goog.require('shaka.util.TsParser');
  44. goog.require('shaka.util.TXml');
  45. goog.require('shaka.util.StreamUtils');
  46. goog.require('shaka.util.Uint8ArrayUtils');
  47. goog.requireType('shaka.hls.Segment');
  48. /**
  49. * HLS parser.
  50. *
  51. * @implements {shaka.extern.ManifestParser}
  52. * @export
  53. */
  54. shaka.hls.HlsParser = class {
  55. /**
56. * Creates an HLS parser object.
  57. */
  58. constructor() {
  59. /** @private {?shaka.extern.ManifestParser.PlayerInterface} */
  60. this.playerInterface_ = null;
  61. /** @private {?shaka.extern.ManifestConfiguration} */
  62. this.config_ = null;
  63. /** @private {number} */
  64. this.globalId_ = 1;
  65. /** @private {!Map<string, string>} */
  66. this.globalVariables_ = new Map();
  67. /**
  68. * A map from group id to stream infos created from the media tags.
  69. * @private {!Map<string, !Array<?shaka.hls.HlsParser.StreamInfo>>}
  70. */
  71. this.groupIdToStreamInfosMap_ = new Map();
  72. /**
  73. * For media playlist lazy-loading to work in livestreams, we have to assume
  74. * that each stream of a type (video, audio, etc) has the same mappings of
  75. * sequence number to start time.
  76. * This map stores those relationships.
  77. * Only used during livestreams; we do not assume that VOD content is
  78. * aligned in that way.
  79. * @private {!Map<string, !Map<number, number>>}
  80. */
  81. this.mediaSequenceToStartTimeByType_ = new Map();
  82. // Set initial maps.
  83. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  84. this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map());
  85. this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map());
  86. this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map());
  87. this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map());
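// Maps KEYFORMAT values, as they appear in EXT-X-KEY and EXT-X-SESSION-KEY
// tags, to the DRM parser for that key system (FairPlay, Widevine,
// PlayReady, WisePlay).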
  88. /** @private {!Map<string, shaka.hls.HlsParser.DrmParser_>} */
  89. this.keyFormatsToDrmParsers_ = new Map()
  90. .set('com.apple.streamingkeydelivery',
  91. (tag, type, ref) => this.fairplayDrmParser_(tag, type, ref))
  92. .set('urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed',
  93. (tag, type, ref) => this.widevineDrmParser_(tag, type, ref))
  94. .set('com.microsoft.playready',
  95. (tag, type, ref) => this.playreadyDrmParser_(tag, type, ref))
  96. .set('urn:uuid:3d5e6d35-9b9a-41e8-b843-dd3c6e72c42c',
  97. (tag, type, ref) => this.wiseplayDrmParser_(tag, type, ref));
  98. /**
  99. * The values are strings of the form "<VIDEO URI> - <AUDIO URI>",
  100. * where the URIs are the verbatim media playlist URIs as they appeared in
  101. * the master playlist.
  102. *
  103. * Used to avoid duplicates that vary only in their text stream.
  104. *
  105. * @private {!Set<string>}
  106. */
  107. this.variantUriSet_ = new Set();
  108. /**
  109. * A map from (verbatim) media playlist URI to stream infos representing the
  110. * playlists.
  111. *
  112. * On update, used to iterate through and update from media playlists.
  113. *
  114. * On initial parse, used to iterate through and determine minimum
  115. * timestamps, offsets, and to handle TS rollover.
  116. *
  117. * During parsing, used to avoid duplicates in the async methods
  118. * createStreamInfoFromMediaTags_, createStreamInfoFromImageTag_ and
  119. * createStreamInfoFromVariantTags_.
  120. *
  121. * @private {!Map<string, shaka.hls.HlsParser.StreamInfo>}
  122. */
  123. this.uriToStreamInfosMap_ = new Map();
  124. /** @private {?shaka.media.PresentationTimeline} */
  125. this.presentationTimeline_ = null;
  126. /**
  127. * The master playlist URI, after redirects.
  128. *
  129. * @private {string}
  130. */
  131. this.masterPlaylistUri_ = '';
  132. /** @private {shaka.hls.ManifestTextParser} */
  133. this.manifestTextParser_ = new shaka.hls.ManifestTextParser();
  134. /**
  135. * The minimum sequence number for generated segments, when ignoring
  136. * EXT-X-PROGRAM-DATE-TIME.
  137. *
  138. * @private {number}
  139. */
  140. this.minSequenceNumber_ = -1;
  141. /**
  142. * The lowest time value for any of the streams, as defined by the
  143. * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970.
  144. *
  145. * @private {number}
  146. */
  147. this.lowestSyncTime_ = Infinity;
  148. /**
  149. * Flag to indicate if any of the media playlists use
  150. * EXT-X-PROGRAM-DATE-TIME.
  151. *
  152. * @private {boolean}
  153. */
  154. this.usesProgramDateTime_ = false;
  155. /**
  156. * Whether the streams have previously been "finalized"; that is to say,
  157. * whether we have loaded enough streams to know information about the asset
  158. * such as timing information, live status, etc.
  159. *
  160. * @private {boolean}
  161. */
  162. this.streamsFinalized_ = false;
  163. /**
164. * Whether the manifest specifies the codecs to use.
  165. *
  166. * @private
  167. */
  168. this.codecInfoInManifest_ = false;
  169. /**
  170. * This timer is used to trigger the start of a manifest update. A manifest
  171. * update is async. Once the update is finished, the timer will be restarted
  172. * to trigger the next update. The timer will only be started if the content
  173. * is live content.
  174. *
  175. * @private {shaka.util.Timer}
  176. */
  177. this.updatePlaylistTimer_ = new shaka.util.Timer(() => {
  178. if (this.mediaElement_ && !this.config_.continueLoadingWhenPaused) {
  179. this.eventManager_.unlisten(this.mediaElement_, 'timeupdate');
  180. if (this.mediaElement_.paused) {
  181. this.eventManager_.listenOnce(
  182. this.mediaElement_, 'timeupdate', () => this.onUpdate_());
  183. return;
  184. }
  185. }
  186. this.onUpdate_();
  187. });
  188. /** @private {shaka.hls.HlsParser.PresentationType_} */
  189. this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD;
  190. /** @private {?shaka.extern.Manifest} */
  191. this.manifest_ = null;
  192. /** @private {number} */
  193. this.maxTargetDuration_ = 0;
  194. /** @private {number} */
  195. this.lastTargetDuration_ = Infinity;
  196. /**
  197. * Partial segments target duration.
  198. * @private {number}
  199. */
  200. this.partialTargetDuration_ = 0;
  201. /** @private {number} */
  202. this.presentationDelay_ = 0;
  203. /** @private {number} */
  204. this.lowLatencyPresentationDelay_ = 0;
  205. /** @private {shaka.util.OperationManager} */
  206. this.operationManager_ = new shaka.util.OperationManager();
  207. /**
208. * A map from closed captions' group id to a map of closed captions info.
  209. * {group id -> {closed captions channel id -> language}}
  210. * @private {Map<string, Map<string, string>>}
  211. */
  212. this.groupIdToClosedCaptionsMap_ = new Map();
  213. /** @private {Map<string, string>} */
  214. this.groupIdToCodecsMap_ = new Map();
  215. /**
  216. * A cache mapping EXT-X-MAP tag info to the InitSegmentReference created
  217. * from the tag.
  218. * The key is a string combining the EXT-X-MAP tag's absolute uri, and
  219. * its BYTERANGE if available.
  220. * @private {!Map<string, !shaka.media.InitSegmentReference>}
  221. */
  222. this.mapTagToInitSegmentRefMap_ = new Map();
  223. /** @private {Map<string, !shaka.extern.aesKey>} */
  224. this.aesKeyInfoMap_ = new Map();
  225. /** @private {Map<string, !Promise<shaka.extern.Response>>} */
  226. this.aesKeyMap_ = new Map();
  227. /** @private {Map<string, !Promise<shaka.extern.Response>>} */
  228. this.identityKeyMap_ = new Map();
  229. /** @private {Map<!shaka.media.InitSegmentReference, ?string>} */
  230. this.initSegmentToKidMap_ = new Map();
  231. /** @private {boolean} */
  232. this.lowLatencyMode_ = false;
  233. /** @private {boolean} */
  234. this.lowLatencyByterangeOptimization_ = false;
  235. /**
  236. * An ewma that tracks how long updates take.
  237. * This is to mitigate issues caused by slow parsing on embedded devices.
  238. * @private {!shaka.abr.Ewma}
  239. */
  240. this.averageUpdateDuration_ = new shaka.abr.Ewma(5);
  241. /** @private {?shaka.util.ContentSteeringManager} */
  242. this.contentSteeringManager_ = null;
  243. /** @private {boolean} */
  244. this.needsClosedCaptionsDetection_ = true;
  245. /** @private {Set<string>} */
  246. this.dateRangeIdsEmitted_ = new Set();
  247. /** @private {shaka.util.EventManager} */
  248. this.eventManager_ = new shaka.util.EventManager();
  249. /** @private {HTMLMediaElement} */
  250. this.mediaElement_ = null;
  251. /** @private {?number} */
  252. this.startTime_ = null;
  253. /** @private {function():boolean} */
  254. this.isPreloadFn_ = () => false;
  255. }
  256. /**
  257. * @param {shaka.extern.ManifestConfiguration} config
  258. * @param {(function():boolean)=} isPreloadFn
  259. * @override
  260. * @exportInterface
  261. */
  262. configure(config, isPreloadFn) {
  263. const needFireUpdate = this.playerInterface_ &&
  264. config.updatePeriod != this.config_.updatePeriod &&
  265. config.updatePeriod >= 0;
  266. this.config_ = config;
  267. if (isPreloadFn) {
  268. this.isPreloadFn_ = isPreloadFn;
  269. }
  270. if (this.contentSteeringManager_) {
  271. this.contentSteeringManager_.configure(this.config_);
  272. }
  273. if (needFireUpdate && this.manifest_ &&
  274. this.manifest_.presentationTimeline.isLive()) {
  275. this.updatePlaylistTimer_.tickNow();
  276. }
  277. }
  278. /**
  279. * @override
  280. * @exportInterface
  281. */
  282. async start(uri, playerInterface) {
  283. goog.asserts.assert(this.config_, 'Must call configure() before start()!');
  284. this.playerInterface_ = playerInterface;
  285. this.lowLatencyMode_ = playerInterface.isLowLatencyMode();
  286. const response = await this.requestManifest_([uri]).promise;
  287. // Record the master playlist URI after redirects.
  288. this.masterPlaylistUri_ = response.uri;
  289. goog.asserts.assert(response.data, 'Response data should be non-null!');
  290. await this.parseManifest_(response.data);
  291. goog.asserts.assert(this.manifest_, 'Manifest should be non-null');
  292. return this.manifest_;
  293. }
  294. /**
  295. * @override
  296. * @exportInterface
  297. */
  298. stop() {
  299. // Make sure we don't update the manifest again. Even if the timer is not
  300. // running, this is safe to call.
  301. if (this.updatePlaylistTimer_) {
  302. this.updatePlaylistTimer_.stop();
  303. this.updatePlaylistTimer_ = null;
  304. }
  305. /** @type {!Array<!Promise>} */
  306. const pending = [];
  307. if (this.operationManager_) {
  308. pending.push(this.operationManager_.destroy());
  309. this.operationManager_ = null;
  310. }
  311. this.playerInterface_ = null;
  312. this.config_ = null;
  313. this.variantUriSet_.clear();
  314. this.manifest_ = null;
  315. this.uriToStreamInfosMap_.clear();
  316. this.groupIdToStreamInfosMap_.clear();
  317. this.groupIdToCodecsMap_.clear();
  318. this.globalVariables_.clear();
  319. this.mapTagToInitSegmentRefMap_.clear();
  320. this.aesKeyInfoMap_.clear();
  321. this.aesKeyMap_.clear();
  322. this.identityKeyMap_.clear();
  323. this.initSegmentToKidMap_.clear();
  324. this.dateRangeIdsEmitted_.clear();
  325. if (this.contentSteeringManager_) {
  326. this.contentSteeringManager_.destroy();
  327. }
  328. if (this.eventManager_) {
  329. this.eventManager_.release();
  330. this.eventManager_ = null;
  331. }
  332. return Promise.all(pending);
  333. }
  334. /**
  335. * @override
  336. * @exportInterface
  337. */
  338. async update() {
  339. if (!this.isLive_()) {
  340. return;
  341. }
  342. /** @type {!Array<!Promise>} */
  343. const updates = [];
  344. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
345. // This is necessary to correctly calculate the update time.
  346. this.lastTargetDuration_ = Infinity;
  347. this.manifest_.gapCount = 0;
  348. // Only update active streams.
  349. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex);
  350. for (const streamInfo of activeStreamInfos) {
  351. updates.push(this.updateStream_(streamInfo));
  352. }
  353. await Promise.all(updates);
  354. // Now that streams have been updated, notify the presentation timeline.
  355. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream));
356. // If any stream's hasEndList is false, the presentation is still live.
  357. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false);
  358. if (activeStreamInfos.length && !stillLive) {
  359. // Convert the presentation to VOD and set the duration.
  360. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  361. this.setPresentationType_(PresentationType.VOD);
  362. // The duration is the minimum of the end times of all active streams.
  363. // Non-active streams are not guaranteed to have useful maxTimestamp
  364. // values, due to the lazy-loading system, so they are ignored.
  365. const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp);
  366. // The duration is the minimum of the end times of all streams.
  367. this.presentationTimeline_.setDuration(Math.min(...maxTimestamps));
  368. this.playerInterface_.updateDuration();
  369. }
  370. if (stillLive) {
  371. this.determineDuration_();
  372. }
  373. // Check if any playlist does not have the first reference (due to a
  374. // problem in the live encoder for example), and disable the stream if
  375. // necessary.
  376. for (const streamInfo of activeStreamInfos) {
  377. if (!streamInfo.stream.isAudioMuxedInVideo &&
  378. streamInfo.stream.segmentIndex &&
  379. !streamInfo.stream.segmentIndex.earliestReference()) {
  380. this.playerInterface_.disableStream(streamInfo.stream);
  381. }
  382. }
  383. }
  384. /**
  385. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  386. * @return {!Map<number, number>}
  387. * @private
  388. */
  389. getMediaSequenceToStartTimeFor_(streamInfo) {
  390. if (this.isLive_()) {
  391. return this.mediaSequenceToStartTimeByType_.get(streamInfo.type);
  392. } else {
  393. return streamInfo.mediaSequenceToStartTime;
  394. }
  395. }
  396. /**
  397. * Updates a stream.
  398. *
  399. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  400. * @return {!Promise}
  401. * @private
  402. */
  403. async updateStream_(streamInfo) {
  404. if (streamInfo.stream.isAudioMuxedInVideo) {
  405. return;
  406. }
  407. const manifestUris = [];
  408. for (const uri of streamInfo.getUris()) {
  409. const uriObj = new goog.Uri(uri);
  410. const queryData = uriObj.getQueryData();
  411. if (streamInfo.canBlockReload) {
  412. if (streamInfo.nextMediaSequence >= 0) {
413. // Indicates that the server must hold the request until a Playlist
414. // contains a Media Segment with the given Media Sequence Number or later.
  415. queryData.add('_HLS_msn', String(streamInfo.nextMediaSequence));
  416. }
  417. if (streamInfo.nextPart >= 0) {
  418. // Indicates, in combination with _HLS_msn, that the server must hold
  419. // the request until a Playlist contains Partial Segment N of Media
  420. // Sequence Number M or later.
  421. queryData.add('_HLS_part', String(streamInfo.nextPart));
  422. }
  423. }
  424. if (streamInfo.canSkipSegments) {
425. // Enable delta updates. This will replace older segments with an
426. // 'EXT-X-SKIP' tag in the media playlist.
  427. queryData.add('_HLS_skip', 'YES');
  428. }
  429. if (queryData.getCount()) {
  430. uriObj.setQueryData(queryData.toDecodedString());
  431. }
  432. manifestUris.push(uriObj.toString());
  433. }
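// Illustrative example: with blocking reload and delta updates enabled, a
// request URI could end up looking like
//   media.m3u8?_HLS_msn=273&_HLS_part=2&_HLS_skip=YES
// where 273 and 2 stand in for nextMediaSequence and nextPart.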
  434. let response;
  435. try {
  436. response = await this.requestManifest_(
  437. manifestUris, /* isPlaylist= */ true).promise;
  438. } catch (e) {
  439. if (this.playerInterface_) {
  440. this.playerInterface_.disableStream(streamInfo.stream);
  441. }
  442. throw e;
  443. }
  444. if (!streamInfo.stream.segmentIndex) {
  445. // The stream was closed since the update was first requested.
  446. return;
  447. }
  448. /** @type {shaka.hls.Playlist} */
  449. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  450. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  451. throw new shaka.util.Error(
  452. shaka.util.Error.Severity.CRITICAL,
  453. shaka.util.Error.Category.MANIFEST,
  454. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  455. }
  456. // Record the final URI after redirects.
  457. const responseUri = response.uri;
  458. if (responseUri != response.originalUri &&
  459. !streamInfo.getUris().includes(responseUri)) {
  460. streamInfo.redirectUris.push(responseUri);
  461. }
  462. /** @type {!Array<!shaka.hls.Tag>} */
  463. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  464. 'EXT-X-DEFINE');
  465. const mediaVariables = this.parseMediaVariables_(
  466. variablesTags, responseUri);
  467. const stream = streamInfo.stream;
  468. const mediaSequenceToStartTime =
  469. this.getMediaSequenceToStartTimeFor_(streamInfo);
  470. const {keyIds, drmInfos, encrypted, aesEncrypted} =
  471. await this.parseDrmInfo_(playlist, stream.mimeType,
  472. streamInfo.getUris, mediaVariables);
  473. if (!stream.encrypted && encrypted && !aesEncrypted) {
  474. stream.encrypted = true;
  475. }
  476. const keysAreEqual =
  477. (a, b) => a.size === b.size && [...a].every((value) => b.has(value));
  478. if (!keysAreEqual(stream.keyIds, keyIds)) {
  479. stream.keyIds = keyIds;
  480. stream.drmInfos = drmInfos;
  481. this.playerInterface_.newDrmInfo(stream);
  482. }
  483. const {segments, bandwidth} = this.createSegments_(
  484. playlist, mediaSequenceToStartTime, mediaVariables,
  485. streamInfo.getUris, streamInfo.type);
  486. if (bandwidth) {
  487. stream.bandwidth = bandwidth;
  488. }
  489. const qualityInfo =
  490. shaka.media.QualityObserver.createQualityInfo(stream);
  491. for (const segment of segments) {
  492. if (segment.initSegmentReference) {
  493. segment.initSegmentReference.mediaQuality = qualityInfo;
  494. }
  495. }
  496. stream.segmentIndex.mergeAndEvict(
  497. segments, this.presentationTimeline_.getSegmentAvailabilityStart());
  498. if (segments.length) {
  499. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  500. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  501. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  502. playlist.tags, 'EXT-X-SKIP');
  503. const skippedSegments =
  504. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  505. const {nextMediaSequence, nextPart} =
  506. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
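// The EXT-X-SKIP tag stands in for skippedSegments segments omitted from
// this delta update, so they are added back when computing the next Media
// Sequence Number for the following blocking-reload request.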
  507. streamInfo.nextMediaSequence = nextMediaSequence + skippedSegments;
  508. streamInfo.nextPart = nextPart;
  509. const playlistStartTime = mediaSequenceToStartTime.get(
  510. mediaSequenceNumber);
  511. stream.segmentIndex.evict(playlistStartTime);
  512. }
  513. const oldSegment = stream.segmentIndex.earliestReference();
  514. if (oldSegment) {
  515. streamInfo.minTimestamp = oldSegment.startTime;
  516. const newestSegment = segments[segments.length - 1];
  517. goog.asserts.assert(newestSegment, 'Should have segments!');
  518. streamInfo.maxTimestamp = newestSegment.endTime;
  519. }
  520. // Once the last segment has been added to the playlist,
521. // the #EXT-X-ENDLIST tag will be appended.
  522. // If that happened, treat the rest of the EVENT presentation as VOD.
  523. const endListTag =
  524. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  525. if (endListTag) {
  526. // Flag this for later. We don't convert the whole presentation into VOD
  527. // until we've seen the ENDLIST tag for all active playlists.
  528. streamInfo.hasEndList = true;
  529. }
  530. this.determineLastTargetDuration_(playlist);
  531. this.processDateRangeTags_(
  532. playlist.tags, stream.type, mediaVariables, streamInfo.getUris);
  533. }
  534. /**
  535. * @override
  536. * @exportInterface
  537. */
  538. onExpirationUpdated(sessionId, expiration) {
  539. // No-op
  540. }
  541. /**
  542. * @override
  543. * @exportInterface
  544. */
  545. onInitialVariantChosen(variant) {
  546. // No-op
  547. }
  548. /**
  549. * @override
  550. * @exportInterface
  551. */
  552. banLocation(uri) {
  553. if (this.contentSteeringManager_) {
  554. this.contentSteeringManager_.banLocation(uri);
  555. }
  556. }
  557. /**
  558. * @override
  559. * @exportInterface
  560. */
  561. setMediaElement(mediaElement) {
  562. this.mediaElement_ = mediaElement;
  563. }
  564. /**
  565. * Align the streams by sequence number by dropping early segments. Then
  566. * offset the streams to begin at presentation time 0.
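 *
 * For example, if the earliest loaded video segment has sequence number 10
 * and the earliest loaded audio segment has sequence number 12, the base
 * sequence number is 12: two references are dropped from the video stream,
 * and both streams are then offset so the earliest remaining segment
 * starts at presentation time 0.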
  567. * @param {!Array<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  568. * @param {boolean=} force
  569. * @private
  570. */
  571. syncStreamsWithSequenceNumber_(streamInfos, force = false) {
  572. // We assume that, when this is first called, we have enough info to
  573. // determine how to use the program date times (e.g. we have both a video
  574. // and an audio, and all other videos and audios match those).
  575. // Thus, we only need to calculate this once.
  576. const updateMinSequenceNumber = this.minSequenceNumber_ == -1;
  577. // Sync using media sequence number. Find the highest starting sequence
  578. // number among all streams. Later, we will drop any references to
  579. // earlier segments in other streams, then offset everything back to 0.
  580. for (const streamInfo of streamInfos) {
  581. const segmentIndex = streamInfo.stream.segmentIndex;
  582. goog.asserts.assert(segmentIndex,
  583. 'Only loaded streams should be synced');
  584. const mediaSequenceToStartTime =
  585. this.getMediaSequenceToStartTimeFor_(streamInfo);
  586. const segment0 = segmentIndex.earliestReference();
  587. if (segment0) {
  588. // This looks inefficient, but iteration order is insertion order.
  589. // So the very first entry should be the one we want.
  590. // We assert that this holds true so that we are alerted by debug
  591. // builds and tests if it changes. We still do a loop, though, so
  592. // that the code functions correctly in production no matter what.
  593. if (goog.DEBUG) {
  594. const firstSequenceStartTime =
  595. mediaSequenceToStartTime.values().next().value;
  596. if (firstSequenceStartTime != segment0.startTime) {
  597. shaka.log.warning(
  598. 'Sequence number map is not ordered as expected!');
  599. }
  600. }
  601. for (const [sequence, start] of mediaSequenceToStartTime) {
  602. if (start == segment0.startTime) {
  603. if (updateMinSequenceNumber) {
  604. this.minSequenceNumber_ = Math.max(
  605. this.minSequenceNumber_, sequence);
  606. }
  607. // Even if we already have decided on a value for
  608. // |this.minSequenceNumber_|, we still need to determine the first
  609. // sequence number for the stream, to offset it in the code below.
  610. streamInfo.firstSequenceNumber = sequence;
  611. break;
  612. }
  613. }
  614. }
  615. }
  616. if (this.minSequenceNumber_ < 0) {
  617. // Nothing to sync.
  618. return;
  619. }
  620. shaka.log.debug('Syncing HLS streams against base sequence number:',
  621. this.minSequenceNumber_);
  622. for (const streamInfo of streamInfos) {
  623. if (!this.ignoreManifestProgramDateTimeFor_(streamInfo.type) && !force) {
  624. continue;
  625. }
  626. const segmentIndex = streamInfo.stream.segmentIndex;
  627. if (segmentIndex) {
  628. // Drop any earlier references.
  629. const numSegmentsToDrop =
  630. this.minSequenceNumber_ - streamInfo.firstSequenceNumber;
  631. if (numSegmentsToDrop > 0) {
  632. segmentIndex.dropFirstReferences(numSegmentsToDrop);
  633. // Now adjust timestamps back to begin at 0.
  634. const segmentN = segmentIndex.earliestReference();
  635. if (segmentN) {
  636. const streamOffset = -segmentN.startTime;
  637. // Modify all SegmentReferences equally.
  638. streamInfo.stream.segmentIndex.offset(streamOffset);
  639. // Update other parts of streamInfo the same way.
  640. this.offsetStreamInfo_(streamInfo, streamOffset);
  641. }
  642. }
  643. }
  644. }
  645. }
  646. /**
  647. * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their
  648. * segments. Also normalizes segment times so that the earliest segment in
  649. * any stream is at time 0.
  650. * @param {!Array<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  651. * @private
  652. */
  653. syncStreamsWithProgramDateTime_(streamInfos) {
  654. // We assume that, when this is first called, we have enough info to
  655. // determine how to use the program date times (e.g. we have both a video
  656. // and an audio, and all other videos and audios match those).
  657. // Thus, we only need to calculate this once.
  658. if (this.lowestSyncTime_ == Infinity) {
  659. for (const streamInfo of streamInfos) {
  660. const segmentIndex = streamInfo.stream.segmentIndex;
  661. goog.asserts.assert(segmentIndex,
  662. 'Only loaded streams should be synced');
  663. const segment0 = segmentIndex.earliestReference();
  664. if (segment0 != null && segment0.syncTime != null) {
  665. this.lowestSyncTime_ =
  666. Math.min(this.lowestSyncTime_, segment0.syncTime);
  667. }
  668. }
  669. }
  670. const lowestSyncTime = this.lowestSyncTime_;
  671. if (lowestSyncTime == Infinity) {
  672. // Nothing to sync.
  673. return;
  674. }
  675. shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime);
  676. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  677. if (this.ignoreManifestProgramDateTimeFor_(streamInfo.type)) {
  678. continue;
  679. }
  680. const segmentIndex = streamInfo.stream.segmentIndex;
  681. if (segmentIndex != null) {
  682. // A segment's startTime should be based on its syncTime vs the lowest
  683. // syncTime across all streams. The earliest segment sync time from
  684. // any stream will become presentation time 0. If two streams start
  685. // e.g. 6 seconds apart in syncTime, then their first segments will
  686. // also start 6 seconds apart in presentation time.
  687. const segment0 = segmentIndex.earliestReference();
  688. if (!segment0) {
  689. continue;
  690. }
  691. if (segment0.syncTime == null) {
  692. shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream',
  693. streamInfo.getUris(),
  694. 'Expect AV sync issues!');
  695. } else {
  696. // Stream metadata are offset by a fixed amount based on the
  697. // first segment.
  698. const segment0TargetTime = segment0.syncTime - lowestSyncTime;
  699. const streamOffset = segment0TargetTime - segment0.startTime;
  700. this.offsetStreamInfo_(streamInfo, streamOffset);
  701. // This is computed across all segments separately to manage
  702. // accumulated drift in durations.
  703. for (const segment of segmentIndex) {
  704. segment.syncAgainst(lowestSyncTime);
  705. }
  706. }
  707. }
  708. }
  709. }
  710. /**
  711. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  712. * @param {number} offset
  713. * @private
  714. */
  715. offsetStreamInfo_(streamInfo, offset) {
716. // Floating-point rounding can produce a spurious sub-millisecond offset.
717. // Don't apply the offset in that case.
  718. if (Math.abs(offset) < 0.001) {
  719. return;
  720. }
  721. // Adjust our accounting of the minimum timestamp.
  722. streamInfo.minTimestamp += offset;
  723. // Adjust our accounting of the maximum timestamp.
  724. streamInfo.maxTimestamp += offset;
  725. goog.asserts.assert(streamInfo.maxTimestamp >= 0,
  726. 'Negative maxTimestamp after adjustment!');
  727. // Update our map from sequence number to start time.
  728. const mediaSequenceToStartTime =
  729. this.getMediaSequenceToStartTimeFor_(streamInfo);
  730. for (const [key, value] of mediaSequenceToStartTime) {
  731. mediaSequenceToStartTime.set(key, value + offset);
  732. }
  733. shaka.log.debug('Offset', offset, 'applied to',
  734. streamInfo.getUris());
  735. }
  736. /**
  737. * Parses the manifest.
  738. *
  739. * @param {BufferSource} data
  740. * @return {!Promise}
  741. * @private
  742. */
  743. async parseManifest_(data) {
  744. const Utils = shaka.hls.Utils;
  745. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  746. goog.asserts.assert(this.masterPlaylistUri_,
  747. 'Master playlist URI must be set before calling parseManifest_!');
  748. const playlist = this.manifestTextParser_.parsePlaylist(data);
  749. /** @type {!Array<!shaka.hls.Tag>} */
  750. const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE');
  751. /** @type {!Array<!shaka.extern.Variant>} */
  752. let variants = [];
  753. /** @type {!Array<!shaka.extern.Stream>} */
  754. let textStreams = [];
  755. /** @type {!Array<!shaka.extern.Stream>} */
  756. let imageStreams = [];
  757. // This assert is our own sanity check.
  758. goog.asserts.assert(this.presentationTimeline_ == null,
  759. 'Presentation timeline created early!');
  760. // We don't know if the presentation is VOD or live until we parse at least
  761. // one media playlist, so make a VOD-style presentation timeline for now
  762. // and change the type later if we discover this is live.
  763. // Since the player will load the first variant chosen early in the process,
  764. // there isn't a window during playback where the live-ness is unknown.
  765. this.presentationTimeline_ = new shaka.media.PresentationTimeline(
  766. /* presentationStartTime= */ null, /* delay= */ 0);
  767. this.presentationTimeline_.setStatic(true);
  768. const getUris = () => {
  769. return [this.masterPlaylistUri_];
  770. };
  771. /** @type {?string} */
  772. let mediaPlaylistType = null;
  773. /** @type {!Map<string, string>} */
  774. let mediaVariables = new Map();
  775. // Parsing a media playlist results in a single-variant stream.
  776. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  777. this.needsClosedCaptionsDetection_ = false;
  778. /** @type {!Array<!shaka.hls.Tag>} */
  779. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  780. 'EXT-X-DEFINE');
  781. mediaVariables = this.parseMediaVariables_(
  782. variablesTags, this.masterPlaylistUri_);
  783. // By default we assume it is video, but in a later step the correct type
  784. // is obtained.
  785. mediaPlaylistType = ContentType.VIDEO;
  786. // These values can be obtained later so these default values are good.
  787. const codecs = '';
  788. const languageValue = '';
  789. const channelsCount = null;
  790. const sampleRate = null;
  791. const closedCaptions = new Map();
  792. const spatialAudio = false;
  793. const characteristics = null;
  794. const forced = false; // Only relevant for text.
  795. const primary = true; // This is the only stream!
  796. const name = 'Media Playlist';
  797. // Make the stream info, with those values.
  798. const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  799. this.globalId_++, mediaVariables, playlist, getUris, codecs,
  800. mediaPlaylistType, languageValue, primary, name, channelsCount,
  801. closedCaptions, characteristics, forced, sampleRate, spatialAudio);
  802. this.uriToStreamInfosMap_.set(this.masterPlaylistUri_, streamInfo);
  803. if (streamInfo.stream) {
  804. const qualityInfo =
  805. shaka.media.QualityObserver.createQualityInfo(streamInfo.stream);
  806. streamInfo.stream.segmentIndex.forEachTopLevelReference(
  807. (reference) => {
  808. if (reference.initSegmentReference) {
  809. reference.initSegmentReference.mediaQuality = qualityInfo;
  810. }
  811. });
  812. }
  813. mediaPlaylistType = streamInfo.stream.type;
  814. // Wrap the stream from that stream info with a variant.
  815. variants.push({
  816. id: 0,
  817. language: this.getLanguage_(languageValue),
  818. disabledUntilTime: 0,
  819. primary: true,
  820. audio: mediaPlaylistType == 'audio' ? streamInfo.stream : null,
  821. video: mediaPlaylistType == 'video' ? streamInfo.stream : null,
  822. bandwidth: streamInfo.stream.bandwidth || 0,
  823. allowedByApplication: true,
  824. allowedByKeySystem: true,
  825. decodingInfos: [],
  826. });
  827. } else {
  828. this.parseMasterVariables_(variablesTags);
  829. /** @type {!Array<!shaka.hls.Tag>} */
  830. const mediaTags = Utils.filterTagsByName(
  831. playlist.tags, 'EXT-X-MEDIA');
  832. /** @type {!Array<!shaka.hls.Tag>} */
  833. const variantTags = Utils.filterTagsByName(
  834. playlist.tags, 'EXT-X-STREAM-INF');
  835. /** @type {!Array<!shaka.hls.Tag>} */
  836. const imageTags = Utils.filterTagsByName(
  837. playlist.tags, 'EXT-X-IMAGE-STREAM-INF');
  838. /** @type {!Array<!shaka.hls.Tag>} */
  839. const iFrameTags = Utils.filterTagsByName(
  840. playlist.tags, 'EXT-X-I-FRAME-STREAM-INF');
  841. /** @type {!Array<!shaka.hls.Tag>} */
  842. const sessionKeyTags = Utils.filterTagsByName(
  843. playlist.tags, 'EXT-X-SESSION-KEY');
  844. /** @type {!Array<!shaka.hls.Tag>} */
  845. const sessionDataTags = Utils.filterTagsByName(
  846. playlist.tags, 'EXT-X-SESSION-DATA');
  847. /** @type {!Array<!shaka.hls.Tag>} */
  848. const contentSteeringTags = Utils.filterTagsByName(
  849. playlist.tags, 'EXT-X-CONTENT-STEERING');
  850. this.processSessionData_(sessionDataTags);
  851. await this.processContentSteering_(contentSteeringTags);
  852. if (!this.config_.ignoreSupplementalCodecs) {
  853. // Duplicate variant tags with supplementalCodecs
  854. const newVariantTags = [];
  855. for (const tag of variantTags) {
  856. const supplementalCodecsString =
  857. tag.getAttributeValue('SUPPLEMENTAL-CODECS');
  858. if (!supplementalCodecsString) {
  859. continue;
  860. }
  861. const supplementalCodecs = supplementalCodecsString.split(/\s*,\s*/)
  862. .map((codec) => {
  863. return codec.split('/')[0];
  864. });
  865. const newAttributes = tag.attributes.map((attr) => {
  866. const name = attr.name;
  867. let value = attr.value;
  868. if (name == 'CODECS') {
  869. value = supplementalCodecs.join(',');
  870. const allCodecs = attr.value.split(',');
  871. if (allCodecs.length > 1) {
  872. const audioCodec =
  873. shaka.util.ManifestParserUtils.guessCodecsSafe(
  874. shaka.util.ManifestParserUtils.ContentType.AUDIO,
  875. allCodecs);
  876. if (audioCodec) {
  877. value += ',' + audioCodec;
  878. }
  879. }
  880. }
  881. return new shaka.hls.Attribute(name, value);
  882. });
  883. newVariantTags.push(
  884. new shaka.hls.Tag(tag.id, tag.name, newAttributes, null));
  885. }
  886. variantTags.push(...newVariantTags);
  887. // Duplicate iFrame tags with supplementalCodecs
  888. const newIFrameTags = [];
  889. for (const tag of iFrameTags) {
  890. const supplementalCodecsString =
  891. tag.getAttributeValue('SUPPLEMENTAL-CODECS');
  892. if (!supplementalCodecsString) {
  893. continue;
  894. }
  895. const supplementalCodecs = supplementalCodecsString.split(/\s*,\s*/)
  896. .map((codec) => {
  897. return codec.split('/')[0];
  898. });
  899. const newAttributes = tag.attributes.map((attr) => {
  900. const name = attr.name;
  901. let value = attr.value;
  902. if (name == 'CODECS') {
  903. value = supplementalCodecs.join(',');
  904. }
  905. return new shaka.hls.Attribute(name, value);
  906. });
  907. newIFrameTags.push(
  908. new shaka.hls.Tag(tag.id, tag.name, newAttributes, null));
  909. }
  910. iFrameTags.push(...newIFrameTags);
  911. }
  912. this.parseCodecs_(variantTags);
  913. this.parseClosedCaptions_(mediaTags);
  914. const iFrameStreams = this.parseIFrames_(iFrameTags);
  915. variants = await this.createVariantsForTags_(
  916. variantTags, sessionKeyTags, mediaTags, getUris,
  917. this.globalVariables_, iFrameStreams);
  918. textStreams = this.parseTexts_(mediaTags);
  919. imageStreams = await this.parseImages_(imageTags, iFrameTags);
  920. }
  921. // Make sure that the parser has not been destroyed.
  922. if (!this.playerInterface_) {
  923. throw new shaka.util.Error(
  924. shaka.util.Error.Severity.CRITICAL,
  925. shaka.util.Error.Category.PLAYER,
  926. shaka.util.Error.Code.OPERATION_ABORTED);
  927. }
  928. this.determineStartTime_(playlist);
  929. // Single-variant streams aren't lazy-loaded, so for them we already have
  930. // enough info here to determine the presentation type and duration.
  931. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  932. if (this.isLive_()) {
  933. this.changePresentationTimelineToLive_(playlist);
  934. const delay = this.getUpdatePlaylistDelay_();
  935. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  936. }
  937. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  938. this.finalizeStreams_(streamInfos);
  939. this.determineDuration_();
  940. goog.asserts.assert(mediaPlaylistType,
  941. 'mediaPlaylistType should be non-null');
  942. this.processDateRangeTags_(
  943. playlist.tags, mediaPlaylistType, mediaVariables, getUris);
  944. }
  945. this.manifest_ = {
  946. presentationTimeline: this.presentationTimeline_,
  947. variants,
  948. textStreams,
  949. imageStreams,
  950. offlineSessionIds: [],
  951. sequenceMode: this.config_.hls.sequenceMode,
  952. ignoreManifestTimestampsInSegmentsMode:
  953. this.config_.hls.ignoreManifestTimestampsInSegmentsMode,
  954. type: shaka.media.ManifestParser.HLS,
  955. serviceDescription: null,
  956. nextUrl: null,
  957. periodCount: 1,
  958. gapCount: 0,
  959. isLowLatency: false,
  960. startTime: this.startTime_,
  961. };
  962. // If there is no 'CODECS' attribute in the manifest and codec guessing is
  963. // disabled, we need to create the segment indexes now so that missing info
  964. // can be parsed from the media data and added to the stream objects.
  965. if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
  966. const createIndexes = [];
  967. for (const variant of this.manifest_.variants) {
  968. if (variant.audio && variant.audio.codecs === '') {
  969. createIndexes.push(variant.audio.createSegmentIndex());
  970. }
  971. if (variant.video && variant.video.codecs === '') {
  972. createIndexes.push(variant.video.createSegmentIndex());
  973. }
  974. }
  975. await Promise.all(createIndexes);
  976. }
  977. this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
  978. }
  979. /**
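 * Determines basic stream info by downloading and probing one of the given
 * segments (plus its init segment, if any); segments whose file extension
 * maps to a known raw-media mime type are not downloaded.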
  980. * @param {!Array<!shaka.media.SegmentReference>} segments
  981. * @return {!Promise<shaka.media.SegmentUtils.BasicInfo>}
  982. * @private
  983. */
  984. async getBasicInfoFromSegments_(segments) {
  985. const HlsParser = shaka.hls.HlsParser;
  986. const defaultBasicInfo = shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  987. this.config_.hls.mediaPlaylistFullMimeType);
  988. if (!segments.length) {
  989. return defaultBasicInfo;
  990. }
  991. const {segment, segmentIndex} = this.getAvailableSegment_(segments);
  992. const segmentUris = segment.getUris();
  993. const segmentUri = segmentUris[0];
  994. const parsedUri = new goog.Uri(segmentUri);
  995. const extension = parsedUri.getPath().split('.').pop();
  996. const rawMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_.get(extension);
  997. if (rawMimeType) {
  998. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  999. rawMimeType);
  1000. }
  1001. const basicInfos = await Promise.all([
  1002. this.getInfoFromSegment_(segment.initSegmentReference, 0),
  1003. this.getInfoFromSegment_(segment, segmentIndex),
  1004. ]);
  1005. const contentMimeType = basicInfos[1].mimeType;
  1006. const initData = basicInfos[0].data;
  1007. const data = basicInfos[1].data;
  1008. const validMp4Extensions = [
  1009. 'mp4',
  1010. 'mp4a',
  1011. 'm4s',
  1012. 'm4i',
  1013. 'm4a',
  1014. 'm4f',
  1015. 'cmfa',
  1016. 'mp4v',
  1017. 'm4v',
  1018. 'cmfv',
  1019. 'fmp4',
  1020. ];
  1021. const validMp4MimeType = [
  1022. 'audio/mp4',
  1023. 'video/mp4',
  1024. 'video/iso.segment',
  1025. ];
  1026. if (shaka.util.TsParser.probe(
  1027. shaka.util.BufferUtils.toUint8(data))) {
  1028. const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromTs(
  1029. data, this.config_.disableAudio, this.config_.disableVideo,
  1030. this.config_.disableText);
  1031. if (basicInfo) {
  1032. return basicInfo;
  1033. }
  1034. } else if (segment.initSegmentReference ||
  1035. validMp4Extensions.includes(extension) ||
  1036. validMp4MimeType.includes(contentMimeType)) {
  1037. const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromMp4(
  1038. initData, data, this.config_.disableText);
  1039. if (basicInfo) {
  1040. return basicInfo;
  1041. }
  1042. }
  1043. if (contentMimeType) {
  1044. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  1045. contentMimeType);
  1046. }
  1047. return defaultBasicInfo;
  1048. }
  1049. /**
  1050. * @param {?shaka.media.AnySegmentReference} segment
  1051. * @param {number} segmentIndex
  1052. * @return {!Promise<{mimeType: ?string, data: ?BufferSource}>}
  1053. * @private
  1054. */
  1055. async getInfoFromSegment_(segment, segmentIndex) {
  1056. if (!segment) {
  1057. return {mimeType: null, data: null};
  1058. }
  1059. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  1060. const segmentRequest = shaka.util.Networking.createSegmentRequest(
  1061. segment.getUris(), segment.getStartByte(), segment.getEndByte(),
  1062. this.config_.retryParameters);
  1063. const type = segment instanceof shaka.media.SegmentReference ?
  1064. shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT :
  1065. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  1066. const response = await this.makeNetworkRequest_(
  1067. segmentRequest, requestType, {type}).promise;
  1068. let data = response.data;
  1069. if (segment.aesKey) {
  1070. data = await shaka.media.SegmentUtils.aesDecrypt(
  1071. data, segment.aesKey, segmentIndex);
  1072. }
  1073. if (segment instanceof shaka.media.SegmentReference) {
  1074. segment.setSegmentData(data, /* singleUse= */ true);
  1075. } else {
  1076. segment.setSegmentData(data);
  1077. }
  1078. let mimeType = response.headers['content-type'];
  1079. if (mimeType) {
  1080. // Split the MIME type in case the server sent additional parameters.
  1081. mimeType = mimeType.split(';')[0].toLowerCase();
  1082. }
  1083. return {mimeType, data};
  1084. }
  1085. /** @private */
  1086. determineDuration_() {
  1087. goog.asserts.assert(this.presentationTimeline_,
  1088. 'Presentation timeline not created!');
  1089. if (this.isLive_()) {
  1090. // The spec says nothing much about seeking in live content, but Safari's
  1091. // built-in HLS implementation does not allow it. Therefore we will set
  1092. // the availability window equal to the presentation delay. The player
  1093. // will be able to buffer ahead three segments, but the seek window will
  1094. // be zero-sized.
  1095. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  1096. if (this.presentationType_ == PresentationType.LIVE) {
  1097. let segmentAvailabilityDuration = this.getLiveDuration_() || 0;
  1098. // The app can override that with a longer duration, to allow seeking.
  1099. if (!isNaN(this.config_.availabilityWindowOverride)) {
  1100. segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
  1101. }
  1102. this.presentationTimeline_.setSegmentAvailabilityDuration(
  1103. segmentAvailabilityDuration);
  1104. }
  1105. } else {
  1106. // Use the minimum duration as the presentation duration.
  1107. this.presentationTimeline_.setDuration(this.getMinDuration_());
  1108. }
  1109. if (!this.presentationTimeline_.isStartTimeLocked()) {
  1110. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  1111. if (!streamInfo.stream.segmentIndex) {
  1112. continue; // Not active.
  1113. }
  1114. if (streamInfo.type != 'audio' && streamInfo.type != 'video') {
  1115. continue;
  1116. }
  1117. const firstReference =
  1118. streamInfo.stream.segmentIndex.earliestReference();
  1119. if (firstReference && firstReference.syncTime) {
  1120. const syncTime = firstReference.syncTime;
  1121. this.presentationTimeline_.setInitialProgramDateTime(syncTime);
  1122. }
  1123. }
  1124. }
  1125. // This is the first point where we have a meaningful presentation start
  1126. // time, and we need to tell PresentationTimeline that so that it can
  1127. // maintain consistency from here on.
  1128. this.presentationTimeline_.lockStartTime();
  1129. // This asserts that the live edge is being calculated from segment times.
  1130. // For VOD and event streams, this check should still pass.
  1131. goog.asserts.assert(
  1132. !this.presentationTimeline_.usingPresentationStartTime(),
  1133. 'We should not be using the presentation start time in HLS!');
  1134. }
  1135. /**
1136. * Get the variables defined by EXT-X-DEFINE tags, and store them in a map.
1137. * @param {!Array<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
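 * For example (illustrative values), the tag
 *   #EXT-X-DEFINE:NAME="token",VALUE="abc123"
 * makes the variable reference {$token} expand to "abc123" elsewhere in the
 * playlist.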
  1138. * @private
  1139. */
  1140. parseMasterVariables_(tags) {
  1141. const queryParams = new goog.Uri(this.masterPlaylistUri_).getQueryData();
  1142. for (const variableTag of tags) {
  1143. const name = variableTag.getAttributeValue('NAME');
  1144. const value = variableTag.getAttributeValue('VALUE');
  1145. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  1146. if (name && value) {
  1147. if (!this.globalVariables_.has(name)) {
  1148. this.globalVariables_.set(name, value);
  1149. }
  1150. }
  1151. if (queryParam) {
  1152. const queryParamValue = queryParams.get(queryParam)[0];
  1153. if (queryParamValue && !this.globalVariables_.has(queryParamValue)) {
  1154. this.globalVariables_.set(queryParam, queryParamValue);
  1155. }
  1156. }
  1157. }
  1158. }
  1159. /**
1160. * Get the variables defined by EXT-X-DEFINE tags, and store them in a map.
1161. * @param {!Array<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
  1162. * @param {string} uri Media playlist URI.
  1163. * @return {!Map<string, string>}
  1164. * @private
  1165. */
  1166. parseMediaVariables_(tags, uri) {
  1167. const queryParams = new goog.Uri(uri).getQueryData();
  1168. const mediaVariables = new Map();
  1169. for (const variableTag of tags) {
  1170. const name = variableTag.getAttributeValue('NAME');
  1171. const value = variableTag.getAttributeValue('VALUE');
  1172. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  1173. const mediaImport = variableTag.getAttributeValue('IMPORT');
  1174. if (name && value) {
  1175. if (!mediaVariables.has(name)) {
  1176. mediaVariables.set(name, value);
  1177. }
  1178. }
  1179. if (queryParam) {
  1180. const queryParamValue = queryParams.get(queryParam)[0];
  1181. if (queryParamValue && !mediaVariables.has(queryParamValue)) {
  1182. mediaVariables.set(queryParam, queryParamValue);
  1183. }
  1184. }
  1185. if (mediaImport) {
  1186. const globalValue = this.globalVariables_.get(mediaImport);
  1187. if (globalValue) {
  1188. mediaVariables.set(mediaImport, globalValue);
  1189. }
  1190. }
  1191. }
  1192. return mediaVariables;
  1193. }
  1194. /**
1195. * Get the codecs of each variant tag, and store them in a map from
1196. * audio/video/subtitle group id to the corresponding codecs string.
  1197. * @param {!Array<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1198. * @private
  1199. */
  1200. parseCodecs_(tags) {
  1201. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1202. for (const variantTag of tags) {
  1203. const audioGroupId = variantTag.getAttributeValue('AUDIO');
  1204. const videoGroupId = variantTag.getAttributeValue('VIDEO');
  1205. const subGroupId = variantTag.getAttributeValue('SUBTITLES');
  1206. const allCodecs = this.getCodecsForVariantTag_(variantTag);
  1207. if (subGroupId) {
  1208. const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1209. ContentType.TEXT, allCodecs);
  1210. goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
  1211. this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
  1212. shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
  1213. }
  1214. if (audioGroupId) {
  1215. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1216. ContentType.AUDIO, allCodecs);
  1217. if (!codecs) {
  1218. codecs = this.config_.hls.defaultAudioCodec;
  1219. }
  1220. this.groupIdToCodecsMap_.set(audioGroupId, codecs);
  1221. }
  1222. if (videoGroupId) {
  1223. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1224. ContentType.VIDEO, allCodecs);
  1225. if (!codecs) {
  1226. codecs = this.config_.hls.defaultVideoCodec;
  1227. }
  1228. this.groupIdToCodecsMap_.set(videoGroupId, codecs);
  1229. }
  1230. }
  1231. }
  1232. /**
  1233. * Process EXT-X-SESSION-DATA tags.
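 * An illustrative example:
 *   #EXT-X-SESSION-DATA:DATA-ID="com.example.title",VALUE="My Movie"
 * Each tag is surfaced to the application as a 'sessiondata' event.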
  1234. *
  1235. * @param {!Array<!shaka.hls.Tag>} tags
  1236. * @private
  1237. */
  1238. processSessionData_(tags) {
  1239. for (const tag of tags) {
  1240. const id = tag.getAttributeValue('DATA-ID');
  1241. const uri = tag.getAttributeValue('URI');
  1242. const language = tag.getAttributeValue('LANGUAGE');
  1243. const value = tag.getAttributeValue('VALUE');
  1244. const data = (new Map()).set('id', id);
  1245. if (uri) {
  1246. data.set('uri', shaka.hls.Utils.constructSegmentUris(
  1247. [this.masterPlaylistUri_], uri, this.globalVariables_)[0]);
  1248. }
  1249. if (language) {
  1250. data.set('language', language);
  1251. }
  1252. if (value) {
  1253. data.set('value', value);
  1254. }
  1255. const event = new shaka.util.FakeEvent('sessiondata', data);
  1256. if (this.playerInterface_) {
  1257. this.playerInterface_.onEvent(event);
  1258. }
  1259. }
  1260. }
  1261. /**
  1262. * Process EXT-X-CONTENT-STEERING tags.
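 * An illustrative example:
 *   #EXT-X-CONTENT-STEERING:SERVER-URI="https://example.com/steering.json",PATHWAY-ID="CDN-A"
 * SERVER-URI points at the steering manifest and PATHWAY-ID names the
 * default pathway.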
  1263. *
  1264. * @param {!Array<!shaka.hls.Tag>} tags
  1265. * @return {!Promise}
  1266. * @private
  1267. */
  1268. async processContentSteering_(tags) {
  1269. if (!this.playerInterface_ || !this.config_) {
  1270. return;
  1271. }
  1272. let contentSteeringPromise;
  1273. for (const tag of tags) {
  1274. const defaultPathwayId = tag.getAttributeValue('PATHWAY-ID');
  1275. const uri = tag.getAttributeValue('SERVER-URI');
  1276. if (!defaultPathwayId || !uri) {
  1277. continue;
  1278. }
  1279. this.contentSteeringManager_ =
  1280. new shaka.util.ContentSteeringManager(this.playerInterface_);
  1281. this.contentSteeringManager_.configure(this.config_);
  1282. this.contentSteeringManager_.setBaseUris([this.masterPlaylistUri_]);
  1283. this.contentSteeringManager_.setManifestType(
  1284. shaka.media.ManifestParser.HLS);
  1285. this.contentSteeringManager_.setDefaultPathwayId(defaultPathwayId);
  1286. contentSteeringPromise =
  1287. this.contentSteeringManager_.requestInfo(uri);
  1288. break;
  1289. }
  1290. await contentSteeringPromise;
  1291. }
  1292. /**
  1293. * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags.
  1294. * Create text streams for Subtitles, but not Closed Captions.
  1295. *
  1296. * @param {!Array<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1297. * @return {!Array<!shaka.extern.Stream>}
  1298. * @private
  1299. */
  1300. parseTexts_(mediaTags) {
  1301. // Create text stream for each Subtitle media tag.
  1302. const subtitleTags =
  1303. shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES');
  1304. const textStreams = subtitleTags.map((tag) => {
  1305. const disableText = this.config_.disableText;
  1306. if (disableText) {
  1307. return null;
  1308. }
  1309. try {
  1310. return this.createStreamInfoFromMediaTags_([tag], new Map()).stream;
  1311. } catch (e) {
  1312. if (this.config_.hls.ignoreTextStreamFailures) {
  1313. return null;
  1314. }
  1315. throw e;
  1316. }
  1317. });
  1318. const type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1319. // Set the codecs for text streams.
  1320. for (const tag of subtitleTags) {
  1321. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1322. const codecs = this.groupIdToCodecsMap_.get(groupId);
  1323. if (codecs) {
  1324. const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId);
  1325. if (textStreamInfos) {
  1326. for (const textStreamInfo of textStreamInfos) {
  1327. textStreamInfo.stream.codecs = codecs;
  1328. textStreamInfo.stream.mimeType =
  1329. this.guessMimeTypeBeforeLoading_(type, codecs) ||
  1330. this.guessMimeTypeFallback_(type);
  1331. this.setFullTypeForStream_(textStreamInfo.stream);
  1332. }
  1333. }
  1334. }
  1335. }
  1336. // Do not create text streams for Closed captions.
  1337. return textStreams.filter((s) => s);
  1338. }
  1339. /**
  1340. * @param {!shaka.extern.Stream} stream
  1341. * @private
  1342. */
  1343. setFullTypeForStream_(stream) {
  1344. const combinations = new Set([shaka.util.MimeUtils.getFullType(
  1345. stream.mimeType, stream.codecs)]);
  1346. if (stream.segmentIndex) {
  1347. stream.segmentIndex.forEachTopLevelReference((reference) => {
  1348. if (reference.mimeType) {
  1349. combinations.add(shaka.util.MimeUtils.getFullType(
  1350. reference.mimeType, stream.codecs));
  1351. }
  1352. });
  1353. }
  1354. stream.fullMimeTypes = combinations;
  1355. }
  1356. /**
  1357. * @param {!Array<!shaka.hls.Tag>} imageTags from the playlist.
  1358. * @param {!Array<!shaka.hls.Tag>} iFrameTags from the playlist.
  1359. * @return {!Promise<!Array<!shaka.extern.Stream>>}
  1360. * @private
  1361. */
  1362. async parseImages_(imageTags, iFrameTags) {
  1363. // Create image stream for each image tag.
  1364. const imageStreamPromises = imageTags.map(async (tag) => {
  1365. const disableThumbnails = this.config_.disableThumbnails;
  1366. if (disableThumbnails) {
  1367. return null;
  1368. }
  1369. try {
  1370. const streamInfo = await this.createStreamInfoFromImageTag_(tag);
  1371. return streamInfo.stream;
  1372. } catch (e) {
  1373. if (this.config_.hls.ignoreImageStreamFailures) {
  1374. return null;
  1375. }
  1376. throw e;
  1377. }
  1378. }).concat(iFrameTags.map((tag) => {
  1379. const disableThumbnails = this.config_.disableThumbnails;
  1380. if (disableThumbnails) {
  1381. return null;
  1382. }
  1383. try {
  1384. const streamInfo = this.createStreamInfoFromIframeTag_(tag);
  1385. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1386. if (streamInfo.stream.type !== ContentType.IMAGE) {
  1387. return null;
  1388. }
  1389. return streamInfo.stream;
  1390. } catch (e) {
  1391. if (this.config_.hls.ignoreImageStreamFailures) {
  1392. return null;
  1393. }
  1394. throw e;
  1395. }
  1396. }));
  1397. const imageStreams = await Promise.all(imageStreamPromises);
  1398. return imageStreams.filter((s) => s);
  1399. }
  1400. /**
  1401. * @param {!Array<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1402. * @param {!Map<string, string>} groupIdPathwayIdMapping
  1403. * @private
  1404. */
  1405. createStreamInfosFromMediaTags_(mediaTags, groupIdPathwayIdMapping) {
  1406. // Filter out subtitles, and media tags without a URI (except audio).
  1407. mediaTags = mediaTags.filter((tag) => {
  1408. const uri = tag.getAttributeValue('URI') || '';
  1409. const type = tag.getAttributeValue('TYPE');
  1410. return type != 'SUBTITLES' && (uri != '' || type == 'AUDIO');
  1411. });
  1412. const groupedTags = {};
  1413. for (const tag of mediaTags) {
  1414. const key = tag.getTagKey(!this.contentSteeringManager_);
  1415. if (!groupedTags[key]) {
  1416. groupedTags[key] = [tag];
  1417. } else {
  1418. groupedTags[key].push(tag);
  1419. }
  1420. }
  1421. for (const key in groupedTags) {
  1422. // Create stream info for each audio / video media grouped tag.
  1423. this.createStreamInfoFromMediaTags_(
  1424. groupedTags[key], groupIdPathwayIdMapping, /* requireUri= */ false);
  1425. }
  1426. }
  1427. /**
  1428. * @param {!Array<!shaka.hls.Tag>} iFrameTags from the playlist.
  1429. * @return {!Array<!shaka.extern.Stream>}
  1430. * @private
  1431. */
  1432. parseIFrames_(iFrameTags) {
  1433. // Create iFrame stream for each iFrame tag.
  1434. const iFrameStreams = iFrameTags.map((tag) => {
  1435. const streamInfo = this.createStreamInfoFromIframeTag_(tag);
  1436. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1437. if (streamInfo.stream.type !== ContentType.VIDEO) {
  1438. return null;
  1439. }
  1440. return streamInfo.stream;
  1441. });
  1442. // Filter out mjpg iFrames (mapped to null above).
  1443. return iFrameStreams.filter((s) => s);
  1444. }
  1445. /**
  1446. * @param {!Array<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1447. * @param {!Array<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags
  1448. * from the playlist.
  1449. * @param {!Array<!shaka.hls.Tag>} mediaTags EXT-X-MEDIA tags from the
  1450. * playlist.
  1451. * @param {function(): !Array<string>} getUris
  1452. * @param {?Map<string, string>} variables
  1453. * @param {!Array<!shaka.extern.Stream>} iFrameStreams
  1454. * @return {!Promise<!Array<!shaka.extern.Variant>>}
  1455. * @private
  1456. */
  1457. async createVariantsForTags_(tags, sessionKeyTags, mediaTags, getUris,
  1458. variables, iFrameStreams) {
  1459. // EXT-X-SESSION-KEY processing
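// An illustrative (assumed) tag handled by this loop:
//   #EXT-X-SESSION-KEY:METHOD=SAMPLE-AES,URI="skd://example-key",KEYFORMAT="com.apple.streamingkeydelivery"
// METHOD and KEYFORMAT determine whether the tag is skipped (NONE), handled as
// an AES key fetch, parsed as an "identity" key, or routed to a
// keyformat-specific DRM parser.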
  1460. const drmInfos = [];
  1461. const keyIds = new Set();
  1462. if (!this.config_.ignoreDrmInfo && sessionKeyTags.length > 0) {
  1463. for (const drmTag of sessionKeyTags) {
  1464. const method = drmTag.getRequiredAttrValue('METHOD');
  1465. // According to the HLS spec, KEYFORMAT is optional and implicitly
  1466. // defaults to "identity".
  1467. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  1468. const keyFormat =
  1469. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  1470. let drmInfo = null;
  1471. if (method == 'NONE') {
  1472. continue;
  1473. } else if (this.isAesMethod_(method)) {
  1474. const keyUris = shaka.hls.Utils.constructSegmentUris(
  1475. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  1476. const keyMapKey = keyUris.sort().join('');
  1477. if (!this.aesKeyMap_.has(keyMapKey)) {
  1478. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  1479. const request = shaka.net.NetworkingEngine.makeRequest(
  1480. keyUris, this.config_.retryParameters);
  1481. const keyResponse = this.makeNetworkRequest_(request, requestType)
  1482. .promise;
  1483. this.aesKeyMap_.set(keyMapKey, keyResponse);
  1484. }
  1485. continue;
  1486. } else if (keyFormat == 'identity') {
  1487. // eslint-disable-next-line no-await-in-loop
  1488. drmInfo = await this.identityDrmParser_(
  1489. drmTag, /* mimeType= */ '', getUris,
  1490. /* initSegmentRef= */ null, variables);
  1491. } else {
  1492. const drmParser =
  1493. this.keyFormatsToDrmParsers_.get(keyFormat);
  1494. drmInfo = drmParser ?
  1495. // eslint-disable-next-line no-await-in-loop
  1496. await drmParser(drmTag, /* mimeType= */ '',
  1497. /* initSegmentRef= */ null) : null;
  1498. }
  1499. if (drmInfo) {
  1500. if (drmInfo.keyIds) {
  1501. for (const keyId of drmInfo.keyIds) {
  1502. keyIds.add(keyId);
  1503. }
  1504. }
  1505. drmInfos.push(drmInfo);
  1506. } else {
  1507. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  1508. }
  1509. }
  1510. }
  1511. const groupedTags = {};
  1512. for (const tag of tags) {
  1513. const key = tag.getTagKey(!this.contentSteeringManager_);
  1514. if (!groupedTags[key]) {
  1515. groupedTags[key] = [tag];
  1516. } else {
  1517. groupedTags[key].push(tag);
  1518. }
  1519. }
  1520. const allVariants = [];
  1521. // Create variants for each group of variant tag.
  1522. for (const key in groupedTags) {
  1523. const tags = groupedTags[key];
  1524. const firstTag = tags[0];
  1525. const frameRate = firstTag.getAttributeValue('FRAME-RATE');
  1526. const bandwidth =
  1527. Number(firstTag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
  1528. Number(firstTag.getRequiredAttrValue('BANDWIDTH'));
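// Prefer AVERAGE-BANDWIDTH when present and non-zero; otherwise fall back to
// the required BANDWIDTH attribute.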
  1529. const resolution = firstTag.getAttributeValue('RESOLUTION');
  1530. const [width, height] = resolution ? resolution.split('x') : [null, null];
  1531. const videoRange = firstTag.getAttributeValue('VIDEO-RANGE');
  1532. let videoLayout = firstTag.getAttributeValue('REQ-VIDEO-LAYOUT');
  1533. if (videoLayout && videoLayout.includes(',')) {
  1534. // If multiple video layout strings are present, pick the first valid
  1535. // one.
  1536. const layoutStrings = videoLayout.split(',').filter((layoutString) => {
  1537. return layoutString == 'CH-STEREO' || layoutString == 'CH-MONO';
  1538. });
  1539. videoLayout = layoutStrings[0];
  1540. }
  1541. // According to the HLS spec:
  1542. // By default a video variant is monoscopic, so an attribute
  1543. // consisting entirely of REQ-VIDEO-LAYOUT="CH-MONO" is unnecessary
  1544. // and SHOULD NOT be present.
  1545. videoLayout = videoLayout || 'CH-MONO';
  1546. const streamInfos = this.createStreamInfosForVariantTags_(tags,
  1547. mediaTags, resolution, frameRate);
  1548. goog.asserts.assert(streamInfos.audio.length ||
  1549. streamInfos.video.length, 'We should have created a stream!');
  1550. allVariants.push(...this.createVariants_(
  1551. streamInfos.audio,
  1552. streamInfos.video,
  1553. bandwidth,
  1554. width,
  1555. height,
  1556. frameRate,
  1557. videoRange,
  1558. videoLayout,
  1559. drmInfos,
  1560. keyIds,
  1561. iFrameStreams));
  1562. }
  1563. return allVariants.filter((variant) => variant != null);
  1564. }
  1565. /**
  1566. * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
  1567. * related media tags.
  1568. *
  1569. * @param {!Array<!shaka.hls.Tag>} tags
  1570. * @param {!Array<!shaka.hls.Tag>} mediaTags
  1571. * @param {?string} resolution
  1572. * @param {?string} frameRate
  1573. * @return {!shaka.hls.HlsParser.StreamInfos}
  1574. * @private
  1575. */
  1576. createStreamInfosForVariantTags_(tags, mediaTags, resolution, frameRate) {
  1577. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1578. /** @type {shaka.hls.HlsParser.StreamInfos} */
  1579. const res = {
  1580. audio: [],
  1581. video: [],
  1582. };
  1583. const groupIdPathwayIdMapping = new Map();
  1584. const globalGroupIds = [];
  1585. let isAudioGroup = false;
  1586. let isVideoGroup = false;
  1587. for (const tag of tags) {
  1588. const audioGroupId = tag.getAttributeValue('AUDIO');
  1589. const videoGroupId = tag.getAttributeValue('VIDEO');
  1590. goog.asserts.assert(audioGroupId == null || videoGroupId == null,
  1591. 'Unexpected: both video and audio described by media tags!');
  1592. const groupId = audioGroupId || videoGroupId;
  1593. if (!groupId) {
  1594. continue;
  1595. }
  1596. if (!globalGroupIds.includes(groupId)) {
  1597. globalGroupIds.push(groupId);
  1598. }
  1599. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  1600. if (pathwayId) {
  1601. groupIdPathwayIdMapping.set(groupId, pathwayId);
  1602. }
  1603. if (audioGroupId) {
  1604. isAudioGroup = true;
  1605. } else if (videoGroupId) {
  1606. isVideoGroup = true;
  1607. }
  1608. // Make an educated guess about the stream type.
  1609. shaka.log.debug('Guessing stream type for', tag.toString());
  1610. }
  1611. if (globalGroupIds.length && mediaTags.length) {
  1612. const mediaTagsForVariant = mediaTags.filter((tag) => {
  1613. return globalGroupIds.includes(tag.getRequiredAttrValue('GROUP-ID'));
  1614. });
  1615. this.createStreamInfosFromMediaTags_(
  1616. mediaTagsForVariant, groupIdPathwayIdMapping);
  1617. }
  1618. const globalGroupId = globalGroupIds.sort().join(',');
  1619. const streamInfos =
  1620. (globalGroupId && this.groupIdToStreamInfosMap_.has(globalGroupId)) ?
  1621. this.groupIdToStreamInfosMap_.get(globalGroupId) : [];
  1622. if (isAudioGroup) {
  1623. res.audio.push(...streamInfos);
  1624. } else if (isVideoGroup) {
  1625. res.video.push(...streamInfos);
  1626. }
  1627. let type;
  1628. let ignoreStream = false;
  1629. // The Microsoft HLS manifest generators will make audio-only variants
  1630. // that link to their URI both directly and through an audio tag.
  1631. // In that case, ignore the local URI and use the version in the
  1632. // AUDIO tag, so you inherit its language.
  1633. // As an example, see the manifest linked in issue #860.
  1634. const allStreamUris = tags.map((tag) => tag.getRequiredAttrValue('URI'));
  1635. const hasSameUri = res.audio.find((audio) => {
  1636. return audio && audio.getUris().find((uri) => {
  1637. return allStreamUris.includes(uri);
  1638. });
  1639. });
  1640. /** @type {!Array<string>} */
  1641. let allCodecs = this.getCodecsForVariantTag_(tags[0]);
  1642. const videoCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1643. ContentType.VIDEO, allCodecs);
  1644. const audioCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1645. ContentType.AUDIO, allCodecs);
  1646. if (audioCodecs && !videoCodecs) {
  1647. // There are no associated media tags, and there's only an audio codec
  1648. // and no video codec, so this should be audio.
  1649. type = ContentType.AUDIO;
  1650. shaka.log.debug('Guessing audio-only.');
  1651. ignoreStream = res.audio.length > 0;
  1652. } else if (!res.audio.length && !res.video.length &&
  1653. audioCodecs && videoCodecs) {
  1654. // There are both audio and video codecs, so assume multiplexed content.
  1655. // Note that the default used when CODECS is missing assumes multiple
  1656. // codecs (and therefore multiplexed content).
  1657. // Recombine the codec strings into one so that MediaSource isn't
  1658. // lied to later. (That would trigger an error in Chrome.)
  1659. shaka.log.debug('Guessing multiplexed audio+video.');
  1660. type = ContentType.VIDEO;
  1661. allCodecs = [[videoCodecs, audioCodecs].join(',')];
  1662. } else if (res.audio.length && hasSameUri) {
  1663. shaka.log.debug('Guessing audio-only.');
  1664. type = ContentType.AUDIO;
  1665. ignoreStream = true;
  1666. } else if (res.video.length && !res.audio.length) {
  1667. // There are associated video streams. Assume this is audio.
  1668. shaka.log.debug('Guessing audio-only.');
  1669. type = ContentType.AUDIO;
  1670. } else {
  1671. shaka.log.debug('Guessing video-only.');
  1672. type = ContentType.VIDEO;
  1673. }
  1674. if (!ignoreStream) {
  1675. const streamInfo =
  1676. this.createStreamInfoFromVariantTags_(tags, allCodecs, type);
  1677. if (globalGroupId) {
  1678. streamInfo.stream.groupId = globalGroupId;
  1679. }
  1680. res[streamInfo.stream.type] = [streamInfo];
  1681. }
  1682. return res;
  1683. }
  1684. /**
  1685. * Get the codecs from the 'EXT-X-STREAM-INF' tag.
  1686. *
  1687. * @param {!shaka.hls.Tag} tag
  1688. * @return {!Array<string>} codecs
  1689. * @private
  1690. */
  1691. getCodecsForVariantTag_(tag) {
  1692. let codecsString = tag.getAttributeValue('CODECS') || '';
  1693. this.codecInfoInManifest_ = codecsString.length > 0;
  1694. if (!this.codecInfoInManifest_ && !this.config_.hls.disableCodecGuessing) {
  1695. // These are the default codecs to assume if none are specified.
  1696. const defaultCodecsArray = [];
  1697. if (!this.config_.disableVideo) {
  1698. defaultCodecsArray.push(this.config_.hls.defaultVideoCodec);
  1699. }
  1700. if (!this.config_.disableAudio) {
  1701. defaultCodecsArray.push(this.config_.hls.defaultAudioCodec);
  1702. }
  1703. codecsString = defaultCodecsArray.join(',');
  1704. }
  1705. // Strip out internal whitespace while splitting on commas:
  1706. /** @type {!Array<string>} */
  1707. const codecs = codecsString.split(/\s*,\s*/);
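// e.g. 'avc1.4d401f, mp4a.40.2' -> ['avc1.4d401f', 'mp4a.40.2']
// (illustrative codec strings).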
  1708. return shaka.media.SegmentUtils.codecsFiltering(codecs);
  1709. }
  1710. /**
  1711. * Get the channel count information for an HLS audio track.
  1712. * CHANNELS specifies an ordered, "/" separated list of parameters.
  1713. * If the type is audio, the first parameter will be a decimal integer
  1714. * specifying the number of independent, simultaneous audio channels.
  1715. * No other channels parameters are currently defined.
  1716. *
  1717. * @param {!shaka.hls.Tag} tag
  1718. * @return {?number}
  1719. * @private
  1720. */
  1721. getChannelsCount_(tag) {
  1722. const channels = tag.getAttributeValue('CHANNELS');
  1723. if (!channels) {
  1724. return null;
  1725. }
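// e.g. CHANNELS="6" -> 6, and CHANNELS="16/JOC" -> 16 (illustrative values).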
  1726. const channelCountString = channels.split('/')[0];
  1727. const count = parseInt(channelCountString, 10);
  1728. return count;
  1729. }
  1730. /**
  1731. * Get the sample rate information for an HLS audio track.
  1732. *
  1733. * @param {!shaka.hls.Tag} tag
  1734. * @return {?number}
  1735. * @private
  1736. */
  1737. getSampleRate_(tag) {
  1738. const sampleRate = tag.getAttributeValue('SAMPLE-RATE');
  1739. if (!sampleRate) {
  1740. return null;
  1741. }
  1742. return parseInt(sampleRate, 10);
  1743. }
  1744. /**
  1745. * Get the spatial audio information for an HLS audio track.
  1746. * In HLS the channels field indicates the number of audio channels that the
  1747. * stream has (eg: 2). In the case of Dolby Atmos (EAC-3), the complexity is
  1748. * expressed with the number of channels followed by the word JOC
  1749. * (eg: 16/JOC), so 16 would be the number of channels (eg: 7.3.6 layout),
  1750. * and JOC indicates that the stream has spatial audio. For Dolby AC-4 ATMOS,
  1751. * it's necessary to search for the ATMOS keyword.
  1752. * @see https://developer.apple.com/documentation/http-live-streaming/hls-authoring-specification-for-apple-devices-appendixes
  1753. * @see https://ott.dolby.com/OnDelKits/AC-4/Dolby_AC-4_Online_Delivery_Kit_1.5/Documentation/Specs/AC4_HLS/help_files/topics/hls_playlist_c_codec_indication_ims.html
  1754. *
  1755. * @param {!shaka.hls.Tag} tag
  1756. * @return {boolean}
  1757. * @private
  1758. */
  1759. isSpatialAudio_(tag) {
  1760. const channels = tag.getAttributeValue('CHANNELS');
  1761. if (!channels) {
  1762. return false;
  1763. }
  1764. const channelsParts = channels.split('/');
  1765. if (channelsParts.length != 2) {
  1766. return false;
  1767. }
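// e.g. CHANNELS="16/JOC" splits into ['16', 'JOC'] and returns true;
// CHANNELS="2" was already rejected above because it has no second part
// (illustrative values).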
  1768. return channelsParts[1] === 'JOC' || channelsParts[1].includes('ATMOS');
  1769. }
  1770. /**
  1771. * Get the closed captions map information for the EXT-X-STREAM-INF tag, to
  1772. * create the stream info.
  1773. * @param {!shaka.hls.Tag} tag
  1774. * @param {string} type
  1775. * @return {Map<string, string>} closedCaptions
  1776. * @private
  1777. */
  1778. getClosedCaptions_(tag, type) {
  1779. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1780. // The CLOSED-CAPTIONS attribute is optional, and its value may be
  1781. // 'NONE'.
  1782. const closedCaptionsAttr = tag.getAttributeValue('CLOSED-CAPTIONS');
  1783. // EXT-X-STREAM-INF tags may have CLOSED-CAPTIONS attributes.
  1784. // The value can be either a quoted-string or an enumerated-string with
  1785. // the value NONE. If the value is a quoted-string, it MUST match the
  1786. // value of the GROUP-ID attribute of an EXT-X-MEDIA tag elsewhere in the
  1787. // Playlist whose TYPE attribute is CLOSED-CAPTIONS.
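// The returned map (built in parseClosedCaptions_) is keyed by INSTREAM-ID
// (e.g. 'CC1' or 'svc1') and maps to the caption language.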
  1788. if (type == ContentType.VIDEO) {
  1789. if (this.config_.disableText) {
  1790. this.needsClosedCaptionsDetection_ = false;
  1791. return null;
  1792. }
  1793. if (closedCaptionsAttr) {
  1794. if (closedCaptionsAttr != 'NONE') {
  1795. return this.groupIdToClosedCaptionsMap_.get(closedCaptionsAttr);
  1796. }
  1797. this.needsClosedCaptionsDetection_ = false;
  1798. } else if (!closedCaptionsAttr && this.groupIdToClosedCaptionsMap_.size) {
  1799. for (const key of this.groupIdToClosedCaptionsMap_.keys()) {
  1800. return this.groupIdToClosedCaptionsMap_.get(key);
  1801. }
  1802. }
  1803. }
  1804. return null;
  1805. }
  1806. /**
  1807. * Get the normalized language value.
  1808. *
  1809. * @param {?string} languageValue
  1810. * @return {string}
  1811. * @private
  1812. */
  1813. getLanguage_(languageValue) {
  1814. const LanguageUtils = shaka.util.LanguageUtils;
  1815. return LanguageUtils.normalize(languageValue || 'und');
  1816. }
  1817. /**
  1818. * Get the type value.
  1819. * Shaka recognizes the content types 'audio', 'video', 'text', and 'image'.
  1820. * The HLS 'subtitles' type needs to be mapped to 'text'.
  1821. * @param {!shaka.hls.Tag} tag
  1822. * @return {string}
  1823. * @private
  1824. */
  1825. getType_(tag) {
  1826. let type = tag.getRequiredAttrValue('TYPE').toLowerCase();
  1827. if (type == 'subtitles') {
  1828. type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1829. }
  1830. return type;
  1831. }
  1832. /**
  1833. * @param {!Array<shaka.hls.HlsParser.StreamInfo>} audioInfos
  1834. * @param {!Array<shaka.hls.HlsParser.StreamInfo>} videoInfos
  1835. * @param {number} bandwidth
  1836. * @param {?string} width
  1837. * @param {?string} height
  1838. * @param {?string} frameRate
  1839. * @param {?string} videoRange
  1840. * @param {?string} videoLayout
  1841. * @param {!Array<shaka.extern.DrmInfo>} drmInfos
  1842. * @param {!Set<string>} keyIds
  1843. * @param {!Array<!shaka.extern.Stream>} iFrameStreams
  1844. * @return {!Array<!shaka.extern.Variant>}
  1845. * @private
  1846. */
  1847. createVariants_(
  1848. audioInfos, videoInfos, bandwidth, width, height, frameRate, videoRange,
  1849. videoLayout, drmInfos, keyIds, iFrameStreams) {
  1850. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1851. const DrmUtils = shaka.drm.DrmUtils;
  1852. for (const info of videoInfos) {
  1853. this.addVideoAttributes_(
  1854. info.stream, width, height, frameRate, videoRange, videoLayout,
  1855. /* colorGamut= */ null);
  1856. }
  1857. // In the case of audio-only or video-only content, or when audio/video is
  1858. // disabled by the config, we create an array of one item containing
  1859. // a null. This way, the double loop works for all kinds of content.
  1860. // NOTE: we currently don't have support for audio-only content.
  1861. const disableAudio = this.config_.disableAudio;
  1862. if (!audioInfos.length || disableAudio) {
  1863. audioInfos = [null];
  1864. }
  1865. const disableVideo = this.config_.disableVideo;
  1866. if (!videoInfos.length || disableVideo) {
  1867. videoInfos = [null];
  1868. }
  1869. const variants = [];
  1870. for (const audioInfo of audioInfos) {
  1871. for (const videoInfo of videoInfos) {
  1872. const audioStream = audioInfo ? audioInfo.stream : null;
  1873. if (audioStream) {
  1874. audioStream.drmInfos = drmInfos;
  1875. audioStream.keyIds = keyIds;
  1876. }
  1877. const videoStream = videoInfo ? videoInfo.stream : null;
  1878. if (videoStream) {
  1879. videoStream.drmInfos = drmInfos;
  1880. videoStream.keyIds = keyIds;
  1881. if (!this.config_.disableIFrames) {
  1882. shaka.util.StreamUtils.setBetterIFrameStream(
  1883. videoStream, iFrameStreams);
  1884. }
  1885. }
  1886. if (videoStream && !audioStream) {
  1887. videoStream.bandwidth = bandwidth;
  1888. }
  1889. if (!videoStream && audioStream) {
  1890. audioStream.bandwidth = bandwidth;
  1891. }
  1892. const audioDrmInfos = audioInfo ? audioInfo.stream.drmInfos : null;
  1893. const videoDrmInfos = videoInfo ? videoInfo.stream.drmInfos : null;
  1894. const videoStreamUri =
  1895. videoInfo ? videoInfo.getUris().sort().join(',') : '';
  1896. const audioStreamUri =
  1897. audioInfo ? audioInfo.getUris().sort().join(',') : '';
  1898. const codecs = [];
  1899. if (audioStream && audioStream.codecs) {
  1900. codecs.push(audioStream.codecs);
  1901. }
  1902. if (videoStream && videoStream.codecs) {
  1903. codecs.push(videoStream.codecs);
  1904. }
  1905. const variantUriKey = [
  1906. videoStreamUri,
  1907. audioStreamUri,
  1908. codecs.sort(),
  1909. ].join('-');
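// Illustrative key (assumed URIs): 'video.m3u8-audio.m3u8-avc1.4d401f,mp4a.40.2'.
// Variants that differ only in their text streams collapse to the same key and
// are skipped below.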
  1910. if (audioStream && videoStream) {
  1911. if (!DrmUtils.areDrmCompatible(audioDrmInfos, videoDrmInfos)) {
  1912. shaka.log.warning(
  1913. 'Incompatible DRM info in HLS variant. Skipping.');
  1914. continue;
  1915. }
  1916. }
  1917. if (this.variantUriSet_.has(variantUriKey)) {
  1918. // This happens when two variants only differ in their text streams.
  1919. shaka.log.debug(
  1920. 'Skipping variant which only differs in text streams.');
  1921. continue;
  1922. }
  1923. // Since audio and video streams share the same object type, these assertions
  1924. // catch certain mistakes at runtime that the compiler would miss.
  1925. goog.asserts.assert(!audioStream ||
  1926. audioStream.type == ContentType.AUDIO, 'Audio parameter mismatch!');
  1927. goog.asserts.assert(!videoStream ||
  1928. videoStream.type == ContentType.VIDEO, 'Video parameter mismatch!');
  1929. const variant = {
  1930. id: this.globalId_++,
  1931. language: audioStream ? audioStream.language : 'und',
  1932. disabledUntilTime: 0,
  1933. primary: (!!audioStream && audioStream.primary) ||
  1934. (!!videoStream && videoStream.primary),
  1935. audio: audioStream,
  1936. video: videoStream,
  1937. bandwidth,
  1938. allowedByApplication: true,
  1939. allowedByKeySystem: true,
  1940. decodingInfos: [],
  1941. };
  1942. variants.push(variant);
  1943. this.variantUriSet_.add(variantUriKey);
  1944. }
  1945. }
  1946. return variants;
  1947. }
  1948. /**
  1949. * Parses an array of EXT-X-MEDIA tags, then stores the values of all tags
  1950. * with TYPE="CLOSED-CAPTIONS" into a map of group id to closed captions.
  1951. *
  1952. * @param {!Array<!shaka.hls.Tag>} mediaTags
  1953. * @private
  1954. */
  1955. parseClosedCaptions_(mediaTags) {
  1956. const closedCaptionsTags =
  1957. shaka.hls.Utils.filterTagsByType(mediaTags, 'CLOSED-CAPTIONS');
  1958. this.needsClosedCaptionsDetection_ = closedCaptionsTags.length == 0;
  1959. for (const tag of closedCaptionsTags) {
  1960. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1961. 'Should only be called on media tags!');
  1962. const languageValue = tag.getAttributeValue('LANGUAGE');
  1963. let language = this.getLanguage_(languageValue);
  1964. if (!languageValue) {
  1965. const nameValue = tag.getAttributeValue('NAME');
  1966. if (nameValue) {
  1967. language = nameValue;
  1968. }
  1969. }
  1970. // The GROUP-ID value is a quoted-string that specifies the group to which
  1971. // the Rendition belongs.
  1972. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1973. // The value of INSTREAM-ID is a quoted-string that specifies a Rendition
  1974. // within the segments in the Media Playlist. This attribute is REQUIRED
  1975. // if the TYPE attribute is CLOSED-CAPTIONS.
  1976. // We need to replace the SERVICE prefix with our internal svc prefix.
  1977. const instreamId = tag.getRequiredAttrValue('INSTREAM-ID')
  1978. .replace('SERVICE', 'svc');
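// e.g. INSTREAM-ID="SERVICE1" becomes 'svc1'; values like "CC1" are unchanged.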
  1979. if (!this.groupIdToClosedCaptionsMap_.get(groupId)) {
  1980. this.groupIdToClosedCaptionsMap_.set(groupId, new Map());
  1981. }
  1982. this.groupIdToClosedCaptionsMap_.get(groupId).set(instreamId, language);
  1983. }
  1984. }
  1985. /**
  1986. * Parse EXT-X-MEDIA media tag into a Stream object.
  1987. *
  1988. * @param {!Array<!shaka.hls.Tag>} tags
  1989. * @param {!Map<string, string>} groupIdPathwayIdMapping
  1990. * @param {boolean=} requireUri
  1991. * @return {!shaka.hls.HlsParser.StreamInfo}
  1992. * @private
  1993. */
  1994. createStreamInfoFromMediaTags_(tags, groupIdPathwayIdMapping,
  1995. requireUri = true) {
  1996. const verbatimMediaPlaylistUris = [];
  1997. const globalGroupIds = [];
  1998. const groupIdUriMapping = new Map();
  1999. for (const tag of tags) {
  2000. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  2001. 'Should only be called on media tags!');
  2002. const uri = requireUri ? tag.getRequiredAttrValue('URI') :
  2003. (tag.getAttributeValue('URI') || shaka.hls.HlsParser.FAKE_MUXED_URL_);
  2004. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  2005. verbatimMediaPlaylistUris.push(uri);
  2006. globalGroupIds.push(groupId);
  2007. groupIdUriMapping.set(groupId, uri);
  2008. }
  2009. const globalGroupId = globalGroupIds.sort().join(',');
  2010. const firstTag = tags[0];
  2011. let codecs = '';
  2012. /** @type {string} */
  2013. const type = this.getType_(firstTag);
  2014. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2015. codecs = firstTag.getAttributeValue('CODECS') || '';
  2016. } else {
  2017. for (const groupId of globalGroupIds) {
  2018. if (this.groupIdToCodecsMap_.has(groupId)) {
  2019. codecs = this.groupIdToCodecsMap_.get(groupId);
  2020. break;
  2021. }
  2022. }
  2023. }
  2024. // Check if the stream has already been created as part of another Variant
  2025. // and return it if it has.
  2026. const key = verbatimMediaPlaylistUris.sort().join(',');
  2027. if (this.uriToStreamInfosMap_.has(key)) {
  2028. return this.uriToStreamInfosMap_.get(key);
  2029. }
  2030. const streamId = this.globalId_++;
  2031. if (this.contentSteeringManager_) {
  2032. for (const [groupId, uri] of groupIdUriMapping) {
  2033. const pathwayId = groupIdPathwayIdMapping.get(groupId);
  2034. if (pathwayId) {
  2035. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  2036. }
  2037. }
  2038. }
  2039. const language = firstTag.getAttributeValue('LANGUAGE');
  2040. const name = firstTag.getAttributeValue('NAME');
  2041. // NOTE: According to the HLS spec, "DEFAULT=YES" requires "AUTOSELECT=YES".
  2042. // However, we don't bother to validate "AUTOSELECT", since we don't
  2043. // actually use it in our streaming model, and we treat everything as
  2044. // "AUTOSELECT=YES". A value of "AUTOSELECT=NO" would imply that it may
  2045. // only be selected explicitly by the user, and we don't have a way to
  2046. // represent that in our model.
  2047. const defaultAttrValue = firstTag.getAttributeValue('DEFAULT');
  2048. const primary = defaultAttrValue == 'YES';
  2049. const channelsCount =
  2050. type == 'audio' ? this.getChannelsCount_(firstTag) : null;
  2051. const spatialAudio =
  2052. type == 'audio' ? this.isSpatialAudio_(firstTag) : false;
  2053. const characteristics = firstTag.getAttributeValue('CHARACTERISTICS');
  2054. const forcedAttrValue = firstTag.getAttributeValue('FORCED');
  2055. const forced = forcedAttrValue == 'YES';
  2056. const sampleRate = type == 'audio' ? this.getSampleRate_(firstTag) : null;
  2057. // TODO: Should we take into account some of the currently ignored
  2058. // attributes (e.g. INSTREAM-ID)? Attribute descriptions: https://bit.ly/2lpjOhj
  2059. const streamInfo = this.createStreamInfo_(
  2060. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  2061. primary, name, channelsCount, /* closedCaptions= */ null,
  2062. characteristics, forced, sampleRate, spatialAudio);
  2063. if (streamInfo.stream) {
  2064. streamInfo.stream.groupId = globalGroupId;
  2065. }
  2066. if (this.groupIdToStreamInfosMap_.has(globalGroupId)) {
  2067. this.groupIdToStreamInfosMap_.get(globalGroupId).push(streamInfo);
  2068. } else {
  2069. this.groupIdToStreamInfosMap_.set(globalGroupId, [streamInfo]);
  2070. }
  2071. this.uriToStreamInfosMap_.set(key, streamInfo);
  2072. return streamInfo;
  2073. }
  2074. /**
  2075. * Parse EXT-X-IMAGE-STREAM-INF media tag into a Stream object.
  2076. *
  2077. * @param {shaka.hls.Tag} tag
  2078. * @return {!Promise<!shaka.hls.HlsParser.StreamInfo>}
  2079. * @private
  2080. */
  2081. async createStreamInfoFromImageTag_(tag) {
  2082. goog.asserts.assert(tag.name == 'EXT-X-IMAGE-STREAM-INF',
  2083. 'Should only be called on image tags!');
  2084. /** @type {string} */
  2085. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  2086. const verbatimImagePlaylistUri = tag.getRequiredAttrValue('URI');
  2087. const codecs = tag.getAttributeValue('CODECS', 'jpeg') || '';
  2088. // Check if the stream has already been created as part of another Variant
  2089. // and return it if it has.
  2090. if (this.uriToStreamInfosMap_.has(verbatimImagePlaylistUri)) {
  2091. return this.uriToStreamInfosMap_.get(verbatimImagePlaylistUri);
  2092. }
  2093. const language = tag.getAttributeValue('LANGUAGE');
  2094. const name = tag.getAttributeValue('NAME');
  2095. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  2096. const streamInfo = this.createStreamInfo_(
  2097. this.globalId_++, [verbatimImagePlaylistUri], codecs, type, language,
  2098. /* primary= */ false, name, /* channelsCount= */ null,
  2099. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  2100. /* sampleRate= */ null, /* spatialAudio= */ false);
  2101. // Parse misc attributes.
  2102. const resolution = tag.getAttributeValue('RESOLUTION');
  2103. if (resolution) {
  2104. // The RESOLUTION attribute represents the resolution of a single thumbnail,
  2105. // not of the entire sheet at once (which is what we expect in the output).
  2106. // So multiply by the layout size.
  2107. // Since we need to have generated the segment index for this, we can't
  2108. // lazy-load in this situation.
  2109. await streamInfo.stream.createSegmentIndex();
  2110. const reference = streamInfo.stream.segmentIndex.earliestReference();
  2111. const layout = reference.getTilesLayout();
  2112. if (layout) {
  2113. streamInfo.stream.width =
  2114. Number(resolution.split('x')[0]) * Number(layout.split('x')[0]);
  2115. streamInfo.stream.height =
  2116. Number(resolution.split('x')[1]) * Number(layout.split('x')[1]);
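// e.g. RESOLUTION="320x180" with a 5x4 tiles layout (assumed values) yields a
// sheet size of 1600x720.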
  2117. // TODO: What happens if there are multiple grids, with different
  2118. // layout sizes, inside this image stream?
  2119. }
  2120. }
  2121. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  2122. if (bandwidth) {
  2123. streamInfo.stream.bandwidth = Number(bandwidth);
  2124. }
  2125. this.uriToStreamInfosMap_.set(verbatimImagePlaylistUri, streamInfo);
  2126. return streamInfo;
  2127. }
  2128. /**
  2129. * Parse EXT-X-I-FRAME-STREAM-INF media tag into a Stream object.
  2130. *
  2131. * @param {shaka.hls.Tag} tag
  2132. * @return {!shaka.hls.HlsParser.StreamInfo}
  2133. * @private
  2134. */
  2135. createStreamInfoFromIframeTag_(tag) {
  2136. goog.asserts.assert(tag.name == 'EXT-X-I-FRAME-STREAM-INF',
  2137. 'Should only be called on iframe tags!');
  2138. /** @type {string} */
  2139. let type = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  2140. const verbatimIFramePlaylistUri = tag.getRequiredAttrValue('URI');
  2141. const codecs = tag.getAttributeValue('CODECS') || '';
  2142. if (codecs == 'mjpg') {
  2143. type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  2144. }
  2145. // Check if the stream has already been created as part of another Variant
  2146. // and return it if it has.
  2147. if (this.uriToStreamInfosMap_.has(verbatimIFramePlaylistUri)) {
  2148. return this.uriToStreamInfosMap_.get(verbatimIFramePlaylistUri);
  2149. }
  2150. const language = tag.getAttributeValue('LANGUAGE');
  2151. const name = tag.getAttributeValue('NAME');
  2152. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  2153. const streamInfo = this.createStreamInfo_(
  2154. this.globalId_++, [verbatimIFramePlaylistUri], codecs, type, language,
  2155. /* primary= */ false, name, /* channelsCount= */ null,
  2156. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  2157. /* sampleRate= */ null, /* spatialAudio= */ false);
  2158. // Parse misc attributes.
  2159. const resolution = tag.getAttributeValue('RESOLUTION');
  2160. const [width, height] = resolution ? resolution.split('x') : [null, null];
  2161. streamInfo.stream.width = Number(width) || undefined;
  2162. streamInfo.stream.height = Number(height) || undefined;
  2163. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  2164. if (bandwidth) {
  2165. streamInfo.stream.bandwidth = Number(bandwidth);
  2166. }
  2167. this.uriToStreamInfosMap_.set(verbatimIFramePlaylistUri, streamInfo);
  2168. return streamInfo;
  2169. }
  2170. /**
  2171. * Parse an EXT-X-STREAM-INF media tag into a Stream object.
  2172. *
  2173. * @param {!Array<!shaka.hls.Tag>} tags
  2174. * @param {!Array<string>} allCodecs
  2175. * @param {string} type
  2176. * @return {!shaka.hls.HlsParser.StreamInfo}
  2177. * @private
  2178. */
  2179. createStreamInfoFromVariantTags_(tags, allCodecs, type) {
  2180. const streamId = this.globalId_++;
  2181. const verbatimMediaPlaylistUris = [];
  2182. for (const tag of tags) {
  2183. goog.asserts.assert(tag.name == 'EXT-X-STREAM-INF',
  2184. 'Should only be called on variant tags!');
  2185. const uri = tag.getRequiredAttrValue('URI');
  2186. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  2187. if (this.contentSteeringManager_ && pathwayId) {
  2188. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  2189. }
  2190. verbatimMediaPlaylistUris.push(uri);
  2191. }
  2192. const key = verbatimMediaPlaylistUris.sort().join(',') +
  2193. allCodecs.sort().join(',');
  2194. if (this.uriToStreamInfosMap_.has(key)) {
  2195. return this.uriToStreamInfosMap_.get(key);
  2196. }
  2197. const name = verbatimMediaPlaylistUris.join(',');
  2198. const closedCaptions = this.getClosedCaptions_(tags[0], type);
  2199. const codecs = shaka.util.ManifestParserUtils.guessCodecs(type, allCodecs);
  2200. const streamInfo = this.createStreamInfo_(
  2201. streamId, verbatimMediaPlaylistUris,
  2202. codecs, type, /* language= */ null, /* primary= */ false,
  2203. name, /* channelCount= */ null, closedCaptions,
  2204. /* characteristics= */ null, /* forced= */ false,
  2205. /* sampleRate= */ null, /* spatialAudio= */ false);
  2206. this.uriToStreamInfosMap_.set(key, streamInfo);
  2207. return streamInfo;
  2208. }
  2209. /**
  2210. * @param {number} streamId
  2211. * @param {!Array<string>} verbatimMediaPlaylistUris
  2212. * @param {string} codecs
  2213. * @param {string} type
  2214. * @param {?string} languageValue
  2215. * @param {boolean} primary
  2216. * @param {?string} name
  2217. * @param {?number} channelsCount
  2218. * @param {Map<string, string>} closedCaptions
  2219. * @param {?string} characteristics
  2220. * @param {boolean} forced
  2221. * @param {?number} sampleRate
  2222. * @param {boolean} spatialAudio
  2223. * @return {!shaka.hls.HlsParser.StreamInfo}
  2224. * @private
  2225. */
  2226. createStreamInfo_(streamId, verbatimMediaPlaylistUris, codecs, type,
  2227. languageValue, primary, name, channelsCount, closedCaptions,
  2228. characteristics, forced, sampleRate, spatialAudio) {
  2229. // TODO: Refactor, too many parameters
  2230. // This stream is lazy-loaded inside the createSegmentIndex function.
  2231. // So we start out with a stream object that does not contain the actual
  2232. // segment index, then download when createSegmentIndex is called.
  2233. const stream = this.makeStreamObject_(streamId, codecs, type,
  2234. languageValue, primary, name, channelsCount, closedCaptions,
  2235. characteristics, forced, sampleRate, spatialAudio);
  2236. const FAKE_MUXED_URL_ = shaka.hls.HlsParser.FAKE_MUXED_URL_;
  2237. if (verbatimMediaPlaylistUris.includes(FAKE_MUXED_URL_)) {
  2238. stream.isAudioMuxedInVideo = true;
  2239. // We assign the TS mimeType because it is the only one that works
  2240. // with this functionality. MP4 is not supported right now.
  2241. stream.mimeType = 'video/mp2t';
  2242. this.setFullTypeForStream_(stream);
  2243. }
  2244. const streamInfo = {
  2245. stream,
  2246. type,
  2247. redirectUris: [],
  2248. getUris: () => {},
  2249. // These values are filled out or updated after lazy-loading:
  2250. minTimestamp: 0,
  2251. maxTimestamp: 0,
  2252. mediaSequenceToStartTime: new Map(),
  2253. canSkipSegments: false,
  2254. canBlockReload: false,
  2255. hasEndList: false,
  2256. firstSequenceNumber: -1,
  2257. nextMediaSequence: -1,
  2258. nextPart: -1,
  2259. loadedOnce: false,
  2260. };
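// Resolve playlist URIs lazily: when a Content Steering manager is active and
// more than one URI is available, use its locations; otherwise construct
// absolute URIs from the master playlist URI plus any recorded redirect URIs.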
  2261. const getUris = () => {
  2262. if (this.contentSteeringManager_ &&
  2263. verbatimMediaPlaylistUris.length > 1) {
  2264. return this.contentSteeringManager_.getLocations(streamId);
  2265. }
  2266. return streamInfo.redirectUris.concat(shaka.hls.Utils.constructUris(
  2267. [this.masterPlaylistUri_], verbatimMediaPlaylistUris,
  2268. this.globalVariables_));
  2269. };
  2270. streamInfo.getUris = getUris;
  2271. /** @param {!shaka.net.NetworkingEngine.PendingRequest} pendingRequest */
  2272. const downloadSegmentIndex = async (pendingRequest) => {
  2273. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2274. try {
  2275. // Download the actual manifest.
  2276. const response = await pendingRequest.promise;
  2277. if (pendingRequest.aborted) {
  2278. return;
  2279. }
  2280. // Record the final URI after redirects.
  2281. const responseUri = response.uri;
  2282. if (responseUri != response.originalUri) {
  2283. const uris = streamInfo.getUris();
  2284. if (!uris.includes(responseUri)) {
  2285. streamInfo.redirectUris.push(responseUri);
  2286. }
  2287. }
  2288. // Record the redirected, final URI of this media playlist when we parse
  2289. // it.
  2290. /** @type {!shaka.hls.Playlist} */
  2291. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  2292. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  2293. throw new shaka.util.Error(
  2294. shaka.util.Error.Severity.CRITICAL,
  2295. shaka.util.Error.Category.MANIFEST,
  2296. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  2297. }
  2298. /** @type {!Array<!shaka.hls.Tag>} */
  2299. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  2300. 'EXT-X-DEFINE');
  2301. const mediaVariables =
  2302. this.parseMediaVariables_(variablesTags, responseUri);
  2303. const mimeType = undefined;
  2304. let requestBasicInfo = false;
  2305. // If no codec info was provided in the manifest and codec guessing is
  2306. // disabled, we try to get the necessary info from the media data.
  2307. if ((!this.codecInfoInManifest_ &&
  2308. this.config_.hls.disableCodecGuessing) ||
  2309. (this.needsClosedCaptionsDetection_ && type == ContentType.VIDEO &&
  2310. !this.config_.hls.disableClosedCaptionsDetection)) {
  2311. if (playlist.segments.length > 0) {
  2312. this.needsClosedCaptionsDetection_ = false;
  2313. requestBasicInfo = true;
  2314. }
  2315. }
  2316. const allowOverrideMimeType = !this.codecInfoInManifest_ &&
  2317. this.config_.hls.disableCodecGuessing;
  2318. const wasLive = this.isLive_();
  2319. const realStreamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  2320. streamId, mediaVariables, playlist, getUris, codecs,
  2321. type, languageValue, primary, name, channelsCount, closedCaptions,
  2322. characteristics, forced, sampleRate, spatialAudio, mimeType,
  2323. requestBasicInfo, allowOverrideMimeType);
  2324. if (pendingRequest.aborted) {
  2325. return;
  2326. }
  2327. const realStream = realStreamInfo.stream;
  2328. this.determineStartTime_(playlist);
  2329. if (this.isLive_() && !wasLive) {
  2330. // Now that we know that the presentation is live, convert the
  2331. // timeline to live.
  2332. this.changePresentationTimelineToLive_(playlist);
  2333. }
  2334. // Copy values from the real stream info to our initial one.
  2335. streamInfo.minTimestamp = realStreamInfo.minTimestamp;
  2336. streamInfo.maxTimestamp = realStreamInfo.maxTimestamp;
  2337. streamInfo.canSkipSegments = realStreamInfo.canSkipSegments;
  2338. streamInfo.canBlockReload = realStreamInfo.canBlockReload;
  2339. streamInfo.hasEndList = realStreamInfo.hasEndList;
  2340. streamInfo.mediaSequenceToStartTime =
  2341. realStreamInfo.mediaSequenceToStartTime;
  2342. streamInfo.nextMediaSequence = realStreamInfo.nextMediaSequence;
  2343. streamInfo.nextPart = realStreamInfo.nextPart;
  2344. streamInfo.loadedOnce = true;
  2345. stream.segmentIndex = realStream.segmentIndex;
  2346. stream.encrypted = realStream.encrypted;
  2347. stream.drmInfos = realStream.drmInfos;
  2348. stream.keyIds = realStream.keyIds;
  2349. stream.mimeType = realStream.mimeType;
  2350. stream.bandwidth = stream.bandwidth || realStream.bandwidth;
  2351. stream.codecs = stream.codecs || realStream.codecs;
  2352. stream.closedCaptions =
  2353. stream.closedCaptions || realStream.closedCaptions;
  2354. stream.width = stream.width || realStream.width;
  2355. stream.height = stream.height || realStream.height;
  2356. stream.hdr = stream.hdr || realStream.hdr;
  2357. stream.colorGamut = stream.colorGamut || realStream.colorGamut;
  2358. stream.frameRate = stream.frameRate || realStream.frameRate;
  2359. if (stream.language == 'und' && realStream.language != 'und') {
  2360. stream.language = realStream.language;
  2361. }
  2362. stream.language = stream.language || realStream.language;
  2363. stream.channelsCount = stream.channelsCount || realStream.channelsCount;
  2364. stream.audioSamplingRate =
  2365. stream.audioSamplingRate || realStream.audioSamplingRate;
  2366. this.setFullTypeForStream_(stream);
  2367. // Since we lazy-loaded this content, the player may need to create new
  2368. // sessions for the DRM info in this stream.
  2369. if (stream.drmInfos.length) {
  2370. this.playerInterface_.newDrmInfo(stream);
  2371. }
  2372. let closedCaptionsUpdated = false;
  2373. if ((!closedCaptions && stream.closedCaptions) ||
  2374. (closedCaptions && stream.closedCaptions &&
  2375. closedCaptions.size != stream.closedCaptions.size)) {
  2376. closedCaptionsUpdated = true;
  2377. }
  2378. if (this.manifest_ && closedCaptionsUpdated) {
  2379. this.playerInterface_.makeTextStreamsForClosedCaptions(
  2380. this.manifest_);
  2381. }
  2382. if (type == ContentType.VIDEO || type == ContentType.AUDIO) {
  2383. for (const otherStreamInfo of this.uriToStreamInfosMap_.values()) {
  2384. if (!otherStreamInfo.loadedOnce && otherStreamInfo.type == type) {
  2385. // To aid manifest filtering, assume before loading that all video
  2386. // renditions have the same MIME type. (And likewise for audio.)
  2387. otherStreamInfo.stream.mimeType = realStream.mimeType;
  2388. this.setFullTypeForStream_(otherStreamInfo.stream);
  2389. }
  2390. }
  2391. }
  2392. if (type == ContentType.TEXT) {
  2393. const firstSegment = realStream.segmentIndex.earliestReference();
  2394. if (firstSegment && firstSegment.initSegmentReference) {
  2395. stream.mimeType = 'application/mp4';
  2396. this.setFullTypeForStream_(stream);
  2397. }
  2398. }
  2399. const qualityInfo =
  2400. shaka.media.QualityObserver.createQualityInfo(stream);
  2401. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2402. if (reference.initSegmentReference) {
  2403. reference.initSegmentReference.mediaQuality = qualityInfo;
  2404. }
  2405. });
  2406. // Add finishing touches to the stream that can only be done once we
  2407. // have more full context on the media as a whole.
  2408. if (this.hasEnoughInfoToFinalizeStreams_()) {
  2409. if (!this.streamsFinalized_) {
  2410. // Mark this manifest as having been finalized, so we don't go
  2411. // through this whole process of finishing touches a second time.
  2412. this.streamsFinalized_ = true;
  2413. // Finalize all of the currently-loaded streams.
  2414. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  2415. const activeStreamInfos =
  2416. streamInfos.filter((s) => s.stream.segmentIndex);
  2417. this.finalizeStreams_(activeStreamInfos);
  2418. // With the addition of this new stream, we now have enough info to
  2419. // figure out how long the streams should be. So process all streams
  2420. // we have downloaded up until this point.
  2421. this.determineDuration_();
  2422. // Finally, start the update timer, if this asset has been
  2423. // determined to be a livestream.
  2424. const delay = this.getUpdatePlaylistDelay_();
  2425. if (delay > 0) {
  2426. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  2427. }
  2428. } else {
  2429. // We don't need to go through the full process; just finalize this
  2430. // single stream.
  2431. this.finalizeStreams_([streamInfo]);
  2432. }
  2433. }
  2434. this.processDateRangeTags_(
  2435. playlist.tags, stream.type, mediaVariables, getUris);
  2436. if (this.manifest_) {
  2437. this.manifest_.startTime = this.startTime_;
  2438. }
  2439. } catch (e) {
  2440. stream.closeSegmentIndex();
  2441. if (e.code === shaka.util.Error.Code.OPERATION_ABORTED) {
  2442. return;
  2443. }
  2444. const handled = this.playerInterface_.disableStream(stream);
  2445. if (!handled) {
  2446. throw e;
  2447. }
  2448. }
  2449. };
  2450. /** @type {Promise} */
  2451. let creationPromise = null;
  2452. /** @type {!shaka.net.NetworkingEngine.PendingRequest} */
  2453. let pendingRequest;
  2454. const safeCreateSegmentIndex = () => {
  2455. // An operation is already in progress. The second and subsequent
  2456. // callers receive the same Promise as the first caller, and only one
  2457. // download operation will occur.
  2458. if (creationPromise) {
  2459. return creationPromise;
  2460. }
  2461. if (stream.isAudioMuxedInVideo) {
  2462. const segmentIndex = new shaka.media.SegmentIndex([]);
  2463. stream.segmentIndex = segmentIndex;
  2464. return Promise.resolve();
  2465. }
  2466. // Create a new PendingRequest to be able to cancel this specific
  2467. // download.
  2468. pendingRequest = this.requestManifest_(streamInfo.getUris(),
  2469. /* isPlaylist= */ true);
  2470. // Create a Promise tied to the outcome of downloadSegmentIndex(). If
  2471. // downloadSegmentIndex is rejected, creationPromise will also be
  2472. // rejected.
  2473. creationPromise = new Promise((resolve) => {
  2474. resolve(downloadSegmentIndex(pendingRequest));
  2475. });
  2476. return creationPromise;
  2477. };
  2478. stream.createSegmentIndex = safeCreateSegmentIndex;
  2479. stream.closeSegmentIndex = () => {
  2480. // If we're mid-creation, cancel it.
  2481. if (creationPromise && !stream.segmentIndex) {
  2482. pendingRequest.abort();
  2483. }
  2484. // If we have a segment index, release it.
  2485. if (stream.segmentIndex) {
  2486. stream.segmentIndex.release();
  2487. stream.segmentIndex = null;
  2488. }
  2489. // Clear the creation Promise so that a new operation can begin.
  2490. creationPromise = null;
  2491. };
  2492. return streamInfo;
  2493. }
  2494. /**
  2495. * @return {number}
  2496. * @private
  2497. */
  2498. getMinDuration_() {
  2499. let minDuration = Infinity;
  2500. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2501. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text' &&
  2502. !streamInfo.stream.isAudioMuxedInVideo) {
  2503. // Since everything is already offset to 0 (either by sync or by being
  2504. // VOD), only maxTimestamp is necessary to compute the duration.
  2505. minDuration = Math.min(minDuration, streamInfo.maxTimestamp);
  2506. }
  2507. }
  2508. return minDuration;
  2509. }
  2510. /**
  2511. * @return {number}
  2512. * @private
  2513. */
  2514. getLiveDuration_() {
  2515. let maxTimestamp = Infinity;
  2516. let minTimestamp = Infinity;
  2517. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2518. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text' &&
  2519. !streamInfo.stream.isAudioMuxedInVideo) {
  2520. maxTimestamp = Math.min(maxTimestamp, streamInfo.maxTimestamp);
  2521. minTimestamp = Math.min(minTimestamp, streamInfo.minTimestamp);
  2522. }
  2523. }
  2524. return maxTimestamp - minTimestamp;
  2525. }
  2526. /**
  2527. * @param {!Array<!shaka.extern.Stream>} streams
  2528. * @private
  2529. */
  2530. notifySegmentsForStreams_(streams) {
  2531. const references = [];
  2532. for (const stream of streams) {
  2533. if (!stream.segmentIndex) {
  2534. // The stream was closed since the list of streams was built.
  2535. continue;
  2536. }
  2537. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2538. references.push(reference);
  2539. });
  2540. }
  2541. this.presentationTimeline_.notifySegments(references);
  2542. }
  2543. /**
  2544. * @param {!Array<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  2545. * @private
  2546. */
  2547. finalizeStreams_(streamInfos) {
  2548. if (!this.isLive_()) {
  2549. const minDuration = this.getMinDuration_();
  2550. for (const streamInfo of streamInfos) {
  2551. streamInfo.stream.segmentIndex.fit(/* periodStart= */ 0, minDuration);
  2552. }
  2553. }
  2554. this.notifySegmentsForStreams_(streamInfos.map((s) => s.stream));
  2555. const activeStreamInfos = Array.from(this.uriToStreamInfosMap_.values())
  2556. .filter((s) => s.stream.segmentIndex);
  2557. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2558. const hasAudio =
  2559. activeStreamInfos.some((s) => s.stream.type == ContentType.AUDIO);
  2560. const hasVideo =
  2561. activeStreamInfos.some((s) => s.stream.type == ContentType.VIDEO);
  2562. const liveWithNoProgramDateTime =
  2563. this.isLive_() && !this.usesProgramDateTime_;
  2564. const vodWithOnlyAudioOrVideo = !this.isLive_() &&
  2565. this.usesProgramDateTime_ && !(hasAudio && hasVideo);
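// Sync by media sequence number when program date time cannot be trusted
// (explicitly ignored by config, live content without EXT-X-PROGRAM-DATE-TIME,
// or VOD with only one of audio/video); otherwise sync by program date time.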
  2566. if (this.config_.hls.ignoreManifestProgramDateTime ||
  2567. liveWithNoProgramDateTime || vodWithOnlyAudioOrVideo) {
  2568. this.syncStreamsWithSequenceNumber_(
  2569. streamInfos, liveWithNoProgramDateTime);
  2570. } else {
  2571. this.syncStreamsWithProgramDateTime_(streamInfos);
  2572. if (this.config_.hls.ignoreManifestProgramDateTimeForTypes.length > 0) {
  2573. this.syncStreamsWithSequenceNumber_(streamInfos);
  2574. }
  2575. }
  2576. }
  2577. /**
  2578. * @param {string} type
  2579. * @return {boolean}
  2580. * @private
  2581. */
  2582. ignoreManifestProgramDateTimeFor_(type) {
  2583. if (this.config_.hls.ignoreManifestProgramDateTime) {
  2584. return true;
  2585. }
  2586. const forTypes = this.config_.hls.ignoreManifestProgramDateTimeForTypes;
  2587. return forTypes.includes(type);
  2588. }
  2589. /**
  2590. * There are some values on streams that can only be set once we know about
  2591. * both the video and audio content, if present.
  2592. * This checks if there is at least one video downloaded (if the media has
  2593. * video), and that there is at least one audio downloaded (if the media has
  2594. * audio).
  2595. * @return {boolean}
  2596. * @private
  2597. */
  2598. hasEnoughInfoToFinalizeStreams_() {
  2599. if (!this.manifest_) {
  2600. return false;
  2601. }
  2602. const videos = [];
  2603. const audios = [];
  2604. for (const variant of this.manifest_.variants) {
  2605. if (variant.video) {
  2606. videos.push(variant.video);
  2607. }
  2608. if (variant.audio) {
  2609. audios.push(variant.audio);
  2610. }
  2611. }
  2612. if (videos.length > 0 && !videos.some((stream) => stream.segmentIndex)) {
  2613. return false;
  2614. }
  2615. if (audios.length > 0 && !audios.some((stream) => stream.segmentIndex)) {
  2616. return false;
  2617. }
  2618. return true;
  2619. }
  2620. /**
  2621. * @param {number} streamId
  2622. * @param {!Map<string, string>} variables
  2623. * @param {!shaka.hls.Playlist} playlist
  2624. * @param {function(): !Array<string>} getUris
  2625. * @param {string} codecs
  2626. * @param {string} type
  2627. * @param {?string} languageValue
  2628. * @param {boolean} primary
  2629. * @param {?string} name
  2630. * @param {?number} channelsCount
  2631. * @param {Map<string, string>} closedCaptions
  2632. * @param {?string} characteristics
  2633. * @param {boolean} forced
  2634. * @param {?number} sampleRate
  2635. * @param {boolean} spatialAudio
  2636. * @param {(string|undefined)} mimeType
  2637. * @param {boolean=} requestBasicInfo
  2638. * @param {boolean=} allowOverrideMimeType
  2639. * @return {!Promise<!shaka.hls.HlsParser.StreamInfo>}
  2640. * @private
  2641. */
  2642. async convertParsedPlaylistIntoStreamInfo_(streamId, variables, playlist,
  2643. getUris, codecs, type, languageValue, primary, name,
  2644. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2645. spatialAudio, mimeType = undefined, requestBasicInfo = true,
  2646. allowOverrideMimeType = true) {
  2647. const playlistSegments = playlist.segments || [];
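// Segments tagged with EXT-X-GAP carry no playable media; if every segment is
// a gap, treat the playlist as empty.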
  2648. const allAreMissing = playlistSegments.every((seg) => {
  2649. if (shaka.hls.Utils.getFirstTagWithName(seg.tags, 'EXT-X-GAP')) {
  2650. return true;
  2651. }
  2652. return false;
  2653. });
  2654. if (!playlistSegments.length || allAreMissing) {
  2655. throw new shaka.util.Error(
  2656. shaka.util.Error.Severity.CRITICAL,
  2657. shaka.util.Error.Category.MANIFEST,
  2658. shaka.util.Error.Code.HLS_EMPTY_MEDIA_PLAYLIST);
  2659. }
  2660. this.determinePresentationType_(playlist);
  2661. if (this.isLive_()) {
  2662. this.determineLastTargetDuration_(playlist);
  2663. }
  2664. const mediaSequenceToStartTime = this.isLive_() ?
  2665. this.mediaSequenceToStartTimeByType_.get(type) : new Map();
  2666. const {segments, bandwidth} = this.createSegments_(
  2667. playlist, mediaSequenceToStartTime, variables, getUris, type);
  2668. let width = null;
  2669. let height = null;
  2670. let videoRange = null;
  2671. let colorGamut = null;
  2672. let frameRate = null;
  2673. if (segments.length > 0 && requestBasicInfo) {
  2674. const basicInfo = await this.getBasicInfoFromSegments_(segments);
  2675. type = basicInfo.type;
  2676. languageValue = basicInfo.language;
  2677. channelsCount = basicInfo.channelCount;
  2678. sampleRate = basicInfo.sampleRate;
  2679. if (!this.config_.disableText) {
  2680. closedCaptions = basicInfo.closedCaptions;
  2681. }
  2682. height = basicInfo.height;
  2683. width = basicInfo.width;
  2684. videoRange = basicInfo.videoRange;
  2685. colorGamut = basicInfo.colorGamut;
  2686. frameRate = basicInfo.frameRate;
  2687. if (allowOverrideMimeType) {
  2688. mimeType = basicInfo.mimeType;
  2689. codecs = basicInfo.codecs;
  2690. }
  2691. }
  2692. if (!mimeType) {
  2693. mimeType = await this.guessMimeType_(type, codecs, segments);
  2694. // Some manifests don't say what text codec they use. This is a problem
  2695. // when the .cmft extension is used, because we identify the mimeType as
  2696. // application/mp4. In that case, if we don't detect initialization
  2697. // segments, we assume that the mimeType is text/vtt.
  2698. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT &&
  2699. !codecs && mimeType == 'application/mp4' &&
  2700. segments[0] && !segments[0].initSegmentReference) {
  2701. mimeType = 'text/vtt';
  2702. }
  2703. }
  2704. const {drmInfos, keyIds, encrypted, aesEncrypted} =
  2705. await this.parseDrmInfo_(playlist, mimeType, getUris, variables);
  2706. if (encrypted && !drmInfos.length && !aesEncrypted) {
  2707. throw new shaka.util.Error(
  2708. shaka.util.Error.Severity.CRITICAL,
  2709. shaka.util.Error.Category.MANIFEST,
  2710. shaka.util.Error.Code.HLS_KEYFORMATS_NOT_SUPPORTED);
  2711. }
  2712. const stream = this.makeStreamObject_(streamId, codecs, type,
  2713. languageValue, primary, name, channelsCount, closedCaptions,
  2714. characteristics, forced, sampleRate, spatialAudio);
  2715. stream.encrypted = encrypted && !aesEncrypted;
  2716. stream.drmInfos = drmInfos;
  2717. stream.keyIds = keyIds;
  2718. stream.mimeType = mimeType;
  2719. if (bandwidth) {
  2720. stream.bandwidth = bandwidth;
  2721. }
  2722. this.setFullTypeForStream_(stream);
  2723. if (type == shaka.util.ManifestParserUtils.ContentType.VIDEO &&
  2724. (width || height || videoRange || colorGamut)) {
  2725. this.addVideoAttributes_(stream, width, height,
  2726. frameRate, videoRange, /* videoLayout= */ null, colorGamut);
  2727. }
  2728. // This new calculation is necessary for Low Latency streams.
  2729. if (this.isLive_()) {
  2730. this.determineLastTargetDuration_(playlist);
  2731. }
  2732. const firstStartTime = segments[0].startTime;
  2733. const lastSegment = segments[segments.length - 1];
  2734. const lastEndTime = lastSegment.endTime;
  2735. /** @type {!shaka.media.SegmentIndex} */
  2736. const segmentIndex = new shaka.media.SegmentIndex(segments);
  2737. stream.segmentIndex = segmentIndex;
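// Illustrative server control tag (values are hypothetical):
//   #EXT-X-SERVER-CONTROL:CAN-BLOCK-RELOAD=YES,CAN-SKIP-UNTIL=12.0,PART-HOLD-BACK=1.02
// CAN-SKIP-UNTIL enables delta playlist updates (EXT-X-SKIP), and
// CAN-BLOCK-RELOAD enables blocking playlist reloads (_HLS_msn/_HLS_part).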
  2738. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2739. playlist.tags, 'EXT-X-SERVER-CONTROL');
  2740. const canSkipSegments = serverControlTag ?
  2741. serverControlTag.getAttribute('CAN-SKIP-UNTIL') != null : false;
  2742. const canBlockReload = serverControlTag ?
  2743. serverControlTag.getAttribute('CAN-BLOCK-RELOAD') != null : false;
  2744. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2745. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2746. const {nextMediaSequence, nextPart} =
  2747. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  2748. return {
  2749. stream,
  2750. type,
  2751. redirectUris: [],
  2752. getUris,
  2753. minTimestamp: firstStartTime,
  2754. maxTimestamp: lastEndTime,
  2755. canSkipSegments,
  2756. canBlockReload,
  2757. hasEndList: false,
  2758. firstSequenceNumber: -1,
  2759. nextMediaSequence,
  2760. nextPart,
  2761. mediaSequenceToStartTime,
  2762. loadedOnce: false,
  2763. };
  2764. }
  2765. /**
2766. * Gets the next media sequence number (MSN) and part index.
  2767. *
  2768. * @param {number} mediaSequenceNumber
  2769. * @param {!Array<!shaka.media.SegmentReference>} segments
2770. * @return {{nextMediaSequence: number, nextPart: number}}
  2771. * @private
  2772. */
  2773. getNextMediaSequenceAndPart_(mediaSequenceNumber, segments) {
  2774. const currentMediaSequence = mediaSequenceNumber + segments.length - 1;
  2775. let nextMediaSequence = currentMediaSequence;
  2776. let nextPart = -1;
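// Illustrative example (hypothetical values): with mediaSequenceNumber=100
// and 5 complete segments whose last segment has no partial references, the
// next request targets nextMediaSequence=105 with nextPart left at -1 (or 0
// when the byterange optimization is active). If the last segment ends with
// partial references and the final one is only a preload hint, we stay on
// the current sequence number and point at that last part instead.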
  2777. if (!segments.length) {
  2778. nextMediaSequence++;
  2779. return {
  2780. nextMediaSequence,
  2781. nextPart,
  2782. };
  2783. }
  2784. const lastSegment = segments[segments.length - 1];
  2785. const partialReferences = lastSegment.partialReferences;
  2786. if (!lastSegment.partialReferences.length) {
  2787. nextMediaSequence++;
  2788. if (lastSegment.hasByterangeOptimization()) {
  2789. nextPart = 0;
  2790. }
  2791. return {
  2792. nextMediaSequence,
  2793. nextPart,
  2794. };
  2795. }
  2796. nextPart = partialReferences.length - 1;
  2797. const lastPartialReference =
  2798. partialReferences[partialReferences.length - 1];
  2799. if (!lastPartialReference.isPreload()) {
  2800. nextMediaSequence++;
  2801. nextPart = 0;
  2802. }
  2803. return {
  2804. nextMediaSequence,
  2805. nextPart,
  2806. };
  2807. }
  2808. /**
  2809. * Creates a stream object with the given parameters.
  2810. * The parameters that are passed into here are only the things that can be
  2811. * known without downloading the media playlist; other values must be set
  2812. * manually on the object after creation.
  2813. * @param {number} id
  2814. * @param {string} codecs
  2815. * @param {string} type
  2816. * @param {?string} languageValue
  2817. * @param {boolean} primary
  2818. * @param {?string} name
  2819. * @param {?number} channelsCount
  2820. * @param {Map<string, string>} closedCaptions
  2821. * @param {?string} characteristics
  2822. * @param {boolean} forced
  2823. * @param {?number} sampleRate
  2824. * @param {boolean} spatialAudio
  2825. * @return {!shaka.extern.Stream}
  2826. * @private
  2827. */
  2828. makeStreamObject_(id, codecs, type, languageValue, primary, name,
  2829. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2830. spatialAudio) {
  2831. // Fill out a "best-guess" mimeType, for now. It will be replaced once the
  2832. // stream is lazy-loaded.
  2833. const mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) ||
  2834. this.guessMimeTypeFallback_(type);
  2835. const roles = [];
  2836. if (characteristics) {
  2837. for (const characteristic of characteristics.split(',')) {
  2838. roles.push(characteristic);
  2839. }
  2840. }
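// Illustrative CHARACTERISTICS value (hypothetical):
//   "public.accessibility.transcribes-spoken-dialog,public.accessibility.describes-music-and-sound"
// splits into two roles, which mark a text track as closed captions below.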
  2841. let kind = undefined;
  2842. let accessibilityPurpose = null;
  2843. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2844. if (roles.includes('public.accessibility.transcribes-spoken-dialog') &&
  2845. roles.includes('public.accessibility.describes-music-and-sound')) {
  2846. kind = shaka.util.ManifestParserUtils.TextStreamKind.CLOSED_CAPTION;
  2847. } else {
  2848. kind = shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE;
  2849. }
  2850. } else {
  2851. if (roles.includes('public.accessibility.describes-video')) {
  2852. accessibilityPurpose =
  2853. shaka.media.ManifestParser.AccessibilityPurpose.VISUALLY_IMPAIRED;
  2854. }
  2855. }
  2856. // If there are no roles, and we have defaulted to the subtitle "kind" for
  2857. // this track, add the implied subtitle role.
  2858. if (!roles.length &&
  2859. kind === shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE) {
  2860. roles.push(shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE);
  2861. }
  2862. const stream = {
  2863. id: this.globalId_++,
  2864. originalId: name,
  2865. groupId: null,
  2866. createSegmentIndex: () => Promise.resolve(),
  2867. segmentIndex: null,
  2868. mimeType,
  2869. codecs,
  2870. kind: (type == shaka.util.ManifestParserUtils.ContentType.TEXT) ?
  2871. shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE : undefined,
  2872. encrypted: false,
  2873. drmInfos: [],
  2874. keyIds: new Set(),
  2875. language: this.getLanguage_(languageValue),
  2876. originalLanguage: languageValue,
  2877. label: name, // For historical reasons, since before "originalId".
  2878. type,
  2879. primary,
  2880. // TODO: trick mode
  2881. trickModeVideo: null,
  2882. dependencyStream: null,
  2883. emsgSchemeIdUris: null,
  2884. frameRate: undefined,
  2885. pixelAspectRatio: undefined,
  2886. width: undefined,
  2887. height: undefined,
  2888. bandwidth: undefined,
  2889. roles,
  2890. forced,
  2891. channelsCount,
  2892. audioSamplingRate: sampleRate,
  2893. spatialAudio,
  2894. closedCaptions,
  2895. hdr: undefined,
  2896. colorGamut: undefined,
  2897. videoLayout: undefined,
  2898. tilesLayout: undefined,
  2899. accessibilityPurpose: accessibilityPurpose,
  2900. external: false,
  2901. fastSwitching: false,
  2902. fullMimeTypes: new Set(),
  2903. isAudioMuxedInVideo: false,
  2904. baseOriginalId: null,
  2905. };
  2906. this.setFullTypeForStream_(stream);
  2907. return stream;
  2908. }
  2909. /**
  2910. * @param {!shaka.hls.Playlist} playlist
  2911. * @param {string} mimeType
  2912. * @param {function(): !Array<string>} getUris
  2913. * @param {?Map<string, string>=} variables
  2914. * @return {Promise<{
  2915. * drmInfos: !Array<shaka.extern.DrmInfo>,
  2916. * keyIds: !Set<string>,
  2917. * encrypted: boolean,
  2918. * aesEncrypted: boolean
  2919. * }>}
  2920. * @private
  2921. */
  2922. async parseDrmInfo_(playlist, mimeType, getUris, variables) {
  2923. /** @type {!Map<!shaka.hls.Tag, ?shaka.media.InitSegmentReference>} */
  2924. const drmTagsMap = new Map();
  2925. if (!this.config_.ignoreDrmInfo && playlist.segments) {
  2926. for (const segment of playlist.segments) {
  2927. const segmentKeyTags = shaka.hls.Utils.filterTagsByName(segment.tags,
  2928. 'EXT-X-KEY');
  2929. let initSegmentRef = null;
  2930. if (segmentKeyTags.length) {
  2931. initSegmentRef = this.getInitSegmentReference_(playlist,
  2932. segment.tags, getUris, variables);
  2933. for (const segmentKeyTag of segmentKeyTags) {
  2934. drmTagsMap.set(segmentKeyTag, initSegmentRef);
  2935. }
  2936. }
  2937. }
  2938. }
  2939. let encrypted = false;
  2940. let aesEncrypted = false;
  2941. /** @type {!Array<shaka.extern.DrmInfo>}*/
  2942. const drmInfos = [];
  2943. const keyIds = new Set();
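// Illustrative EXT-X-KEY tags this loop may encounter (URIs are hypothetical):
//   #EXT-X-KEY:METHOD=AES-128,URI="https://example.com/key.bin",IV=0x0000000000000000000000000000002A
//   #EXT-X-KEY:METHOD=SAMPLE-AES,KEYFORMAT="com.apple.streamingkeydelivery",URI="skd://key-id"
// The first is handled by the AES path below; the second is routed to the
// matching DRM parser by its KEYFORMAT.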
  2944. for (const [key, value] of drmTagsMap) {
  2945. const drmTag = /** @type {!shaka.hls.Tag} */ (key);
  2946. const initSegmentRef =
  2947. /** @type {?shaka.media.InitSegmentReference} */ (value);
  2948. const method = drmTag.getRequiredAttrValue('METHOD');
  2949. if (method != 'NONE') {
  2950. encrypted = true;
  2951. // According to the HLS spec, KEYFORMAT is optional and implicitly
  2952. // defaults to "identity".
  2953. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  2954. const keyFormat =
  2955. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  2956. let drmInfo = null;
  2957. if (this.isAesMethod_(method)) {
  2958. // These keys are handled separately.
  2959. aesEncrypted = true;
  2960. continue;
  2961. } else if (keyFormat == 'identity') {
  2962. // eslint-disable-next-line no-await-in-loop
  2963. drmInfo = await this.identityDrmParser_(
  2964. drmTag, mimeType, getUris, initSegmentRef, variables);
  2965. } else {
  2966. const drmParser =
  2967. this.keyFormatsToDrmParsers_.get(keyFormat);
  2968. drmInfo = drmParser ?
  2969. // eslint-disable-next-line no-await-in-loop
  2970. await drmParser(drmTag, mimeType, initSegmentRef) :
  2971. null;
  2972. }
  2973. if (drmInfo) {
  2974. if (drmInfo.keyIds) {
  2975. for (const keyId of drmInfo.keyIds) {
  2976. keyIds.add(keyId);
  2977. }
  2978. }
  2979. drmInfos.push(drmInfo);
  2980. } else {
  2981. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  2982. }
  2983. }
  2984. }
  2985. return {drmInfos, keyIds, encrypted, aesEncrypted};
  2986. }
  2987. /**
  2988. * @param {!shaka.hls.Tag} drmTag
  2989. * @param {!shaka.hls.Playlist} playlist
  2990. * @param {function(): !Array<string>} getUris
  2991. * @param {?Map<string, string>=} variables
  2992. * @return {!shaka.extern.aesKey}
  2993. * @private
  2994. */
  2995. parseAESDrmTag_(drmTag, playlist, getUris, variables) {
  2996. // Check if the Web Crypto API is available.
  2997. if (!window.crypto || !window.crypto.subtle) {
  2998. shaka.log.alwaysWarn('Web Crypto API is not available to decrypt ' +
  2999. 'AES. (Web Crypto only exists in secure origins like https)');
  3000. throw new shaka.util.Error(
  3001. shaka.util.Error.Severity.CRITICAL,
  3002. shaka.util.Error.Category.MANIFEST,
  3003. shaka.util.Error.Code.NO_WEB_CRYPTO_API);
  3004. }
  3005. // HLS RFC 8216 Section 5.2:
  3006. // An EXT-X-KEY tag with a KEYFORMAT of "identity" that does not have an IV
  3007. // attribute indicates that the Media Sequence Number is to be used as the
  3008. // IV when decrypting a Media Segment, by putting its big-endian binary
  3009. // representation into a 16-octet (128-bit) buffer and padding (on the left)
  3010. // with zeros.
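// Illustrative example: a Media Sequence Number of 42 yields the IV
// 0x0000000000000000000000000000002A (42 big-endian, left-padded to 16 bytes).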
  3011. let firstMediaSequenceNumber = 0;
  3012. let iv;
  3013. const ivHex = drmTag.getAttributeValue('IV', '');
  3014. if (!ivHex) {
  3015. // Media Sequence Number will be used as IV.
  3016. firstMediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3017. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  3018. } else {
  3019. // Exclude 0x at the start of string.
  3020. iv = shaka.util.Uint8ArrayUtils.fromHex(ivHex.substr(2));
  3021. if (iv.byteLength != 16) {
  3022. throw new shaka.util.Error(
  3023. shaka.util.Error.Severity.CRITICAL,
  3024. shaka.util.Error.Category.MANIFEST,
  3025. shaka.util.Error.Code.AES_128_INVALID_IV_LENGTH);
  3026. }
  3027. }
  3028. const keyUris = shaka.hls.Utils.constructSegmentUris(
  3029. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  3030. const keyMapKey = keyUris.sort().join('');
  3031. const aesKeyInfoKey =
  3032. `${drmTag.toString()}-${firstMediaSequenceNumber}-${keyMapKey}`;
  3033. if (!this.aesKeyInfoMap_.has(aesKeyInfoKey)) {
  3034. // Default AES-128
  3035. const keyInfo = {
  3036. bitsKey: 128,
  3037. blockCipherMode: 'CBC',
  3038. iv,
  3039. firstMediaSequenceNumber,
  3040. };
  3041. const method = drmTag.getRequiredAttrValue('METHOD');
  3042. switch (method) {
  3043. case 'AES-256':
  3044. keyInfo.bitsKey = 256;
  3045. break;
  3046. case 'AES-256-CTR':
  3047. keyInfo.bitsKey = 256;
  3048. keyInfo.blockCipherMode = 'CTR';
  3049. break;
  3050. }
  3051. // Don't download the key object until the segment is parsed, to avoid a
  3052. // startup delay for long manifests with lots of keys.
  3053. keyInfo.fetchKey = async () => {
  3054. if (!this.aesKeyMap_.has(keyMapKey)) {
  3055. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  3056. const request = shaka.net.NetworkingEngine.makeRequest(
  3057. keyUris, this.config_.retryParameters);
  3058. const keyResponse = this.makeNetworkRequest_(request, requestType)
  3059. .promise;
  3060. this.aesKeyMap_.set(keyMapKey, keyResponse);
  3061. }
  3062. const keyResponse = await this.aesKeyMap_.get(keyMapKey);
  3063. // keyResponse.status is undefined when URI is "data:text/plain;base64,"
  3064. if (!keyResponse.data ||
  3065. keyResponse.data.byteLength != (keyInfo.bitsKey / 8)) {
  3066. throw new shaka.util.Error(
  3067. shaka.util.Error.Severity.CRITICAL,
  3068. shaka.util.Error.Category.MANIFEST,
  3069. shaka.util.Error.Code.AES_128_INVALID_KEY_LENGTH);
  3070. }
  3071. const algorithm = {
  3072. name: keyInfo.blockCipherMode == 'CTR' ? 'AES-CTR' : 'AES-CBC',
  3073. length: keyInfo.bitsKey,
  3074. };
  3075. keyInfo.cryptoKey = await window.crypto.subtle.importKey(
  3076. 'raw', keyResponse.data, algorithm, true, ['decrypt']);
  3077. keyInfo.fetchKey = undefined; // No longer needed.
  3078. };
  3079. this.aesKeyInfoMap_.set(aesKeyInfoKey, keyInfo);
  3080. }
  3081. return this.aesKeyInfoMap_.get(aesKeyInfoKey);
  3082. }
  3083. /**
  3084. * @param {!shaka.hls.Playlist} playlist
  3085. * @private
  3086. */
  3087. determineStartTime_(playlist) {
3088. // If we already have a start time, we avoid processing this again.
  3089. if (this.startTime_ != null) {
  3090. return;
  3091. }
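// Illustrative tag (hypothetical offset): #EXT-X-START:TIME-OFFSET=-18.0
// requests that playback begin 18 seconds before the end of the playlist.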
  3092. const startTimeTag =
  3093. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-START');
  3094. if (startTimeTag) {
  3095. this.startTime_ =
  3096. Number(startTimeTag.getRequiredAttrValue('TIME-OFFSET'));
  3097. }
  3098. }
  3099. /**
  3100. * @param {!shaka.hls.Playlist} playlist
  3101. * @private
  3102. */
  3103. determinePresentationType_(playlist) {
  3104. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  3105. const presentationTypeTag =
  3106. shaka.hls.Utils.getFirstTagWithName(playlist.tags,
  3107. 'EXT-X-PLAYLIST-TYPE');
  3108. const endListTag =
  3109. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  3110. const isVod = (presentationTypeTag && presentationTypeTag.value == 'VOD') ||
  3111. endListTag;
  3112. const isEvent = presentationTypeTag &&
  3113. presentationTypeTag.value == 'EVENT' && !isVod;
  3114. const isLive = !isVod && !isEvent;
  3115. if (isVod) {
  3116. this.setPresentationType_(PresentationType.VOD);
  3117. } else {
  3118. // If it's not VOD, it must be presentation type LIVE or an ongoing EVENT.
  3119. if (isLive) {
  3120. this.setPresentationType_(PresentationType.LIVE);
  3121. } else {
  3122. this.setPresentationType_(PresentationType.EVENT);
  3123. }
  3124. }
  3125. }
  3126. /**
  3127. * @param {!shaka.hls.Playlist} playlist
  3128. * @private
  3129. */
  3130. determineLastTargetDuration_(playlist) {
  3131. let lastTargetDuration = Infinity;
  3132. const segments = playlist.segments;
  3133. if (segments.length) {
  3134. let segmentIndex = segments.length - 1;
  3135. while (segmentIndex >= 0) {
  3136. const segment = segments[segmentIndex];
  3137. const extinfTag =
  3138. shaka.hls.Utils.getFirstTagWithName(segment.tags, 'EXTINF');
  3139. if (extinfTag) {
  3140. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  3141. // We're interested in the duration part.
  3142. const extinfValues = extinfTag.value.split(',');
  3143. lastTargetDuration = Number(extinfValues[0]);
  3144. break;
  3145. }
  3146. segmentIndex--;
  3147. }
  3148. }
  3149. const targetDurationTag = this.getRequiredTag_(playlist.tags,
  3150. 'EXT-X-TARGETDURATION');
  3151. const targetDuration = Number(targetDurationTag.value);
  3152. const partialTargetDurationTag =
  3153. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-PART-INF');
  3154. if (partialTargetDurationTag) {
  3155. this.partialTargetDuration_ = Number(
  3156. partialTargetDurationTag.getRequiredAttrValue('PART-TARGET'));
  3157. }
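// Illustrative tags (hypothetical values): #EXT-X-TARGETDURATION:6 and
// #EXT-X-PART-INF:PART-TARGET=0.33334 would give targetDuration=6 and
// partialTargetDuration_=0.33334 here.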
  3158. // Get the server-recommended min distance from the live edge.
  3159. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  3160. playlist.tags, 'EXT-X-SERVER-CONTROL');
  3161. // According to the HLS spec, updates should not happen more often than
  3162. // once in targetDuration. It also requires us to only update the active
  3163. // variant. We might implement that later, but for now every variant
  3164. // will be updated. To get the update period, choose the smallest
  3165. // targetDuration value across all playlists.
  3166. // 1. Update the shortest one to use as update period and segment
  3167. // availability time (for LIVE).
  3168. if (this.lowLatencyMode_ && this.partialTargetDuration_) {
  3169. // For low latency streaming, use the partial segment target duration.
  3170. if (this.lowLatencyByterangeOptimization_) {
  3171. // We always have at least 1 partial segment part, and most servers
3172. // allow you to make a request with _HLS_msn=X&_HLS_part=0 at a
3173. // distance of 4 partial segments. With this we ensure that we
  3174. // obtain the minimum latency in this type of case.
  3175. if (this.partialTargetDuration_ * 5 <= lastTargetDuration) {
  3176. this.lastTargetDuration_ = Math.min(
  3177. this.partialTargetDuration_, this.lastTargetDuration_);
  3178. } else {
  3179. this.lastTargetDuration_ = Math.min(
  3180. lastTargetDuration, this.lastTargetDuration_);
  3181. }
  3182. } else {
  3183. this.lastTargetDuration_ = Math.min(
  3184. this.partialTargetDuration_, this.lastTargetDuration_);
  3185. }
  3186. // Use 'PART-HOLD-BACK' as the presentation delay for low latency mode.
  3187. this.lowLatencyPresentationDelay_ = serverControlTag ? Number(
  3188. serverControlTag.getRequiredAttrValue('PART-HOLD-BACK')) : 0;
  3189. } else {
  3190. this.lastTargetDuration_ = Math.min(
  3191. lastTargetDuration, this.lastTargetDuration_);
  3192. // Use 'HOLD-BACK' as the presentation delay for default if defined.
  3193. const holdBack = serverControlTag ?
  3194. serverControlTag.getAttribute('HOLD-BACK') : null;
  3195. this.presentationDelay_ = holdBack ? Number(holdBack.value) : 0;
  3196. }
  3197. // 2. Update the longest target duration if need be to use as a
  3198. // presentation delay later.
  3199. this.maxTargetDuration_ = Math.max(
  3200. targetDuration, this.maxTargetDuration_);
  3201. }
  3202. /**
  3203. * @param {!shaka.hls.Playlist} playlist
  3204. * @private
  3205. */
  3206. changePresentationTimelineToLive_(playlist) {
  3207. // The live edge will be calculated from segments, so we don't need to
  3208. // set a presentation start time. We will assert later that this is
  3209. // working as expected.
  3210. // The HLS spec (RFC 8216) states in 6.3.3:
  3211. //
  3212. // "The client SHALL choose which Media Segment to play first ... the
  3213. // client SHOULD NOT choose a segment that starts less than three target
  3214. // durations from the end of the Playlist file. Doing so can trigger
  3215. // playback stalls."
  3216. //
3217. // We accomplish this in our DASH-y model by setting a presentation delay
3218. // of the configured value, or the duration of 3 segments if not configured.
  3219. // This will be the "live edge" of the presentation.
  3220. let presentationDelay = 0;
  3221. if (this.config_.defaultPresentationDelay) {
  3222. presentationDelay = this.config_.defaultPresentationDelay;
  3223. } else if (this.lowLatencyPresentationDelay_) {
  3224. presentationDelay = this.lowLatencyPresentationDelay_;
  3225. } else if (this.presentationDelay_) {
  3226. presentationDelay = this.presentationDelay_;
  3227. } else {
  3228. const totalSegments = playlist.segments.length;
  3229. let delaySegments = this.config_.hls.liveSegmentsDelay;
  3230. if (delaySegments > (totalSegments - 2)) {
  3231. delaySegments = Math.max(1, totalSegments - 2);
  3232. }
  3233. for (let i = totalSegments - delaySegments; i < totalSegments; i++) {
  3234. const extinfTag = shaka.hls.Utils.getFirstTagWithName(
  3235. playlist.segments[i].tags, 'EXTINF');
  3236. if (extinfTag) {
  3237. const extinfValues = extinfTag.value.split(',');
  3238. const duration = Number(extinfValues[0]);
  3239. presentationDelay += Math.ceil(duration);
  3240. } else {
  3241. presentationDelay += this.maxTargetDuration_;
  3242. }
  3243. }
  3244. }
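// Illustrative example (hypothetical durations): with liveSegmentsDelay=3 and
// three trailing segments of roughly 6 seconds each, the computed
// presentationDelay is about 18 seconds behind the live edge.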
  3245. if (this.startTime_ && this.startTime_ < 0) {
  3246. presentationDelay = Math.min(-this.startTime_, presentationDelay);
  3247. this.startTime_ += presentationDelay;
  3248. }
  3249. this.presentationTimeline_.setPresentationStartTime(0);
  3250. this.presentationTimeline_.setDelay(presentationDelay);
  3251. this.presentationTimeline_.setStatic(false);
  3252. }
  3253. /**
3254. * Get the InitSegmentReference for a segment if it has an EXT-X-MAP tag.
  3255. * @param {!shaka.hls.Playlist} playlist
  3256. * @param {!Array<!shaka.hls.Tag>} tags Segment tags
  3257. * @param {function(): !Array<string>} getUris
  3258. * @param {?Map<string, string>=} variables
  3259. * @return {shaka.media.InitSegmentReference}
  3260. * @private
  3261. */
  3262. getInitSegmentReference_(playlist, tags, getUris, variables) {
  3263. /** @type {?shaka.hls.Tag} */
  3264. const mapTag = shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-MAP');
  3265. if (!mapTag) {
  3266. return null;
  3267. }
  3268. // Map tag example: #EXT-X-MAP:URI="main.mp4",BYTERANGE="720@0"
  3269. const verbatimInitSegmentUri = mapTag.getRequiredAttrValue('URI');
  3270. const absoluteInitSegmentUris = shaka.hls.Utils.constructSegmentUris(
  3271. getUris(), verbatimInitSegmentUri, variables);
  3272. const mapTagKey = [
  3273. absoluteInitSegmentUris.toString(),
  3274. mapTag.getAttributeValue('BYTERANGE', ''),
  3275. ].join('-');
  3276. if (!this.mapTagToInitSegmentRefMap_.has(mapTagKey)) {
  3277. /** @type {shaka.extern.aesKey|undefined} */
  3278. let aesKey = undefined;
  3279. let byteRangeTag = null;
  3280. let encrypted = false;
  3281. for (const tag of tags) {
  3282. if (tag.name == 'EXT-X-KEY') {
  3283. const method = tag.getRequiredAttrValue('METHOD');
  3284. if (this.isAesMethod_(method) && tag.id < mapTag.id) {
  3285. encrypted = false;
  3286. aesKey =
  3287. this.parseAESDrmTag_(tag, playlist, getUris, variables);
  3288. } else {
  3289. encrypted = method != 'NONE';
  3290. }
  3291. } else if (tag.name == 'EXT-X-BYTERANGE' && tag.id < mapTag.id) {
  3292. byteRangeTag = tag;
  3293. }
  3294. }
  3295. const initSegmentRef = this.createInitSegmentReference_(
  3296. absoluteInitSegmentUris, mapTag, byteRangeTag, aesKey, encrypted);
  3297. this.mapTagToInitSegmentRefMap_.set(mapTagKey, initSegmentRef);
  3298. }
  3299. return this.mapTagToInitSegmentRefMap_.get(mapTagKey);
  3300. }
  3301. /**
  3302. * Create an InitSegmentReference object for the EXT-X-MAP tag in the media
  3303. * playlist.
  3304. * @param {!Array<string>} absoluteInitSegmentUris
  3305. * @param {!shaka.hls.Tag} mapTag EXT-X-MAP
  3306. * @param {shaka.hls.Tag=} byteRangeTag EXT-X-BYTERANGE
  3307. * @param {shaka.extern.aesKey=} aesKey
  3308. * @param {boolean=} encrypted
  3309. * @return {!shaka.media.InitSegmentReference}
  3310. * @private
  3311. */
  3312. createInitSegmentReference_(absoluteInitSegmentUris, mapTag, byteRangeTag,
  3313. aesKey, encrypted) {
  3314. let startByte = 0;
  3315. let endByte = null;
  3316. let byterange = mapTag.getAttributeValue('BYTERANGE');
  3317. if (!byterange && byteRangeTag) {
  3318. byterange = byteRangeTag.value;
  3319. }
  3320. // If a BYTERANGE attribute is not specified, the segment consists
  3321. // of the entire resource.
  3322. if (byterange) {
  3323. const blocks = byterange.split('@');
  3324. const byteLength = Number(blocks[0]);
  3325. startByte = Number(blocks[1]);
  3326. endByte = startByte + byteLength - 1;
  3327. if (aesKey) {
3328. // A MAP segment encrypted with an AES method, when served with
3329. // HTTP Range, has the unencrypted size specified in the range.
  3330. // See: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
  3331. const length = (endByte + 1) - startByte;
  3332. if (length % 16) {
  3333. endByte += (16 - (length % 16));
  3334. }
  3335. }
  3336. }
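// Illustrative example (hypothetical byterange): BYTERANGE="730@0" gives
// startByte=0 and endByte=729; with an AES key the end byte is padded to 735
// so that the encrypted length (736) is a multiple of the 16-byte block size.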
  3337. const initSegmentRef = new shaka.media.InitSegmentReference(
  3338. () => absoluteInitSegmentUris,
  3339. startByte,
  3340. endByte,
  3341. /* mediaQuality= */ null,
  3342. /* timescale= */ null,
  3343. /* segmentData= */ null,
  3344. aesKey,
  3345. encrypted);
  3346. return initSegmentRef;
  3347. }
  3348. /**
  3349. * Parses one shaka.hls.Segment object into a shaka.media.SegmentReference.
  3350. *
  3351. * @param {shaka.media.InitSegmentReference} initSegmentReference
  3352. * @param {shaka.media.SegmentReference} previousReference
  3353. * @param {!shaka.hls.Segment} hlsSegment
  3354. * @param {number} startTime
  3355. * @param {!Map<string, string>} variables
  3356. * @param {!shaka.hls.Playlist} playlist
  3357. * @param {string} type
  3358. * @param {function(): !Array<string>} getUris
  3359. * @param {shaka.extern.aesKey=} aesKey
  3360. * @return {shaka.media.SegmentReference}
  3361. * @private
  3362. */
  3363. createSegmentReference_(
  3364. initSegmentReference, previousReference, hlsSegment, startTime,
  3365. variables, playlist, type, getUris, aesKey) {
  3366. const HlsParser = shaka.hls.HlsParser;
  3367. const getMimeType = (uri) => {
  3368. const parsedUri = new goog.Uri(uri);
  3369. const extension = parsedUri.getPath().split('.').pop();
  3370. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_.get(type);
  3371. let mimeType = map.get(extension);
  3372. if (!mimeType) {
  3373. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_.get(extension);
  3374. }
  3375. return mimeType;
  3376. };
  3377. const tags = hlsSegment.tags;
  3378. const extinfTag =
  3379. shaka.hls.Utils.getFirstTagWithName(tags, 'EXTINF');
  3380. let endTime = 0;
  3381. let startByte = 0;
  3382. let endByte = null;
  3383. if (hlsSegment.partialSegments.length) {
  3384. this.manifest_.isLowLatency = true;
  3385. }
  3386. let syncTime = null;
  3387. if (!this.config_.hls.ignoreManifestProgramDateTime) {
  3388. const dateTimeTag =
  3389. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-PROGRAM-DATE-TIME');
  3390. if (dateTimeTag && dateTimeTag.value) {
  3391. syncTime = shaka.util.TXml.parseDate(dateTimeTag.value);
  3392. goog.asserts.assert(syncTime != null,
  3393. 'EXT-X-PROGRAM-DATE-TIME format not valid');
  3394. this.usesProgramDateTime_ = true;
  3395. }
  3396. }
  3397. let status = shaka.media.SegmentReference.Status.AVAILABLE;
  3398. if (shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-GAP')) {
  3399. this.manifest_.gapCount++;
  3400. status = shaka.media.SegmentReference.Status.MISSING;
  3401. }
  3402. if (!extinfTag) {
  3403. if (hlsSegment.partialSegments.length == 0) {
  3404. // EXTINF tag must be available if the segment has no partial segments.
  3405. throw new shaka.util.Error(
  3406. shaka.util.Error.Severity.CRITICAL,
  3407. shaka.util.Error.Category.MANIFEST,
  3408. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, 'EXTINF');
  3409. } else if (!this.lowLatencyMode_) {
  3410. // Without EXTINF and without low-latency mode, partial segments get
  3411. // ignored.
  3412. return null;
  3413. }
  3414. }
  3415. // Create SegmentReferences for the partial segments.
  3416. let partialSegmentRefs = [];
  3417. // Optimization for LL-HLS with byterange
  3418. // More info in https://tinyurl.com/hls-open-byte-range
  3419. let segmentWithByteRangeOptimization = false;
  3420. let getUrisOptimization = null;
  3421. let somePartialSegmentWithGap = false;
  3422. let isPreloadSegment = false;
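// Illustrative partial segment tags (URIs and values are hypothetical):
//   #EXT-X-PART:DURATION=0.334,URI="seg271.part2.mp4",INDEPENDENT=YES
//   #EXT-X-PRELOAD-HINT:TYPE=PART,URI="seg271.part3.mp4",BYTERANGE-START=34567
// The preload hint describes a part that is not fully available yet.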
  3423. if (this.lowLatencyMode_ && hlsSegment.partialSegments.length) {
  3424. const byterangeOptimizationSupport =
  3425. initSegmentReference && window.ReadableStream &&
  3426. this.config_.hls.allowLowLatencyByteRangeOptimization;
  3427. let partialSyncTime = syncTime;
  3428. for (let i = 0; i < hlsSegment.partialSegments.length; i++) {
  3429. const item = hlsSegment.partialSegments[i];
  3430. const pPreviousReference = i == 0 ?
  3431. previousReference : partialSegmentRefs[partialSegmentRefs.length - 1];
  3432. const pStartTime = (i == 0) ? startTime : pPreviousReference.endTime;
  3433. // If DURATION is missing from this partial segment, use the target
  3434. // partial duration from the top of the playlist, which is a required
  3435. // attribute for content with partial segments.
  3436. const pDuration = Number(item.getAttributeValue('DURATION')) ||
  3437. this.partialTargetDuration_;
  3438. // If for some reason we have neither an explicit duration, nor a target
  3439. // partial duration, we should SKIP this partial segment to avoid
  3440. // duplicating content in the presentation timeline.
  3441. if (!pDuration) {
  3442. continue;
  3443. }
  3444. const pEndTime = pStartTime + pDuration;
  3445. let pStartByte = 0;
  3446. let pEndByte = null;
  3447. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3448. // A preload hinted partial segment may have byterange start info.
  3449. const pByterangeStart = item.getAttributeValue('BYTERANGE-START');
  3450. pStartByte = pByterangeStart ? Number(pByterangeStart) : 0;
  3451. // A preload hinted partial segment may have byterange length info.
  3452. const pByterangeLength = item.getAttributeValue('BYTERANGE-LENGTH');
  3453. if (pByterangeLength) {
  3454. pEndByte = pStartByte + Number(pByterangeLength) - 1;
  3455. } else if (pStartByte) {
  3456. // If we have a non-zero start byte, but no end byte, follow the
  3457. // recommendation of https://tinyurl.com/hls-open-byte-range and
  3458. // set the end byte explicitly to a large integer.
  3459. pEndByte = Number.MAX_SAFE_INTEGER;
  3460. }
  3461. } else {
  3462. const pByterange = item.getAttributeValue('BYTERANGE');
  3463. [pStartByte, pEndByte] =
  3464. this.parseByteRange_(pPreviousReference, pByterange);
  3465. }
  3466. const pUri = item.getAttributeValue('URI');
  3467. if (!pUri) {
  3468. continue;
  3469. }
  3470. let partialStatus = shaka.media.SegmentReference.Status.AVAILABLE;
  3471. if (item.getAttributeValue('GAP') == 'YES') {
  3472. this.manifest_.gapCount++;
  3473. partialStatus = shaka.media.SegmentReference.Status.MISSING;
  3474. somePartialSegmentWithGap = true;
  3475. }
  3476. let uris = null;
  3477. const getPartialUris = () => {
  3478. if (uris == null) {
  3479. goog.asserts.assert(pUri, 'Partial uri should be defined!');
  3480. uris = shaka.hls.Utils.constructSegmentUris(
  3481. getUris(), pUri, variables);
  3482. }
  3483. return uris;
  3484. };
  3485. if (byterangeOptimizationSupport &&
  3486. pStartByte >= 0 && pEndByte != null) {
  3487. getUrisOptimization = getPartialUris;
  3488. segmentWithByteRangeOptimization = true;
  3489. }
  3490. const partial = new shaka.media.SegmentReference(
  3491. pStartTime,
  3492. pEndTime,
  3493. getPartialUris,
  3494. pStartByte,
  3495. pEndByte,
  3496. initSegmentReference,
  3497. /* timestampOffset= */ 0,
  3498. /* appendWindowStart= */ 0,
  3499. /* appendWindowEnd= */ Infinity,
  3500. /* partialReferences= */ [],
  3501. /* tilesLayout= */ '',
  3502. /* tileDuration= */ null,
  3503. partialSyncTime,
  3504. partialStatus,
  3505. aesKey);
  3506. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3507. partial.markAsPreload();
  3508. isPreloadSegment = true;
  3509. }
  3510. // The spec doesn't say that we can assume INDEPENDENT=YES for the
  3511. // first partial segment. It does call the flag "optional", though, and
3512. // in cases where there are no such flags on any partial segments, it
  3513. // is sensible to assume the first one is independent.
  3514. if (item.getAttributeValue('INDEPENDENT') != 'YES' && i > 0) {
  3515. partial.markAsNonIndependent();
  3516. }
  3517. const pMimeType = getMimeType(pUri);
  3518. if (pMimeType) {
  3519. partial.mimeType = pMimeType;
  3520. if (HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_.has(pMimeType)) {
  3521. partial.initSegmentReference = null;
  3522. }
  3523. }
  3524. partialSegmentRefs.push(partial);
  3525. if (partialSyncTime) {
  3526. partialSyncTime += pDuration;
  3527. }
  3528. } // for-loop of hlsSegment.partialSegments
  3529. }
  3530. // If the segment has EXTINF tag, set the segment's end time, start byte
  3531. // and end byte based on the duration and byterange information.
  3532. // Otherwise, calculate the end time, start / end byte based on its partial
  3533. // segments.
3534. // Note that the sum of the partial segments' durations may differ slightly
3535. // from the parent segment's duration. In this case, use the duration from
  3536. // the parent segment tag.
  3537. if (extinfTag) {
  3538. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  3539. // We're interested in the duration part.
  3540. const extinfValues = extinfTag.value.split(',');
  3541. const duration = Number(extinfValues[0]);
  3542. // Skip segments without duration
  3543. if (duration == 0) {
  3544. return null;
  3545. }
  3546. endTime = startTime + duration;
  3547. } else if (partialSegmentRefs.length) {
  3548. endTime = partialSegmentRefs[partialSegmentRefs.length - 1].endTime;
  3549. } else {
  3550. // Skip segments without duration and without partial segments
  3551. return null;
  3552. }
  3553. if (segmentWithByteRangeOptimization) {
  3554. // We cannot optimize segments with gaps, or with a start byte that is
  3555. // not 0.
  3556. if (somePartialSegmentWithGap || partialSegmentRefs[0].startByte != 0) {
  3557. segmentWithByteRangeOptimization = false;
  3558. getUrisOptimization = null;
  3559. } else {
  3560. partialSegmentRefs = [];
  3561. }
  3562. }
3563. // If the segment has an EXT-X-BYTERANGE tag, set the start byte and end
3564. // byte based on the byterange information. If the segment has no
3565. // EXT-X-BYTERANGE tag but has partial segments, set the start byte and
3566. // end byte based on the partial segments.
  3567. const byterangeTag =
  3568. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-BYTERANGE');
  3569. if (byterangeTag) {
  3570. [startByte, endByte] =
  3571. this.parseByteRange_(previousReference, byterangeTag.value);
  3572. } else if (partialSegmentRefs.length) {
  3573. startByte = partialSegmentRefs[0].startByte;
  3574. endByte = partialSegmentRefs[partialSegmentRefs.length - 1].endByte;
  3575. }
  3576. let tilesLayout = '';
  3577. let tileDuration = null;
  3578. if (type == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3579. // By default in HLS the tilesLayout is 1x1
  3580. tilesLayout = '1x1';
  3581. const tilesTag =
  3582. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-TILES');
  3583. if (tilesTag) {
  3584. tilesLayout = tilesTag.getRequiredAttrValue('LAYOUT');
  3585. const duration = tilesTag.getAttributeValue('DURATION');
  3586. if (duration) {
  3587. tileDuration = Number(duration);
  3588. }
  3589. }
  3590. }
  3591. let uris = null;
  3592. const getSegmentUris = () => {
  3593. if (getUrisOptimization) {
  3594. return getUrisOptimization();
  3595. }
  3596. if (uris == null) {
  3597. uris = shaka.hls.Utils.constructSegmentUris(getUris(),
  3598. hlsSegment.verbatimSegmentUri, variables);
  3599. }
  3600. return uris || [];
  3601. };
  3602. const allPartialSegments = partialSegmentRefs.length > 0 &&
  3603. !!hlsSegment.verbatimSegmentUri;
  3604. const reference = new shaka.media.SegmentReference(
  3605. startTime,
  3606. endTime,
  3607. getSegmentUris,
  3608. startByte,
  3609. endByte,
  3610. initSegmentReference,
  3611. /* timestampOffset= */ 0,
  3612. /* appendWindowStart= */ 0,
  3613. /* appendWindowEnd= */ Infinity,
  3614. partialSegmentRefs,
  3615. tilesLayout,
  3616. tileDuration,
  3617. syncTime,
  3618. status,
  3619. aesKey,
  3620. allPartialSegments,
  3621. );
  3622. const mimeType = getMimeType(hlsSegment.verbatimSegmentUri);
  3623. if (mimeType) {
  3624. reference.mimeType = mimeType;
  3625. if (HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_.has(mimeType)) {
  3626. reference.initSegmentReference = null;
  3627. }
  3628. }
  3629. if (segmentWithByteRangeOptimization) {
  3630. this.lowLatencyByterangeOptimization_ = true;
  3631. reference.markAsByterangeOptimization();
  3632. if (isPreloadSegment) {
  3633. reference.markAsPreload();
  3634. }
  3635. }
  3636. return reference;
  3637. }
  3638. /**
  3639. * Parse the startByte and endByte.
  3640. * @param {shaka.media.SegmentReference} previousReference
  3641. * @param {?string} byterange
  3642. * @return {!Array<number>} An array with the start byte and end byte.
  3643. * @private
  3644. */
  3645. parseByteRange_(previousReference, byterange) {
  3646. let startByte = 0;
  3647. let endByte = null;
  3648. // If BYTERANGE is not specified, the segment consists of the entire
  3649. // resource.
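// Illustrative examples (hypothetical values): '720@1000' yields [1000, 1719];
// '720' with no offset continues from the previous reference, so if that
// reference ended at byte 999 the result is also [1000, 1719].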
  3650. if (byterange) {
  3651. const blocks = byterange.split('@');
  3652. const byteLength = Number(blocks[0]);
  3653. if (blocks[1]) {
  3654. startByte = Number(blocks[1]);
  3655. } else {
  3656. goog.asserts.assert(previousReference,
  3657. 'Cannot refer back to previous HLS segment!');
  3658. startByte = previousReference.endByte + 1;
  3659. }
  3660. endByte = startByte + byteLength - 1;
  3661. }
  3662. return [startByte, endByte];
  3663. }
  3664. /**
  3665. * @param {!Array<!shaka.hls.Tag>} tags
  3666. * @param {string} contentType
  3667. * @param {!Map<string, string>} variables
  3668. * @param {function(): !Array<string>} getUris
  3669. * @private
  3670. */
  3671. processDateRangeTags_(tags, contentType, variables, getUris) {
  3672. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  3673. if (contentType != ContentType.VIDEO && contentType != ContentType.AUDIO) {
3674. // EXT-X-DATERANGE tags are only processed from AUDIO or VIDEO playlists.
3675. // We ignore those that appear in other playlists.
  3676. return;
  3677. }
  3678. const Utils = shaka.hls.Utils;
  3679. const initialProgramDateTime =
  3680. this.presentationTimeline_.getInitialProgramDateTime();
  3681. if (!initialProgramDateTime ||
  3682. this.ignoreManifestProgramDateTimeFor_(contentType)) {
  3683. return;
  3684. }
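// Illustrative tag (all values hypothetical):
//   #EXT-X-DATERANGE:ID="ad-1",CLASS="com.example.ad",START-DATE="2024-05-01T07:00:00Z",DURATION=30.0,X-ASSET-URI="ads.m3u8"
// START-DATE is mapped onto the presentation timeline relative to the initial
// EXT-X-PROGRAM-DATE-TIME, and the remaining attributes are emitted as
// metadata frames.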
  3685. let dateRangeTags =
  3686. shaka.hls.Utils.filterTagsByName(tags, 'EXT-X-DATERANGE');
  3687. dateRangeTags = dateRangeTags.filter((tag) => {
  3688. return tag.getAttribute('START-DATE') != null;
  3689. }).sort((a, b) => {
  3690. const aStartDateValue = a.getRequiredAttrValue('START-DATE');
  3691. const bStartDateValue = b.getRequiredAttrValue('START-DATE');
  3692. if (aStartDateValue < bStartDateValue) {
  3693. return -1;
  3694. }
  3695. if (aStartDateValue > bStartDateValue) {
  3696. return 1;
  3697. }
  3698. return 0;
  3699. });
  3700. for (let i = 0; i < dateRangeTags.length; i++) {
  3701. const tag = dateRangeTags[i];
  3702. try {
  3703. const id = tag.getRequiredAttrValue('ID');
  3704. if (this.dateRangeIdsEmitted_.has(id)) {
  3705. continue;
  3706. }
  3707. const startDateValue = tag.getRequiredAttrValue('START-DATE');
  3708. const startDate = shaka.util.TXml.parseDate(startDateValue);
  3709. if (isNaN(startDate)) {
  3710. // Invalid START-DATE
  3711. continue;
  3712. }
  3713. goog.asserts.assert(startDate != null,
  3714. 'Start date should not be null!');
  3715. const startTime = Math.max(0, startDate - initialProgramDateTime);
  3716. let endTime = null;
  3717. const endDateValue = tag.getAttributeValue('END-DATE');
  3718. if (endDateValue) {
  3719. const endDate = shaka.util.TXml.parseDate(endDateValue);
  3720. if (!isNaN(endDate)) {
  3721. goog.asserts.assert(endDate != null,
  3722. 'End date should not be null!');
  3723. endTime = endDate - initialProgramDateTime;
  3724. if (endTime < 0) {
  3725. // Date range in the past
  3726. continue;
  3727. }
  3728. }
  3729. }
  3730. if (endTime == null) {
  3731. const durationValue = tag.getAttributeValue('DURATION') ||
  3732. tag.getAttributeValue('PLANNED-DURATION');
  3733. if (durationValue) {
  3734. const duration = parseFloat(durationValue);
  3735. if (!isNaN(duration)) {
  3736. endTime = startTime + duration;
  3737. }
  3738. const realEndTime = startDate - initialProgramDateTime + duration;
  3739. if (realEndTime < 0) {
  3740. // Date range in the past
  3741. continue;
  3742. }
  3743. }
  3744. }
  3745. const type =
  3746. tag.getAttributeValue('CLASS') || 'com.apple.quicktime.HLS';
  3747. const endOnNext = tag.getAttributeValue('END-ON-NEXT') == 'YES';
  3748. if (endTime == null && endOnNext) {
  3749. for (let j = i + 1; j < dateRangeTags.length; j++) {
  3750. const otherDateRangeType =
  3751. dateRangeTags[j].getAttributeValue('CLASS') ||
  3752. 'com.apple.quicktime.HLS';
  3753. if (type != otherDateRangeType) {
  3754. continue;
  3755. }
  3756. const otherDateRangeStartDateValue =
  3757. dateRangeTags[j].getRequiredAttrValue('START-DATE');
  3758. const otherDateRangeStartDate =
  3759. shaka.util.TXml.parseDate(otherDateRangeStartDateValue);
  3760. if (isNaN(otherDateRangeStartDate)) {
  3761. // Invalid START-DATE
  3762. continue;
  3763. }
  3764. if (otherDateRangeStartDate &&
  3765. otherDateRangeStartDate > startDate) {
  3766. endTime = Math.max(0,
  3767. otherDateRangeStartDate - initialProgramDateTime);
  3768. break;
  3769. }
  3770. }
  3771. if (endTime == null) {
3772. // Since we cannot know when it ends, we omit it for now; a future
3773. // playlist update may give us more information.
  3774. continue;
  3775. }
  3776. }
  3777. // Exclude these attributes from the metadata since they already go into
3778. // other fields (e.g. startTime or endTime) or are not necessary.
  3779. const excludedAttributes = [
  3780. 'CLASS',
  3781. 'START-DATE',
  3782. 'END-DATE',
  3783. 'DURATION',
  3784. 'END-ON-NEXT',
  3785. ];
3786. /** @type {!Array<shaka.extern.MetadataFrame>} */
  3787. const values = [];
  3788. for (const attribute of tag.attributes) {
  3789. if (excludedAttributes.includes(attribute.name)) {
  3790. continue;
  3791. }
  3792. let data = Utils.variableSubstitution(attribute.value, variables);
  3793. if (attribute.name == 'X-ASSET-URI' ||
  3794. attribute.name == 'X-ASSET-LIST') {
  3795. data = Utils.constructSegmentUris(
  3796. getUris(), attribute.value, variables)[0];
  3797. }
  3798. const metadataFrame = {
  3799. key: attribute.name,
  3800. description: '',
  3801. data,
  3802. mimeType: null,
  3803. pictureType: null,
  3804. };
  3805. values.push(metadataFrame);
  3806. }
3807. // The required ID attribute is always included, so we need more than 1 value.
  3808. if (values.length > 1) {
  3809. this.playerInterface_.onMetadata(type, startTime, endTime, values);
  3810. }
  3811. this.dateRangeIdsEmitted_.add(id);
  3812. } catch (e) {
  3813. shaka.log.warning('Ignoring DATERANGE with errors', tag.toString());
  3814. }
  3815. }
  3816. }
  3817. /**
  3818. * Parses shaka.hls.Segment objects into shaka.media.SegmentReferences and
3819. * gets the bandwidth for these segments, if it is defined in the
  3820. * playlist.
  3821. *
  3822. * @param {!shaka.hls.Playlist} playlist
  3823. * @param {!Map<number, number>} mediaSequenceToStartTime
  3824. * @param {!Map<string, string>} variables
  3825. * @param {function(): !Array<string>} getUris
  3826. * @param {string} type
  3827. * @return {{segments: !Array<!shaka.media.SegmentReference>,
  3828. * bandwidth: (number|undefined)}}
  3829. * @private
  3830. */
  3831. createSegments_(playlist, mediaSequenceToStartTime, variables,
  3832. getUris, type) {
  3833. /** @type {Array<!shaka.hls.Segment>} */
  3834. const hlsSegments = playlist.segments;
  3835. goog.asserts.assert(hlsSegments.length, 'Playlist should have segments!');
  3836. /** @type {shaka.media.InitSegmentReference} */
  3837. let initSegmentRef;
  3838. /** @type {shaka.extern.aesKey|undefined} */
  3839. let aesKey = undefined;
  3840. let discontinuitySequence = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3841. playlist.tags, 'EXT-X-DISCONTINUITY-SEQUENCE', -1);
  3842. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3843. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  3844. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  3845. playlist.tags, 'EXT-X-SKIP');
  3846. const skippedSegments =
  3847. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
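// Illustrative delta update tag (hypothetical count): #EXT-X-SKIP:SKIPPED-SEGMENTS=10
// means the 10 oldest segments were omitted, so the first listed segment has
// media sequence number mediaSequenceNumber + 10.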
  3848. let position = mediaSequenceNumber + skippedSegments;
  3849. let firstStartTime = 0;
3850. // For live streams, use the cached value in the mediaSequenceToStartTime
  3851. // map if available.
  3852. if (this.isLive_() && mediaSequenceToStartTime.has(position)) {
  3853. firstStartTime = mediaSequenceToStartTime.get(position);
  3854. }
  3855. // This is for recovering from disconnects.
  3856. if (firstStartTime === 0 &&
  3857. this.presentationType_ == shaka.hls.HlsParser.PresentationType_.LIVE &&
  3858. mediaSequenceToStartTime.size > 0 &&
  3859. !mediaSequenceToStartTime.has(position) &&
  3860. this.presentationTimeline_.getPresentationStartTime() != null) {
  3861. firstStartTime = this.presentationTimeline_.getSegmentAvailabilityStart();
  3862. }
  3863. /** @type {!Array<!shaka.media.SegmentReference>} */
  3864. const references = [];
  3865. let previousReference = null;
  3866. /** @type {!Array<{bitrate: number, duration: number}>} */
  3867. const bitrates = [];
  3868. for (let i = 0; i < hlsSegments.length; i++) {
  3869. const item = hlsSegments[i];
  3870. const startTime =
  3871. (i == 0) ? firstStartTime : previousReference.endTime;
  3872. position = mediaSequenceNumber + skippedSegments + i;
  3873. const discontinuityTag = shaka.hls.Utils.getFirstTagWithName(
  3874. item.tags, 'EXT-X-DISCONTINUITY');
  3875. if (discontinuityTag) {
  3876. discontinuitySequence++;
  3877. if (previousReference && previousReference.initSegmentReference) {
  3878. previousReference.initSegmentReference.boundaryEnd = startTime;
  3879. }
  3880. }
3881. // Apply new EXT-X-KEY tags as they appear, tracking the most recent AES key.
  3882. for (const drmTag of item.tags) {
  3883. if (drmTag.name == 'EXT-X-KEY') {
  3884. if (this.isAesMethod_(drmTag.getRequiredAttrValue('METHOD'))) {
  3885. aesKey =
  3886. this.parseAESDrmTag_(drmTag, playlist, getUris, variables);
  3887. } else {
  3888. aesKey = undefined;
  3889. }
  3890. }
  3891. }
  3892. mediaSequenceToStartTime.set(position, startTime);
  3893. initSegmentRef = this.getInitSegmentReference_(playlist,
  3894. item.tags, getUris, variables);
  3895. const reference = this.createSegmentReference_(
  3896. initSegmentRef,
  3897. previousReference,
  3898. item,
  3899. startTime,
  3900. variables,
  3901. playlist,
  3902. type,
  3903. getUris,
  3904. aesKey);
  3905. if (reference) {
  3906. const bitrate = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3907. item.tags, 'EXT-X-BITRATE');
  3908. if (bitrate) {
  3909. bitrates.push({
  3910. bitrate,
  3911. duration: reference.endTime - reference.startTime,
  3912. });
  3913. } else if (bitrates.length) {
3914. // An EXT-X-BITRATE value applies to every segment until the next
3915. // EXT-X-BITRATE tag, so extend the duration of the latest bitrate entry.
  3916. const prevBitrate = bitrates.pop();
  3917. prevBitrate.duration += reference.endTime - reference.startTime;
  3918. bitrates.push(prevBitrate);
  3919. }
  3920. previousReference = reference;
  3921. reference.discontinuitySequence = discontinuitySequence;
  3922. if (this.ignoreManifestProgramDateTimeFor_(type) &&
  3923. this.minSequenceNumber_ != null &&
  3924. position < this.minSequenceNumber_) {
  3925. // This segment is ignored as part of our fallback synchronization
  3926. // method.
  3927. } else {
  3928. references.push(reference);
  3929. }
  3930. }
  3931. }
  3932. let bandwidth = undefined;
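// The overall bandwidth is the duration-weighted average of the EXT-X-BITRATE
// values (expressed in kbps), converted to bits per second. Illustrative
// example (hypothetical values): 8000 kbps for 6s and 6000 kbps for 4s
// averages to 7200 kbps, i.e. bandwidth = 7200000.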
  3933. if (bitrates.length) {
  3934. const duration = bitrates.reduce((sum, value) => {
  3935. return sum + value.duration;
  3936. }, 0);
  3937. bandwidth = Math.round(bitrates.reduce((sum, value) => {
  3938. return sum + value.bitrate * value.duration;
  3939. }, 0) / duration * 1000);
  3940. }
  3941. // If some segments have sync times, but not all, extrapolate the sync
  3942. // times of the ones with none.
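// Illustrative example (hypothetical times): with 6-second segments, if only
// the third reference has syncTime=100, its neighbors are extrapolated to 94
// (one duration earlier) and 106 (one duration later).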
  3943. const someSyncTime = references.some((ref) => ref.syncTime != null);
  3944. if (someSyncTime) {
  3945. for (let i = 0; i < references.length; i++) {
  3946. const reference = references[i];
  3947. if (reference.syncTime != null) {
  3948. // No need to extrapolate.
  3949. continue;
  3950. }
  3951. // Find the nearest segment with syncTime, in either direction.
  3952. // This looks forward and backward simultaneously, keeping track of what
  3953. // to offset the syncTime it finds by as it goes.
  3954. let forwardAdd = 0;
  3955. let forwardI = i;
  3956. /**
  3957. * Look forwards one reference at a time, summing all durations as we
  3958. * go, until we find a reference with a syncTime to use as a basis.
  3959. * This DOES count the original reference, but DOESN'T count the first
  3960. * reference with a syncTime (as we approach it from behind).
  3961. * @return {?number}
  3962. */
  3963. const lookForward = () => {
  3964. const other = references[forwardI];
  3965. if (other) {
  3966. if (other.syncTime != null) {
  3967. return other.syncTime + forwardAdd;
  3968. }
  3969. forwardAdd -= other.endTime - other.startTime;
  3970. forwardI += 1;
  3971. }
  3972. return null;
  3973. };
  3974. let backwardAdd = 0;
  3975. let backwardI = i;
  3976. /**
  3977. * Look backwards one reference at a time, summing all durations as we
  3978. * go, until we find a reference with a syncTime to use as a basis.
  3979. * This DOESN'T count the original reference, but DOES count the first
  3980. * reference with a syncTime (as we approach it from ahead).
  3981. * @return {?number}
  3982. */
  3983. const lookBackward = () => {
  3984. const other = references[backwardI];
  3985. if (other) {
  3986. if (other != reference) {
  3987. backwardAdd += other.endTime - other.startTime;
  3988. }
  3989. if (other.syncTime != null) {
  3990. return other.syncTime + backwardAdd;
  3991. }
  3992. backwardI -= 1;
  3993. }
  3994. return null;
  3995. };
  3996. while (reference.syncTime == null) {
  3997. reference.syncTime = lookBackward();
  3998. if (reference.syncTime == null) {
  3999. reference.syncTime = lookForward();
  4000. }
  4001. }
  4002. }
  4003. }
  4004. // Split the sync times properly among partial segments.
  4005. if (someSyncTime) {
  4006. for (const reference of references) {
  4007. let syncTime = reference.syncTime;
  4008. for (const partial of reference.partialReferences) {
  4009. partial.syncTime = syncTime;
  4010. syncTime += partial.endTime - partial.startTime;
  4011. }
  4012. }
  4013. }
  4014. // lowestSyncTime is a value from a previous playlist update. Use it to
  4015. // set reference start times. If this is the first playlist parse, we will
  4016. // skip this step, and wait until we have sync time across stream types.
  4017. const lowestSyncTime = this.lowestSyncTime_;
  4018. if (someSyncTime && lowestSyncTime != Infinity) {
  4019. if (!this.ignoreManifestProgramDateTimeFor_(type)) {
  4020. for (const reference of references) {
  4021. reference.syncAgainst(lowestSyncTime);
  4022. }
  4023. }
  4024. }
  4025. return {
  4026. segments: references,
  4027. bandwidth,
  4028. };
  4029. }
  4030. /**
  4031. * Attempts to guess stream's mime type based on content type and URI.
  4032. *
  4033. * @param {string} contentType
  4034. * @param {string} codecs
  4035. * @return {?string}
  4036. * @private
  4037. */
  4038. guessMimeTypeBeforeLoading_(contentType, codecs) {
  4039. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  4040. if (codecs == 'vtt' || codecs == 'wvtt') {
4041. // If the codecs value is 'vtt' or 'wvtt', it's WebVTT.
  4042. return 'text/vtt';
  4043. } else if (codecs && codecs !== '') {
  4044. // Otherwise, assume MP4-embedded text, since text-based formats tend
  4045. // not to have a codecs string at all.
  4046. return 'application/mp4';
  4047. }
  4048. }
  4049. if (contentType == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  4050. if (!codecs || codecs == 'jpeg') {
  4051. return 'image/jpeg';
  4052. }
  4053. }
  4054. if (contentType == shaka.util.ManifestParserUtils.ContentType.AUDIO) {
  4055. // See: https://bugs.chromium.org/p/chromium/issues/detail?id=489520
  4056. if (codecs == 'mp4a.40.34') {
  4057. return 'audio/mpeg';
  4058. }
  4059. }
  4060. if (codecs == 'mjpg') {
  4061. return 'application/mp4';
  4062. }
  4063. // Not enough information to guess from the content type and codecs.
  4064. return null;
  4065. }
  4066. /**
  4067. * Get a fallback mime type for the content. Used if all the better methods
  4068. * for determining the mime type have failed.
  4069. *
  4070. * @param {string} contentType
  4071. * @return {string}
  4072. * @private
  4073. */
  4074. guessMimeTypeFallback_(contentType) {
  4075. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  4076. // If there was no codecs string and no content-type, assume HLS text
  4077. // streams are WebVTT.
  4078. return 'text/vtt';
  4079. }
  4080. // If the HLS content is lacking in both MIME type metadata and
  4081. // segment file extensions, we fall back to assuming it's MP4.
  4082. const map =
  4083. shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_.get(contentType);
  4084. return map.get('mp4');
  4085. }
  4086. /**
  4087. * @param {!Array<!shaka.media.SegmentReference>} segments
  4088. * @return {{segment: !shaka.media.SegmentReference, segmentIndex: number}}
  4089. * @private
  4090. */
  4091. getAvailableSegment_(segments) {
  4092. goog.asserts.assert(segments.length, 'Should have segments!');
  4093. // If you wait long enough, requesting the first segment can fail
4094. // because it has fallen off the left edge of the DVR window, so to be
4095. // safer, let's request the middle segment.
  4096. let segmentIndex = this.isLive_() ?
  4097. Math.trunc((segments.length - 1) / 2) : 0;
  4098. let segment = segments[segmentIndex];
  4099. while (segment.getStatus() == shaka.media.SegmentReference.Status.MISSING &&
  4100. (segmentIndex + 1) < segments.length) {
4101. segmentIndex++;
  4102. segment = segments[segmentIndex];
  4103. }
  4104. return {segment, segmentIndex};
  4105. }
  4106. /**
  4107. * Attempts to guess stream's mime type.
  4108. *
  4109. * @param {string} contentType
  4110. * @param {string} codecs
  4111. * @param {!Array<!shaka.media.SegmentReference>} segments
  4112. * @return {!Promise<string>}
  4113. * @private
  4114. */
  4115. async guessMimeType_(contentType, codecs, segments) {
  4116. const HlsParser = shaka.hls.HlsParser;
  4117. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  4118. const {segment} = this.getAvailableSegment_(segments);
  4119. if (segment.status == shaka.media.SegmentReference.Status.MISSING) {
  4120. return this.guessMimeTypeFallback_(contentType);
  4121. }
  4122. const segmentUris = segment.getUris();
  4123. const parsedUri = new goog.Uri(segmentUris[0]);
  4124. const extension = parsedUri.getPath().split('.').pop();
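// Illustrative example (hypothetical URI): "https://example.com/seg/0001.ts"
// yields the extension "ts", which the extension map typically resolves to
// "video/mp2t".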
  4125. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_.get(contentType);
  4126. let mimeType = map.get(extension);
  4127. if (mimeType) {
  4128. return mimeType;
  4129. }
  4130. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_.get(extension);
  4131. if (mimeType) {
  4132. return mimeType;
  4133. }
  4134. // The extension map didn't work, so guess based on codecs.
  4135. mimeType = this.guessMimeTypeBeforeLoading_(contentType, codecs);
  4136. if (mimeType) {
  4137. return mimeType;
  4138. }
  4139. // If unable to guess mime type, request a segment and try getting it
  4140. // from the response.
  4141. let contentMimeType;
  4142. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  4143. const headRequest = shaka.net.NetworkingEngine.makeRequest(
  4144. segmentUris, this.config_.retryParameters);
  4145. let response;
  4146. try {
  4147. headRequest.method = 'HEAD';
  4148. response = await this.makeNetworkRequest_(
  4149. headRequest, requestType, {type}).promise;
  4150. contentMimeType = response.headers['content-type'];
  4151. } catch (error) {
  4152. if (error &&
  4153. (error.code == shaka.util.Error.Code.HTTP_ERROR ||
  4154. error.code == shaka.util.Error.Code.BAD_HTTP_STATUS)) {
  4155. headRequest.method = 'GET';
  4156. if (this.config_.hls.allowRangeRequestsToGuessMimeType) {
4157. // Only request the first byte.
  4158. headRequest.headers['Range'] = 'bytes=0-0';
  4159. }
  4160. response = await this.makeNetworkRequest_(
  4161. headRequest, requestType, {type}).promise;
  4162. contentMimeType = response.headers['content-type'];
  4163. }
  4164. }
  4165. if (contentMimeType) {
  4166. // Split the MIME type in case the server sent additional parameters.
  4167. mimeType = contentMimeType.toLowerCase().split(';')[0];
  4168. if (mimeType == 'application/octet-stream') {
  4169. if (!response.data.byteLength) {
  4170. headRequest.method = 'GET';
  4171. response = await this.makeNetworkRequest_(
  4172. headRequest, requestType, {type}).promise;
  4173. }
  4174. if (shaka.util.TsParser.probe(
  4175. shaka.util.BufferUtils.toUint8(response.data))) {
  4176. mimeType = 'video/mp2t';
  4177. }
  4178. }
  4179. if (mimeType != 'application/octet-stream') {
  4180. return mimeType;
  4181. }
  4182. }
  4183. return this.guessMimeTypeFallback_(contentType);
  4184. }
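// Summary of the guessing order above (illustrative, with a hypothetical
// segment URI), assuming a 'video' stream:
//
//   // 1. Extension map: 'https://cdn.example.com/seg/0001.ts' -> 'video/mp2t'
//   // 2. Raw-format map: '.aac', '.ac3', '.ec3', '.mp3' -> raw audio types.
//   // 3. Codecs-based guess (guessMimeTypeBeforeLoading_).
//   // 4. A HEAD request for the segment; if the server rejects it, a GET
//   //    (limited to the first byte when
//   //    hls.allowRangeRequestsToGuessMimeType is set), using the
//   //    'content-type' header and probing for MPEG-2 TS when the server
//   //    answers 'application/octet-stream'.
//   // 5. guessMimeTypeFallback_(contentType).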
  4185. /**
  4186. * Returns a tag with a given name.
  4187. * Throws an error if tag was not found.
  4188. *
  4189. * @param {!Array<shaka.hls.Tag>} tags
  4190. * @param {string} tagName
  4191. * @return {!shaka.hls.Tag}
  4192. * @private
  4193. */
  4194. getRequiredTag_(tags, tagName) {
  4195. const tag = shaka.hls.Utils.getFirstTagWithName(tags, tagName);
  4196. if (!tag) {
  4197. throw new shaka.util.Error(
  4198. shaka.util.Error.Severity.CRITICAL,
  4199. shaka.util.Error.Category.MANIFEST,
  4200. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, tagName);
  4201. }
  4202. return tag;
  4203. }
  4204. /**
  4205. * @param {shaka.extern.Stream} stream
  4206. * @param {?string} width
  4207. * @param {?string} height
  4208. * @param {?string} frameRate
  4209. * @param {?string} videoRange
  4210. * @param {?string} videoLayout
  4211. * @param {?string} colorGamut
  4212. * @private
  4213. */
  4214. addVideoAttributes_(stream, width, height, frameRate, videoRange,
  4215. videoLayout, colorGamut) {
  4216. if (stream) {
  4217. stream.width = Number(width) || undefined;
  4218. stream.height = Number(height) || undefined;
  4219. stream.frameRate = Number(frameRate) || undefined;
  4220. stream.hdr = videoRange || undefined;
  4221. stream.videoLayout = videoLayout || undefined;
  4222. stream.colorGamut = colorGamut || undefined;
  4223. }
  4224. }
  4225. /**
  4226. * Makes a network request for the manifest and returns a Promise
  4227. * with the resulting data.
  4228. *
  4229. * @param {!Array<string>} uris
  4230. * @param {boolean=} isPlaylist
  4231. * @return {!shaka.net.NetworkingEngine.PendingRequest}
  4232. * @private
  4233. */
  4234. requestManifest_(uris, isPlaylist) {
  4235. const requestType = shaka.net.NetworkingEngine.RequestType.MANIFEST;
  4236. const request = shaka.net.NetworkingEngine.makeRequest(
  4237. uris, this.config_.retryParameters);
  4238. const type = isPlaylist ?
  4239. shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_PLAYLIST :
  4240. shaka.net.NetworkingEngine.AdvancedRequestType.MASTER_PLAYLIST;
  4241. return this.makeNetworkRequest_(request, requestType, {type});
  4242. }
  4243. /**
  4244. * Called when the update timer ticks. Because parsing a manifest is async,
4245. * this method is async. To work with this, this method schedules the next
4246. * update when it finishes instead of relying on a repeating timer.
  4247. *
  4248. * @return {!Promise}
  4249. * @private
  4250. */
  4251. async onUpdate_() {
  4252. shaka.log.info('Updating manifest...');
  4253. goog.asserts.assert(
  4254. this.getUpdatePlaylistDelay_() > 0,
4255. 'We should only call |onUpdate_| when we are supposed to be updating.');
  4256. // Detect a call to stop()
  4257. if (!this.playerInterface_) {
  4258. return;
  4259. }
  4260. try {
  4261. const startTime = Date.now();
  4262. await this.update();
4263. // Keep a running average of how long manifest updates take.
  4264. const endTime = Date.now();
  4265. // This may have converted to VOD, in which case we stop updating.
  4266. if (this.isLive_()) {
  4267. const updateDuration = (endTime - startTime) / 1000.0;
  4268. this.averageUpdateDuration_.sample(1, updateDuration);
  4269. const delay = this.config_.updatePeriod > 0 ?
  4270. this.config_.updatePeriod : this.getUpdatePlaylistDelay_();
  4271. const finalDelay = Math.max(0,
  4272. delay - this.averageUpdateDuration_.getEstimate());
  4273. this.updatePlaylistTimer_.tickAfter(/* seconds= */ finalDelay);
  4274. }
  4275. } catch (error) {
  4276. // Detect a call to stop() during this.update()
  4277. if (!this.playerInterface_) {
  4278. return;
  4279. }
  4280. goog.asserts.assert(error instanceof shaka.util.Error,
  4281. 'Should only receive a Shaka error');
  4282. if (this.config_.raiseFatalErrorOnManifestUpdateRequestFailure) {
  4283. this.playerInterface_.onError(error);
  4284. return;
  4285. }
  4286. // We will retry updating, so override the severity of the error.
  4287. error.severity = shaka.util.Error.Severity.RECOVERABLE;
  4288. this.playerInterface_.onError(error);
  4289. // Try again very soon.
  4290. this.updatePlaylistTimer_.tickAfter(/* seconds= */ 0.1);
  4291. }
  4292. // Detect a call to stop()
  4293. if (!this.playerInterface_) {
  4294. return;
  4295. }
  4296. this.playerInterface_.onManifestUpdated();
  4297. }
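// Illustrative arithmetic (not part of the parser): with a 6-second target
// duration (or configured updatePeriod) and an EWMA of past update durations
// around 0.5 seconds, the next update is scheduled in
// Math.max(0, 6 - 0.5) == 5.5 seconds, keeping updates roughly one target
// duration apart despite the time spent fetching the playlists.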
  4298. /**
  4299. * @return {boolean}
  4300. * @private
  4301. */
  4302. isLive_() {
  4303. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  4304. return this.presentationType_ != PresentationType.VOD;
  4305. }
  4306. /**
  4307. * @return {number}
  4308. * @private
  4309. */
  4310. getUpdatePlaylistDelay_() {
  4311. // The HLS spec (RFC 8216) states in 6.3.4:
  4312. // "the client MUST wait for at least the target duration before
  4313. // attempting to reload the Playlist file again".
  4314. // For LL-HLS, the server must add a new partial segment to the Playlist
  4315. // every part target duration.
  4316. return this.lastTargetDuration_;
  4317. }
  4318. /**
  4319. * @param {shaka.hls.HlsParser.PresentationType_} type
  4320. * @private
  4321. */
  4322. setPresentationType_(type) {
  4323. this.presentationType_ = type;
  4324. if (this.presentationTimeline_) {
  4325. this.presentationTimeline_.setStatic(!this.isLive_());
  4326. }
  4327. // If this manifest is not for live content, then we have no reason to
  4328. // update it.
  4329. if (!this.isLive_()) {
  4330. this.updatePlaylistTimer_.stop();
  4331. }
  4332. }
  4333. /**
  4334. * Create a networking request. This will manage the request using the
  4335. * parser's operation manager. If the parser has already been stopped, the
  4336. * request will not be made.
  4337. *
  4338. * @param {shaka.extern.Request} request
  4339. * @param {shaka.net.NetworkingEngine.RequestType} type
  4340. * @param {shaka.extern.RequestContext=} context
  4341. * @return {!shaka.net.NetworkingEngine.PendingRequest}
  4342. * @private
  4343. */
  4344. makeNetworkRequest_(request, type, context) {
  4345. if (!this.operationManager_) {
  4346. throw new shaka.util.Error(
  4347. shaka.util.Error.Severity.CRITICAL,
  4348. shaka.util.Error.Category.PLAYER,
  4349. shaka.util.Error.Code.OPERATION_ABORTED);
  4350. }
  4351. if (!context) {
  4352. context = {};
  4353. }
  4354. context.isPreload = this.isPreloadFn_();
  4355. const op = this.playerInterface_.networkingEngine.request(
  4356. type, request, context);
  4357. this.operationManager_.manage(op);
  4358. return op;
  4359. }
  4360. /**
  4361. * @param {string} method
  4362. * @return {boolean}
  4363. * @private
  4364. */
  4365. isAesMethod_(method) {
  4366. return method == 'AES-128' ||
  4367. method == 'AES-256' ||
  4368. method == 'AES-256-CTR';
  4369. }
  4370. /**
  4371. * @param {!shaka.hls.Tag} drmTag
  4372. * @param {string} mimeType
  4373. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  4374. * @return {!Promise<?shaka.extern.DrmInfo>}
  4375. * @private
  4376. */
  4377. async fairplayDrmParser_(drmTag, mimeType, initSegmentRef) {
  4378. if (mimeType == 'video/mp2t') {
  4379. throw new shaka.util.Error(
  4380. shaka.util.Error.Severity.CRITICAL,
  4381. shaka.util.Error.Category.MANIFEST,
  4382. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  4383. }
  4384. if (shaka.drm.DrmUtils.isMediaKeysPolyfilled('apple')) {
  4385. throw new shaka.util.Error(
  4386. shaka.util.Error.Severity.CRITICAL,
  4387. shaka.util.Error.Category.MANIFEST,
  4388. shaka.util.Error.Code
  4389. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  4390. }
  4391. const method = drmTag.getRequiredAttrValue('METHOD');
  4392. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4393. if (!VALID_METHODS.includes(method)) {
  4394. shaka.log.error('FairPlay in HLS is only supported with [',
  4395. VALID_METHODS.join(', '), '], not', method);
  4396. return null;
  4397. }
  4398. let encryptionScheme = 'cenc';
  4399. if (method == 'SAMPLE-AES') {
  4400. // It should be 'cbcs-1-9' but Safari doesn't support it.
  4401. // See: https://github.com/WebKit/WebKit/blob/main/Source/WebCore/Modules/encryptedmedia/MediaKeyEncryptionScheme.idl
  4402. encryptionScheme = 'cbcs';
  4403. }
  4404. const uri = drmTag.getRequiredAttrValue('URI');
  4405. /*
  4406. * Even if we're not able to construct initData through the HLS tag, adding
4407. * a DrmInfo will allow DrmEngine to request media key system access
4408. * with the correct keySystem and initDataType.
  4409. */
  4410. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4411. 'com.apple.fps', encryptionScheme, [
  4412. {initDataType: 'sinf', initData: new Uint8Array(0), keyId: null},
  4413. ], uri);
  4414. let keyId = shaka.drm.FairPlay.defaultGetKeyId(uri);
  4415. if (!keyId && initSegmentRef) {
  4416. keyId = await this.getKeyIdFromInitSegment_(initSegmentRef);
  4417. }
  4418. if (keyId) {
  4419. drmInfo.keyIds.add(keyId);
  4420. }
  4421. return drmInfo;
  4422. }
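// Illustrative example (not part of the parser, values hypothetical): a
// FairPlay key tag handled by this method typically looks like
//
//   #EXT-X-KEY:METHOD=SAMPLE-AES,URI="skd://some-key-id",
//     KEYFORMAT="com.apple.streamingkeydelivery",KEYFORMATVERSIONS="1"
//
// (wrapped here for readability; in a playlist the tag is a single line).
// The tag's URI is forwarded with the DrmInfo, and the key ID comes either
// from that URI (shaka.drm.FairPlay.defaultGetKeyId) or from the init
// segment's default KID.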
  4423. /**
  4424. * @param {!shaka.hls.Tag} drmTag
  4425. * @param {string} mimeType
  4426. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  4427. * @return {!Promise<?shaka.extern.DrmInfo>}
  4428. * @private
  4429. */
  4430. widevineDrmParser_(drmTag, mimeType, initSegmentRef) {
  4431. const method = drmTag.getRequiredAttrValue('METHOD');
  4432. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4433. if (!VALID_METHODS.includes(method)) {
  4434. shaka.log.error('Widevine in HLS is only supported with [',
  4435. VALID_METHODS.join(', '), '], not', method);
  4436. return Promise.resolve(null);
  4437. }
  4438. let encryptionScheme = 'cenc';
  4439. if (method == 'SAMPLE-AES') {
  4440. encryptionScheme = 'cbcs';
  4441. }
  4442. const uri = drmTag.getRequiredAttrValue('URI');
  4443. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
  4444. // The data encoded in the URI is a PSSH box to be used as init data.
  4445. const pssh = shaka.util.BufferUtils.toUint8(parsedData.data);
  4446. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4447. 'com.widevine.alpha', encryptionScheme, [
  4448. {initDataType: 'cenc', initData: pssh},
  4449. ]);
  4450. const keyId = drmTag.getAttributeValue('KEYID');
  4451. if (keyId) {
  4452. const keyIdLowerCase = keyId.toLowerCase();
  4453. // This value should begin with '0x':
  4454. goog.asserts.assert(
  4455. keyIdLowerCase.startsWith('0x'), 'Incorrect KEYID format!');
  4456. // But the output should not contain the '0x':
  4457. drmInfo.keyIds.add(keyIdLowerCase.substr(2));
  4458. }
  4459. return Promise.resolve(drmInfo);
  4460. }
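// Illustrative example (not part of the parser, values hypothetical): a
// Widevine key tag typically looks like
//
//   #EXT-X-KEY:METHOD=SAMPLE-AES-CTR,
//     URI="data:text/plain;base64,<base64-encoded PSSH box>",
//     KEYID=0x800AACAA522958AE888062B5695DB6BF,
//     KEYFORMAT="urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed",
//     KEYFORMATVERSIONS="1"
//
// (wrapped for readability). The data URI's payload is decoded and used
// verbatim as 'cenc' init data, and the KEYID, minus its '0x' prefix, is
// added to drmInfo.keyIds.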
  4461. /**
  4462. * See: https://docs.microsoft.com/en-us/playready/packaging/mp4-based-formats-supported-by-playready-clients?tabs=case4
  4463. *
  4464. * @param {!shaka.hls.Tag} drmTag
  4465. * @param {string} mimeType
  4466. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  4467. * @return {!Promise<?shaka.extern.DrmInfo>}
  4468. * @private
  4469. */
  4470. playreadyDrmParser_(drmTag, mimeType, initSegmentRef) {
  4471. const method = drmTag.getRequiredAttrValue('METHOD');
  4472. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4473. if (!VALID_METHODS.includes(method)) {
  4474. shaka.log.error('PlayReady in HLS is only supported with [',
  4475. VALID_METHODS.join(', '), '], not', method);
  4476. return Promise.resolve(null);
  4477. }
  4478. let encryptionScheme = 'cenc';
  4479. if (method == 'SAMPLE-AES') {
  4480. encryptionScheme = 'cbcs';
  4481. }
  4482. const uri = drmTag.getRequiredAttrValue('URI');
  4483. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
4484. // The data encoded in the URI is a PlayReady Object (PRO), so we need to
4485. // convert it to a PSSH box.
  4486. const data = shaka.util.BufferUtils.toUint8(parsedData.data);
  4487. const systemId = new Uint8Array([
  4488. 0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86,
  4489. 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95,
  4490. ]);
  4491. const keyIds = new Set();
  4492. const psshVersion = 0;
  4493. const pssh =
  4494. shaka.util.Pssh.createPssh(data, systemId, keyIds, psshVersion);
  4495. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4496. 'com.microsoft.playready', encryptionScheme, [
  4497. {initDataType: 'cenc', initData: pssh},
  4498. ]);
  4499. const input = shaka.util.TXml.parseXmlString([
  4500. '<PLAYREADY>',
  4501. shaka.util.Uint8ArrayUtils.toBase64(data),
  4502. '</PLAYREADY>',
  4503. ].join('\n'));
  4504. if (input) {
  4505. drmInfo.licenseServerUri = shaka.drm.PlayReady.getLicenseUrl(input);
  4506. }
  4507. return Promise.resolve(drmInfo);
  4508. }
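// Illustrative example (not part of the parser, values hypothetical): a
// PlayReady key tag typically looks like
//
//   #EXT-X-KEY:METHOD=SAMPLE-AES-CTR,
//     URI="data:text/plain;base64,<base64-encoded PlayReady Object>",
//     KEYFORMAT="com.microsoft.playready",KEYFORMATVERSIONS="1"
//
// (wrapped for readability). The decoded PlayReady Object is wrapped in a
// version-0 PSSH box with the PlayReady system ID
// (9a04f079-9840-4286-ab92-e65be0885f95) to form 'cenc' init data, and the
// license server URI, when present, is read from the object's XML.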
  4509. /**
  4510. * @param {!shaka.hls.Tag} drmTag
  4511. * @param {string} mimeType
  4512. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  4513. * @return {!Promise<?shaka.extern.DrmInfo>}
  4514. * @private
  4515. */
  4516. wiseplayDrmParser_(drmTag, mimeType, initSegmentRef) {
  4517. const method = drmTag.getRequiredAttrValue('METHOD');
  4518. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4519. if (!VALID_METHODS.includes(method)) {
  4520. shaka.log.error('WisePlay in HLS is only supported with [',
  4521. VALID_METHODS.join(', '), '], not', method);
  4522. return Promise.resolve(null);
  4523. }
  4524. let encryptionScheme = 'cenc';
  4525. if (method == 'SAMPLE-AES') {
  4526. encryptionScheme = 'cbcs';
  4527. }
  4528. const uri = drmTag.getRequiredAttrValue('URI');
  4529. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
  4530. // The data encoded in the URI is a PSSH box to be used as init data.
  4531. const pssh = shaka.util.BufferUtils.toUint8(parsedData.data);
  4532. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4533. 'com.huawei.wiseplay', encryptionScheme, [
  4534. {initDataType: 'cenc', initData: pssh},
  4535. ]);
  4536. const keyId = drmTag.getAttributeValue('KEYID');
  4537. if (keyId) {
  4538. const keyIdLowerCase = keyId.toLowerCase();
  4539. // This value should begin with '0x':
  4540. goog.asserts.assert(
  4541. keyIdLowerCase.startsWith('0x'), 'Incorrect KEYID format!');
  4542. // But the output should not contain the '0x':
  4543. drmInfo.keyIds.add(keyIdLowerCase.substr(2));
  4544. }
  4545. return Promise.resolve(drmInfo);
  4546. }
  4547. /**
  4548. * See: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-5.1
  4549. *
  4550. * @param {!shaka.hls.Tag} drmTag
  4551. * @param {string} mimeType
  4552. * @param {function(): !Array<string>} getUris
  4553. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  4554. * @param {?Map<string, string>=} variables
  4555. * @return {!Promise<?shaka.extern.DrmInfo>}
  4556. * @private
  4557. */
  4558. async identityDrmParser_(drmTag, mimeType, getUris, initSegmentRef,
  4559. variables) {
  4560. if (mimeType == 'video/mp2t') {
  4561. throw new shaka.util.Error(
  4562. shaka.util.Error.Severity.CRITICAL,
  4563. shaka.util.Error.Category.MANIFEST,
  4564. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  4565. }
  4566. if (shaka.drm.DrmUtils.isMediaKeysPolyfilled('apple')) {
  4567. throw new shaka.util.Error(
  4568. shaka.util.Error.Severity.CRITICAL,
  4569. shaka.util.Error.Category.MANIFEST,
  4570. shaka.util.Error.Code
  4571. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  4572. }
  4573. const method = drmTag.getRequiredAttrValue('METHOD');
  4574. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4575. if (!VALID_METHODS.includes(method)) {
  4576. shaka.log.error('Identity (ClearKey) in HLS is only supported with [',
  4577. VALID_METHODS.join(', '), '], not', method);
  4578. return null;
  4579. }
  4580. const keyUris = shaka.hls.Utils.constructSegmentUris(
  4581. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  4582. let key;
  4583. if (keyUris[0].startsWith('data:text/plain;base64,')) {
  4584. key = shaka.util.Uint8ArrayUtils.toHex(
  4585. shaka.util.Uint8ArrayUtils.fromBase64(
  4586. keyUris[0].split('data:text/plain;base64,').pop()));
  4587. } else {
  4588. const keyMapKey = keyUris.sort().join('');
  4589. if (!this.identityKeyMap_.has(keyMapKey)) {
  4590. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  4591. const request = shaka.net.NetworkingEngine.makeRequest(
  4592. keyUris, this.config_.retryParameters);
  4593. const keyResponse = this.makeNetworkRequest_(request, requestType)
  4594. .promise;
  4595. this.identityKeyMap_.set(keyMapKey, keyResponse);
  4596. }
  4597. const keyResponse = await this.identityKeyMap_.get(keyMapKey);
  4598. key = shaka.util.Uint8ArrayUtils.toHex(keyResponse.data);
  4599. }
  4600. // NOTE: The ClearKey CDM requires a key-id to key mapping. HLS doesn't
  4601. // provide a key ID anywhere. So although we could use the 'URI' attribute
  4602. // to fetch the actual 16-byte key, without a key ID, we can't provide this
4603. // automatically to the ClearKey CDM. By default we assume the key ID is 0,
4604. // but we will try to get the key ID from the init segment.
4605. // If the application wants to override this behavior, it will have to use
4606. // player.configure('drm.clearKeys', { ... }) to provide the key IDs
4607. // and keys, or player.configure('drm.servers.org\.w3\.clearkey', ...) to
4608. // provide a ClearKey license server URI.
  4609. let keyId = '00000000000000000000000000000000';
  4610. if (initSegmentRef) {
  4611. const defaultKID = await this.getKeyIdFromInitSegment_(initSegmentRef);
  4612. if (defaultKID) {
  4613. keyId = defaultKID;
  4614. }
  4615. }
  4616. const clearkeys = new Map();
  4617. clearkeys.set(keyId, key);
  4618. let encryptionScheme = 'cenc';
  4619. if (method == 'SAMPLE-AES') {
  4620. encryptionScheme = 'cbcs';
  4621. }
  4622. return shaka.util.ManifestParserUtils.createDrmInfoFromClearKeys(
  4623. clearkeys, encryptionScheme);
  4624. }
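// Illustrative example (not part of the parser, values hypothetical): when
// the default key ID guessed above does not match the content, an
// application can provide the mapping itself:
//
//   player.configure('drm.clearKeys', {
//     // 16-byte key ID (hex) : 16-byte key (hex)
//     'deadbeefdeadbeefdeadbeefdeadbeef': '02030507011013017019023029031037',
//   });
//
// or configure 'drm.servers.org\.w3\.clearkey' with a ClearKey license
// server URI, as noted in the comment above.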
  4625. /**
  4626. * @param {!shaka.media.InitSegmentReference} initSegmentRef
  4627. * @return {!Promise<?string>}
  4628. * @private
  4629. */
  4630. async getKeyIdFromInitSegment_(initSegmentRef) {
  4631. let keyId = null;
  4632. if (this.initSegmentToKidMap_.has(initSegmentRef)) {
  4633. keyId = this.initSegmentToKidMap_.get(initSegmentRef);
  4634. } else {
  4635. const initSegmentRequest = shaka.util.Networking.createSegmentRequest(
  4636. initSegmentRef.getUris(),
  4637. initSegmentRef.getStartByte(),
  4638. initSegmentRef.getEndByte(),
  4639. this.config_.retryParameters);
  4640. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  4641. const initType =
  4642. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  4643. const initResponse = await this.makeNetworkRequest_(
  4644. initSegmentRequest, requestType, {type: initType}).promise;
  4645. initSegmentRef.setSegmentData(initResponse.data);
  4646. keyId = shaka.media.SegmentUtils.getDefaultKID(
  4647. initResponse.data);
  4648. this.initSegmentToKidMap_.set(initSegmentRef, keyId);
  4649. }
  4650. return keyId;
  4651. }
  4652. };
  4653. /**
  4654. * @typedef {{
  4655. * stream: !shaka.extern.Stream,
  4656. * type: string,
  4657. * redirectUris: !Array<string>,
  4658. * getUris: function():!Array<string>,
  4659. * minTimestamp: number,
  4660. * maxTimestamp: number,
  4661. * mediaSequenceToStartTime: !Map<number, number>,
  4662. * canSkipSegments: boolean,
  4663. * canBlockReload: boolean,
  4664. * hasEndList: boolean,
  4665. * firstSequenceNumber: number,
  4666. * nextMediaSequence: number,
  4667. * nextPart: number,
  4668. * loadedOnce: boolean
  4669. * }}
  4670. *
  4671. * @description
  4672. * Contains a stream and information about it.
  4673. *
  4674. * @property {!shaka.extern.Stream} stream
  4675. * The Stream itself.
  4676. * @property {string} type
  4677. * The type value. Could be 'video', 'audio', 'text', or 'image'.
  4678. * @property {!Array<string>} redirectUris
  4679. * The redirect URIs.
  4680. * @property {function():!Array<string>} getUris
4681. * The verbatim media playlist URIs, as they appeared in the master playlist.
  4682. * @property {number} minTimestamp
  4683. * The minimum timestamp found in the stream.
  4684. * @property {number} maxTimestamp
  4685. * The maximum timestamp found in the stream.
  4686. * @property {!Map<number, number>} mediaSequenceToStartTime
  4687. * A map of media sequence numbers to media start times.
  4688. * Only used for VOD content.
  4689. * @property {boolean} canSkipSegments
  4690. * True if the server supports delta playlist updates, and we can send a
  4691. * request for a playlist that can skip older media segments.
  4692. * @property {boolean} canBlockReload
  4693. * True if the server supports blocking playlist reload, and we can send a
  4694. * request for a playlist that can block reload until some segments are
  4695. * present.
  4696. * @property {boolean} hasEndList
  4697. * True if the stream has an EXT-X-ENDLIST tag.
  4698. * @property {number} firstSequenceNumber
  4699. * The sequence number of the first reference. Only calculated if needed.
  4700. * @property {number} nextMediaSequence
  4701. * The next media sequence.
  4702. * @property {number} nextPart
  4703. * The next part.
  4704. * @property {boolean} loadedOnce
  4705. * True if the stream has been loaded at least once.
  4706. */
  4707. shaka.hls.HlsParser.StreamInfo;
  4708. /**
  4709. * @typedef {{
  4710. * audio: !Array<shaka.hls.HlsParser.StreamInfo>,
  4711. * video: !Array<shaka.hls.HlsParser.StreamInfo>
  4712. * }}
  4713. *
  4714. * @description Audio and video stream infos.
  4715. * @property {!Array<shaka.hls.HlsParser.StreamInfo>} audio
  4716. * @property {!Array<shaka.hls.HlsParser.StreamInfo>} video
  4717. */
  4718. shaka.hls.HlsParser.StreamInfos;
  4719. /**
  4720. * @const {!Map<string, string>}
  4721. * @private
  4722. */
  4723. shaka.hls.HlsParser.RAW_FORMATS_TO_MIME_TYPES_ = new Map()
  4724. .set('aac', 'audio/aac')
  4725. .set('ac3', 'audio/ac3')
  4726. .set('ec3', 'audio/ec3')
  4727. .set('mp3', 'audio/mpeg');
  4728. /**
  4729. * @const {!Map<string, string>}
  4730. * @private
  4731. */
  4732. shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_ = new Map()
  4733. .set('mp4', 'audio/mp4')
  4734. .set('mp4a', 'audio/mp4')
  4735. .set('m4s', 'audio/mp4')
  4736. .set('m4i', 'audio/mp4')
  4737. .set('m4a', 'audio/mp4')
  4738. .set('m4f', 'audio/mp4')
  4739. .set('cmfa', 'audio/mp4')
  4740. // MPEG2-TS also uses video/ for audio: https://bit.ly/TsMse
  4741. .set('ts', 'video/mp2t')
  4742. .set('tsa', 'video/mp2t');
  4743. /**
  4744. * @const {!Map<string, string>}
  4745. * @private
  4746. */
  4747. shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_ = new Map()
  4748. .set('mp4', 'video/mp4')
  4749. .set('mp4v', 'video/mp4')
  4750. .set('m4s', 'video/mp4')
  4751. .set('m4i', 'video/mp4')
  4752. .set('m4v', 'video/mp4')
  4753. .set('m4f', 'video/mp4')
  4754. .set('cmfv', 'video/mp4')
  4755. .set('ts', 'video/mp2t')
  4756. .set('tsv', 'video/mp2t');
  4757. /**
  4758. * @const {!Map<string, string>}
  4759. * @private
  4760. */
  4761. shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_ = new Map()
  4762. .set('mp4', 'application/mp4')
  4763. .set('m4s', 'application/mp4')
  4764. .set('m4i', 'application/mp4')
  4765. .set('m4f', 'application/mp4')
  4766. .set('cmft', 'application/mp4')
  4767. .set('vtt', 'text/vtt')
  4768. .set('webvtt', 'text/vtt')
  4769. .set('ttml', 'application/ttml+xml');
  4770. /**
  4771. * @const {!Map<string, string>}
  4772. * @private
  4773. */
  4774. shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_ = new Map()
  4775. .set('jpg', 'image/jpeg')
  4776. .set('png', 'image/png')
  4777. .set('svg', 'image/svg+xml')
  4778. .set('webp', 'image/webp')
  4779. .set('avif', 'image/avif');
  4780. /**
  4781. * @const {!Map<string, !Map<string, string>>}
  4782. * @private
  4783. */
  4784. shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_ = new Map()
  4785. .set('audio', shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_)
  4786. .set('video', shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_)
  4787. .set('text', shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_)
  4788. .set('image', shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_);
  4789. /**
  4790. * MIME types without init segment.
  4791. *
  4792. * @const {!Set<string>}
  4793. * @private
  4794. */
  4795. shaka.hls.HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_ = new Set([
  4796. 'video/mp2t',
  4797. // Containerless types
  4798. ...shaka.util.MimeUtils.RAW_FORMATS,
  4799. ]);
  4800. /**
  4801. * @typedef {function(!shaka.hls.Tag,
  4802. * string,
  4803. * ?shaka.media.InitSegmentReference):
  4804. * !Promise<?shaka.extern.DrmInfo>}
  4805. * @private
  4806. */
  4807. shaka.hls.HlsParser.DrmParser_;
  4808. /**
  4809. * @enum {string}
  4810. * @private
  4811. */
  4812. shaka.hls.HlsParser.PresentationType_ = {
  4813. VOD: 'VOD',
  4814. EVENT: 'EVENT',
  4815. LIVE: 'LIVE',
  4816. };
  4817. /**
  4818. * @const {string}
  4819. * @private
  4820. */
  4821. shaka.hls.HlsParser.FAKE_MUXED_URL_ = 'shaka://hls-muxed';
  4822. shaka.media.ManifestParser.registerParserByMime(
  4823. 'application/x-mpegurl', () => new shaka.hls.HlsParser());
  4824. shaka.media.ManifestParser.registerParserByMime(
  4825. 'application/vnd.apple.mpegurl', () => new shaka.hls.HlsParser());
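// Illustrative usage (not part of the parser; element and URI hypothetical):
// because of the registrations above, applications do not instantiate
// HlsParser directly; it is chosen automatically when the manifest's MIME
// type matches one of the registered HLS types:
//
//   const player = new shaka.Player();
//   await player.attach(videoElement);
//   await player.load('https://example.com/master.m3u8');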