Source: lib/hls/hls_parser.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.hls.HlsParser');
  7. goog.require('goog.Uri');
  8. goog.require('goog.asserts');
  9. goog.require('shaka.abr.Ewma');
  10. goog.require('shaka.hls.ManifestTextParser');
  11. goog.require('shaka.hls.Playlist');
  12. goog.require('shaka.hls.PlaylistType');
  13. goog.require('shaka.hls.Tag');
  14. goog.require('shaka.hls.Utils');
  15. goog.require('shaka.log');
  16. goog.require('shaka.media.InitSegmentReference');
  17. goog.require('shaka.media.ManifestParser');
  18. goog.require('shaka.media.PresentationTimeline');
  19. goog.require('shaka.media.QualityObserver');
  20. goog.require('shaka.media.SegmentIndex');
  21. goog.require('shaka.media.SegmentReference');
  22. goog.require('shaka.net.DataUriPlugin');
  23. goog.require('shaka.net.NetworkingEngine');
  24. goog.require('shaka.util.ArrayUtils');
  25. goog.require('shaka.util.BufferUtils');
  26. goog.require('shaka.util.DrmUtils');
  27. goog.require('shaka.util.ContentSteeringManager');
  28. goog.require('shaka.util.Error');
  29. goog.require('shaka.util.EventManager');
  30. goog.require('shaka.util.FakeEvent');
  31. goog.require('shaka.util.LanguageUtils');
  32. goog.require('shaka.util.ManifestParserUtils');
  33. goog.require('shaka.util.MimeUtils');
  34. goog.require('shaka.util.Networking');
  35. goog.require('shaka.util.OperationManager');
  36. goog.require('shaka.util.Pssh');
  37. goog.require('shaka.media.SegmentUtils');
  38. goog.require('shaka.util.Timer');
  39. goog.require('shaka.util.TsParser');
  40. goog.require('shaka.util.TXml');
  41. goog.require('shaka.util.Platform');
  42. goog.require('shaka.util.Uint8ArrayUtils');
  43. goog.requireType('shaka.hls.Segment');
  44. /**
  45. * HLS parser.
  46. *
  47. * @implements {shaka.extern.ManifestParser}
  48. * @export
  49. */
  50. shaka.hls.HlsParser = class {
  51. /**
  52. * Creates an Hls Parser object.
  53. */
  54. constructor() {
  55. /** @private {?shaka.extern.ManifestParser.PlayerInterface} */
  56. this.playerInterface_ = null;
  57. /** @private {?shaka.extern.ManifestConfiguration} */
  58. this.config_ = null;
  59. /** @private {number} */
  60. this.globalId_ = 1;
  61. /** @private {!Map.<string, string>} */
  62. this.globalVariables_ = new Map();
  63. /**
  64. * A map from group id to stream infos created from the media tags.
  65. * @private {!Map.<string, !Array.<?shaka.hls.HlsParser.StreamInfo>>}
  66. */
  67. this.groupIdToStreamInfosMap_ = new Map();
  68. /**
  69. * For media playlist lazy-loading to work in livestreams, we have to assume
  70. * that each stream of a type (video, audio, etc) has the same mappings of
  71. * sequence number to start time.
  72. * This map stores those relationships.
  73. * Only used during livestreams; we do not assume that VOD content is
  74. * aligned in that way.
  75. * @private {!Map.<string, !Map.<number, number>>}
  76. */
  77. this.mediaSequenceToStartTimeByType_ = new Map();
  78. // Set initial maps.
  79. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  80. this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map());
  81. this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map());
  82. this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map());
  83. this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map());
  84. /**
  85. * The values are strings of the form "<VIDEO URI> - <AUDIO URI>",
  86. * where the URIs are the verbatim media playlist URIs as they appeared in
  87. * the master playlist.
  88. *
  89. * Used to avoid duplicates that vary only in their text stream.
  90. *
  91. * @private {!Set.<string>}
  92. */
  93. this.variantUriSet_ = new Set();
  94. /**
  95. * A map from (verbatim) media playlist URI to stream infos representing the
  96. * playlists.
  97. *
  98. * On update, used to iterate through and update from media playlists.
  99. *
  100. * On initial parse, used to iterate through and determine minimum
  101. * timestamps, offsets, and to handle TS rollover.
  102. *
  103. * During parsing, used to avoid duplicates in the async methods
  104. * createStreamInfoFromMediaTags_, createStreamInfoFromImageTag_ and
  105. * createStreamInfoFromVariantTags_.
  106. *
  107. * @private {!Map.<string, shaka.hls.HlsParser.StreamInfo>}
  108. */
  109. this.uriToStreamInfosMap_ = new Map();
  110. /** @private {?shaka.media.PresentationTimeline} */
  111. this.presentationTimeline_ = null;
  112. /**
  113. * The master playlist URI, after redirects.
  114. *
  115. * @private {string}
  116. */
  117. this.masterPlaylistUri_ = '';
  118. /** @private {shaka.hls.ManifestTextParser} */
  119. this.manifestTextParser_ = new shaka.hls.ManifestTextParser();
  120. /**
  121. * The minimum sequence number for generated segments, when ignoring
  122. * EXT-X-PROGRAM-DATE-TIME.
  123. *
  124. * @private {number}
  125. */
  126. this.minSequenceNumber_ = -1;
  127. /**
  128. * The lowest time value for any of the streams, as defined by the
  129. * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970.
  130. *
  131. * @private {number}
  132. */
  133. this.lowestSyncTime_ = Infinity;
  134. /**
  135. * Whether the streams have previously been "finalized"; that is to say,
  136. * whether we have loaded enough streams to know information about the asset
  137. * such as timing information, live status, etc.
  138. *
  139. * @private {boolean}
  140. */
  141. this.streamsFinalized_ = false;
  142. /**
  143. * Whether the manifest informs about the codec to use.
  144. *
  145. * @private
  146. */
  147. this.codecInfoInManifest_ = false;
  148. /**
  149. * This timer is used to trigger the start of a manifest update. A manifest
  150. * update is async. Once the update is finished, the timer will be restarted
  151. * to trigger the next update. The timer will only be started if the content
  152. * is live content.
  153. *
  154. * @private {shaka.util.Timer}
  155. */
  156. this.updatePlaylistTimer_ = new shaka.util.Timer(() => {
  157. if (this.mediaElement_ && !this.config_.continueLoadingWhenPaused) {
  158. this.eventManager_.unlisten(this.mediaElement_, 'timeupdate');
  159. if (this.mediaElement_.paused) {
  160. this.eventManager_.listenOnce(
  161. this.mediaElement_, 'timeupdate', () => this.onUpdate_());
  162. return;
  163. }
  164. }
  165. this.onUpdate_();
  166. });
  167. /** @private {shaka.hls.HlsParser.PresentationType_} */
  168. this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD;
  169. /** @private {?shaka.extern.Manifest} */
  170. this.manifest_ = null;
  171. /** @private {number} */
  172. this.maxTargetDuration_ = 0;
  173. /** @private {number} */
  174. this.lastTargetDuration_ = Infinity;
  175. /** Partial segments target duration.
  176. * @private {number}
  177. */
  178. this.partialTargetDuration_ = 0;
  179. /** @private {number} */
  180. this.presentationDelay_ = 0;
  181. /** @private {number} */
  182. this.lowLatencyPresentationDelay_ = 0;
  183. /** @private {shaka.util.OperationManager} */
  184. this.operationManager_ = new shaka.util.OperationManager();
  185. /** A map from closed captions' group id, to a map of closed captions info.
  186. * {group id -> {closed captions channel id -> language}}
  187. * @private {Map.<string, Map.<string, string>>}
  188. */
  189. this.groupIdToClosedCaptionsMap_ = new Map();
  190. /** @private {Map.<string, string>} */
  191. this.groupIdToCodecsMap_ = new Map();
  192. /** A cache mapping EXT-X-MAP tag info to the InitSegmentReference created
  193. * from the tag.
  194. * The key is a string combining the EXT-X-MAP tag's absolute uri, and
  195. * its BYTERANGE if available.
  196. * {!Map.<string, !shaka.media.InitSegmentReference>} */
  197. this.mapTagToInitSegmentRefMap_ = new Map();
  198. /** @private {Map.<string, !shaka.extern.aesKey>} */
  199. this.aesKeyInfoMap_ = new Map();
  200. /** @private {Map.<string, !Promise.<shaka.extern.Response>>} */
  201. this.aesKeyMap_ = new Map();
  202. /** @private {Map.<string, !Promise.<shaka.extern.Response>>} */
  203. this.identityKeyMap_ = new Map();
  204. /** @private {Map.<!shaka.media.InitSegmentReference, ?string>} */
  205. this.identityKidMap_ = new Map();
  206. /** @private {boolean} */
  207. this.lowLatencyMode_ = false;
  208. /** @private {boolean} */
  209. this.lowLatencyByterangeOptimization_ = false;
  210. /**
  211. * An ewma that tracks how long updates take.
  212. * This is to mitigate issues caused by slow parsing on embedded devices.
  213. * @private {!shaka.abr.Ewma}
  214. */
  215. this.averageUpdateDuration_ = new shaka.abr.Ewma(5);
  216. /** @private {?shaka.util.ContentSteeringManager} */
  217. this.contentSteeringManager_ = null;
  218. /** @private {boolean} */
  219. this.needsClosedCaptionsDetection_ = true;
  220. /** @private {Set.<string>} */
  221. this.dateRangeIdsEmitted_ = new Set();
  222. /** @private {shaka.util.EventManager} */
  223. this.eventManager_ = new shaka.util.EventManager();
  224. /** @private {HTMLMediaElement} */
  225. this.mediaElement_ = null;
  226. }
  227. /**
  228. * @override
  229. * @exportInterface
  230. */
  231. configure(config) {
  232. this.config_ = config;
  233. if (this.contentSteeringManager_) {
  234. this.contentSteeringManager_.configure(this.config_);
  235. }
  236. }
  237. /**
  238. * @override
  239. * @exportInterface
  240. */
  241. async start(uri, playerInterface) {
  242. goog.asserts.assert(this.config_, 'Must call configure() before start()!');
  243. this.playerInterface_ = playerInterface;
  244. this.lowLatencyMode_ = playerInterface.isLowLatencyMode();
  245. const response = await this.requestManifest_([uri]);
  246. // Record the master playlist URI after redirects.
  247. this.masterPlaylistUri_ = response.uri;
  248. goog.asserts.assert(response.data, 'Response data should be non-null!');
  249. await this.parseManifest_(response.data, uri);
  250. goog.asserts.assert(this.manifest_, 'Manifest should be non-null');
  251. return this.manifest_;
  252. }
  253. /**
  254. * @override
  255. * @exportInterface
  256. */
  257. stop() {
  258. // Make sure we don't update the manifest again. Even if the timer is not
  259. // running, this is safe to call.
  260. if (this.updatePlaylistTimer_) {
  261. this.updatePlaylistTimer_.stop();
  262. this.updatePlaylistTimer_ = null;
  263. }
  264. /** @type {!Array.<!Promise>} */
  265. const pending = [];
  266. if (this.operationManager_) {
  267. pending.push(this.operationManager_.destroy());
  268. this.operationManager_ = null;
  269. }
  270. this.playerInterface_ = null;
  271. this.config_ = null;
  272. this.variantUriSet_.clear();
  273. this.manifest_ = null;
  274. this.uriToStreamInfosMap_.clear();
  275. this.groupIdToStreamInfosMap_.clear();
  276. this.groupIdToCodecsMap_.clear();
  277. this.globalVariables_.clear();
  278. this.mapTagToInitSegmentRefMap_.clear();
  279. this.aesKeyInfoMap_.clear();
  280. this.aesKeyMap_.clear();
  281. this.identityKeyMap_.clear();
  282. this.identityKidMap_.clear();
  283. this.dateRangeIdsEmitted_.clear();
  284. if (this.contentSteeringManager_) {
  285. this.contentSteeringManager_.destroy();
  286. }
  287. if (this.eventManager_) {
  288. this.eventManager_.release();
  289. this.eventManager_ = null;
  290. }
  291. return Promise.all(pending);
  292. }
  293. /**
  294. * @override
  295. * @exportInterface
  296. */
  297. async update() {
  298. if (!this.isLive_()) {
  299. return;
  300. }
  301. /** @type {!Array.<!Promise>} */
  302. const updates = [];
  303. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  304. // This is necessary to calculate correctly the update time.
  305. this.lastTargetDuration_ = Infinity;
  306. this.manifest_.gapCount = 0;
  307. // Only update active streams.
  308. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex);
  309. for (const streamInfo of activeStreamInfos) {
  310. updates.push(this.updateStream_(streamInfo));
  311. }
  312. await Promise.all(updates);
  313. // Now that streams have been updated, notify the presentation timeline.
  314. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream));
  315. // If any hasEndList is false, the stream is still live.
  316. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false);
  317. if (activeStreamInfos.length && !stillLive) {
  318. // Convert the presentation to VOD and set the duration.
  319. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  320. this.setPresentationType_(PresentationType.VOD);
  321. // The duration is the minimum of the end times of all active streams.
  322. // Non-active streams are not guaranteed to have useful maxTimestamp
  323. // values, due to the lazy-loading system, so they are ignored.
  324. const maxTimestamps = activeStreamInfos.map((s) => s.maxTimestamp);
  325. // The duration is the minimum of the end times of all streams.
  326. this.presentationTimeline_.setDuration(Math.min(...maxTimestamps));
  327. this.playerInterface_.updateDuration();
  328. }
  329. if (stillLive) {
  330. this.determineDuration_();
  331. }
  332. // Check if any playlist does not have the first reference (due to a
  333. // problem in the live encoder for example), and disable the stream if
  334. // necessary.
  335. for (const streamInfo of activeStreamInfos) {
  336. if (streamInfo.stream.segmentIndex &&
  337. !streamInfo.stream.segmentIndex.earliestReference()) {
  338. this.playerInterface_.disableStream(streamInfo.stream);
  339. }
  340. }
  341. }
  342. /**
  343. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  344. * @return {!Map.<number, number>}
  345. * @private
  346. */
  347. getMediaSequenceToStartTimeFor_(streamInfo) {
  348. if (this.isLive_()) {
  349. return this.mediaSequenceToStartTimeByType_.get(streamInfo.type);
  350. } else {
  351. return streamInfo.mediaSequenceToStartTime;
  352. }
  353. }
  354. /**
  355. * Updates a stream.
  356. *
  357. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  358. * @return {!Promise}
  359. * @private
  360. */
  361. async updateStream_(streamInfo) {
  362. const manifestUris = [];
  363. for (const uri of streamInfo.getUris()) {
  364. const uriObj = new goog.Uri(uri);
  365. const queryData = uriObj.getQueryData();
  366. if (streamInfo.canBlockReload) {
  367. if (streamInfo.nextMediaSequence >= 0) {
  368. // Indicates that the server must hold the request until a Playlist
  369. // contains a Media Segment with Media Sequence
  370. queryData.add('_HLS_msn', String(streamInfo.nextMediaSequence));
  371. }
  372. if (streamInfo.nextPart >= 0) {
  373. // Indicates, in combination with _HLS_msn, that the server must hold
  374. // the request until a Playlist contains Partial Segment N of Media
  375. // Sequence Number M or later.
  376. queryData.add('_HLS_part', String(streamInfo.nextPart));
  377. }
  378. }
  379. if (streamInfo.canSkipSegments) {
  380. // Enable delta updates. This will replace older segments with
  381. // 'EXT-X-SKIP' tag in the media playlist.
  382. queryData.add('_HLS_skip', 'YES');
  383. }
  384. if (queryData.getCount()) {
  385. uriObj.setQueryData(queryData);
  386. }
  387. manifestUris.push(uriObj.toString());
  388. }
  389. let response;
  390. try {
  391. response =
  392. await this.requestManifest_(manifestUris, /* isPlaylist= */ true);
  393. } catch (e) {
  394. if (this.playerInterface_) {
  395. this.playerInterface_.disableStream(streamInfo.stream);
  396. }
  397. throw e;
  398. }
  399. if (!streamInfo.stream.segmentIndex) {
  400. // The stream was closed since the update was first requested.
  401. return;
  402. }
  403. /** @type {shaka.hls.Playlist} */
  404. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  405. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  406. throw new shaka.util.Error(
  407. shaka.util.Error.Severity.CRITICAL,
  408. shaka.util.Error.Category.MANIFEST,
  409. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  410. }
  411. // Record the final URI after redirects.
  412. const responseUri = response.uri;
  413. if (responseUri != response.originalUri &&
  414. !streamInfo.getUris().includes(responseUri)) {
  415. streamInfo.redirectUris.push(responseUri);
  416. }
  417. /** @type {!Array.<!shaka.hls.Tag>} */
  418. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  419. 'EXT-X-DEFINE');
  420. const mediaVariables = this.parseMediaVariables_(
  421. variablesTags, responseUri);
  422. const stream = streamInfo.stream;
  423. const mediaSequenceToStartTime =
  424. this.getMediaSequenceToStartTimeFor_(streamInfo);
  425. const {keyIds, drmInfos} = await this.parseDrmInfo_(
  426. playlist, stream.mimeType, streamInfo.getUris, mediaVariables);
  427. const keysAreEqual =
  428. (a, b) => a.size === b.size && [...a].every((value) => b.has(value));
  429. if (!keysAreEqual(stream.keyIds, keyIds)) {
  430. stream.keyIds = keyIds;
  431. stream.drmInfos = drmInfos;
  432. this.playerInterface_.newDrmInfo(stream);
  433. }
  434. const {segments, bandwidth} = this.createSegments_(
  435. playlist, mediaSequenceToStartTime, mediaVariables,
  436. streamInfo.getUris, streamInfo.type);
  437. if (bandwidth) {
  438. stream.bandwidth = bandwidth;
  439. }
  440. const qualityInfo =
  441. shaka.media.QualityObserver.createQualityInfo(stream);
  442. for (const segment of segments) {
  443. if (segment.initSegmentReference) {
  444. segment.initSegmentReference.mediaQuality = qualityInfo;
  445. }
  446. }
  447. stream.segmentIndex.mergeAndEvict(
  448. segments, this.presentationTimeline_.getSegmentAvailabilityStart());
  449. if (segments.length) {
  450. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  451. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  452. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  453. playlist.tags, 'EXT-X-SKIP');
  454. const skippedSegments =
  455. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  456. const {nextMediaSequence, nextPart} =
  457. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  458. streamInfo.nextMediaSequence = nextMediaSequence + skippedSegments;
  459. streamInfo.nextPart = nextPart;
  460. const playlistStartTime = mediaSequenceToStartTime.get(
  461. mediaSequenceNumber);
  462. stream.segmentIndex.evict(playlistStartTime);
  463. }
  464. const oldSegment = stream.segmentIndex.earliestReference();
  465. if (oldSegment) {
  466. streamInfo.minTimestamp = oldSegment.startTime;
  467. const newestSegment = segments[segments.length - 1];
  468. goog.asserts.assert(newestSegment, 'Should have segments!');
  469. streamInfo.maxTimestamp = newestSegment.endTime;
  470. }
  471. // Once the last segment has been added to the playlist,
  472. // #EXT-X-ENDLIST tag will be appended.
  473. // If that happened, treat the rest of the EVENT presentation as VOD.
  474. const endListTag =
  475. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  476. if (endListTag) {
  477. // Flag this for later. We don't convert the whole presentation into VOD
  478. // until we've seen the ENDLIST tag for all active playlists.
  479. streamInfo.hasEndList = true;
  480. }
  481. this.determineLastTargetDuration_(playlist);
  482. this.processDateRangeTags_(
  483. playlist.tags, stream.type, mediaVariables, streamInfo.getUris);
  484. }
  485. /**
  486. * @override
  487. * @exportInterface
  488. */
  489. onExpirationUpdated(sessionId, expiration) {
  490. // No-op
  491. }
  492. /**
  493. * @override
  494. * @exportInterface
  495. */
  496. onInitialVariantChosen(variant) {
  497. // No-op
  498. }
  499. /**
  500. * @override
  501. * @exportInterface
  502. */
  503. banLocation(uri) {
  504. if (this.contentSteeringManager_) {
  505. this.contentSteeringManager_.banLocation(uri);
  506. }
  507. }
  508. /**
  509. * @override
  510. * @exportInterface
  511. */
  512. setMediaElement(mediaElement) {
  513. this.mediaElement_ = mediaElement;
  514. }
  515. /**
  516. * Align the streams by sequence number by dropping early segments. Then
  517. * offset the streams to begin at presentation time 0.
  518. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  519. * @private
  520. */
  521. syncStreamsWithSequenceNumber_(streamInfos) {
  522. // We assume that, when this is first called, we have enough info to
  523. // determine how to use the program date times (e.g. we have both a video
  524. // and an audio, and all other videos and audios match those).
  525. // Thus, we only need to calculate this once.
  526. const updateMinSequenceNumber = this.minSequenceNumber_ == -1;
  527. // Sync using media sequence number. Find the highest starting sequence
  528. // number among all streams. Later, we will drop any references to
  529. // earlier segments in other streams, then offset everything back to 0.
  530. for (const streamInfo of streamInfos) {
  531. const segmentIndex = streamInfo.stream.segmentIndex;
  532. goog.asserts.assert(segmentIndex,
  533. 'Only loaded streams should be synced');
  534. const mediaSequenceToStartTime =
  535. this.getMediaSequenceToStartTimeFor_(streamInfo);
  536. const segment0 = segmentIndex.earliestReference();
  537. if (segment0) {
  538. // This looks inefficient, but iteration order is insertion order.
  539. // So the very first entry should be the one we want.
  540. // We assert that this holds true so that we are alerted by debug
  541. // builds and tests if it changes. We still do a loop, though, so
  542. // that the code functions correctly in production no matter what.
  543. if (goog.DEBUG) {
  544. const firstSequenceStartTime =
  545. mediaSequenceToStartTime.values().next().value;
  546. if (firstSequenceStartTime != segment0.startTime) {
  547. shaka.log.warning(
  548. 'Sequence number map is not ordered as expected!');
  549. }
  550. }
  551. for (const [sequence, start] of mediaSequenceToStartTime) {
  552. if (start == segment0.startTime) {
  553. if (updateMinSequenceNumber) {
  554. this.minSequenceNumber_ = Math.max(
  555. this.minSequenceNumber_, sequence);
  556. }
  557. // Even if we already have decided on a value for
  558. // |this.minSequenceNumber_|, we still need to determine the first
  559. // sequence number for the stream, to offset it in the code below.
  560. streamInfo.firstSequenceNumber = sequence;
  561. break;
  562. }
  563. }
  564. }
  565. }
  566. if (this.minSequenceNumber_ < 0) {
  567. // Nothing to sync.
  568. return;
  569. }
  570. shaka.log.debug('Syncing HLS streams against base sequence number:',
  571. this.minSequenceNumber_);
  572. for (const streamInfo of streamInfos) {
  573. if (!this.ignoreManifestProgramDateTimeFor_(streamInfo.type)) {
  574. continue;
  575. }
  576. const segmentIndex = streamInfo.stream.segmentIndex;
  577. if (segmentIndex) {
  578. // Drop any earlier references.
  579. const numSegmentsToDrop =
  580. this.minSequenceNumber_ - streamInfo.firstSequenceNumber;
  581. if (numSegmentsToDrop > 0) {
  582. segmentIndex.dropFirstReferences(numSegmentsToDrop);
  583. // Now adjust timestamps back to begin at 0.
  584. const segmentN = segmentIndex.earliestReference();
  585. if (segmentN) {
  586. const streamOffset = -segmentN.startTime;
  587. // Modify all SegmentReferences equally.
  588. streamInfo.stream.segmentIndex.offset(streamOffset);
  589. // Update other parts of streamInfo the same way.
  590. this.offsetStreamInfo_(streamInfo, streamOffset);
  591. }
  592. }
  593. }
  594. }
  595. }
  596. /**
  597. * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their
  598. * segments. Also normalizes segment times so that the earliest segment in
  599. * any stream is at time 0.
  600. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  601. * @private
  602. */
  603. syncStreamsWithProgramDateTime_(streamInfos) {
  604. // We assume that, when this is first called, we have enough info to
  605. // determine how to use the program date times (e.g. we have both a video
  606. // and an audio, and all other videos and audios match those).
  607. // Thus, we only need to calculate this once.
  608. if (this.lowestSyncTime_ == Infinity) {
  609. for (const streamInfo of streamInfos) {
  610. const segmentIndex = streamInfo.stream.segmentIndex;
  611. goog.asserts.assert(segmentIndex,
  612. 'Only loaded streams should be synced');
  613. const segment0 = segmentIndex.earliestReference();
  614. if (segment0 != null && segment0.syncTime != null) {
  615. this.lowestSyncTime_ =
  616. Math.min(this.lowestSyncTime_, segment0.syncTime);
  617. }
  618. }
  619. }
  620. const lowestSyncTime = this.lowestSyncTime_;
  621. if (lowestSyncTime == Infinity) {
  622. // Nothing to sync.
  623. return;
  624. }
  625. shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime);
  626. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  627. if (this.ignoreManifestProgramDateTimeFor_(streamInfo.type)) {
  628. continue;
  629. }
  630. const segmentIndex = streamInfo.stream.segmentIndex;
  631. if (segmentIndex != null) {
  632. // A segment's startTime should be based on its syncTime vs the lowest
  633. // syncTime across all streams. The earliest segment sync time from
  634. // any stream will become presentation time 0. If two streams start
  635. // e.g. 6 seconds apart in syncTime, then their first segments will
  636. // also start 6 seconds apart in presentation time.
  637. const segment0 = segmentIndex.earliestReference();
  638. if (!segment0) {
  639. continue;
  640. }
  641. if (segment0.syncTime == null) {
  642. shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream',
  643. streamInfo.getUris(),
  644. 'Expect AV sync issues!');
  645. } else {
  646. // Stream metadata are offset by a fixed amount based on the
  647. // first segment.
  648. const segment0TargetTime = segment0.syncTime - lowestSyncTime;
  649. const streamOffset = segment0TargetTime - segment0.startTime;
  650. this.offsetStreamInfo_(streamInfo, streamOffset);
  651. // This is computed across all segments separately to manage
  652. // accumulated drift in durations.
  653. for (const segment of segmentIndex) {
  654. segment.syncAgainst(lowestSyncTime);
  655. }
  656. }
  657. }
  658. }
  659. }
  660. /**
  661. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  662. * @param {number} offset
  663. * @private
  664. */
  665. offsetStreamInfo_(streamInfo, offset) {
  666. // Adjust our accounting of the minimum timestamp.
  667. streamInfo.minTimestamp += offset;
  668. // Adjust our accounting of the maximum timestamp.
  669. streamInfo.maxTimestamp += offset;
  670. goog.asserts.assert(streamInfo.maxTimestamp >= 0,
  671. 'Negative maxTimestamp after adjustment!');
  672. // Update our map from sequence number to start time.
  673. const mediaSequenceToStartTime =
  674. this.getMediaSequenceToStartTimeFor_(streamInfo);
  675. for (const [key, value] of mediaSequenceToStartTime) {
  676. mediaSequenceToStartTime.set(key, value + offset);
  677. }
  678. shaka.log.debug('Offset', offset, 'applied to',
  679. streamInfo.getUris());
  680. }
  681. /**
  682. * Parses the manifest.
  683. *
  684. * @param {BufferSource} data
  685. * @param {string} uri
  686. * @return {!Promise}
  687. * @private
  688. */
  689. async parseManifest_(data, uri) {
  690. const Utils = shaka.hls.Utils;
  691. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  692. goog.asserts.assert(this.masterPlaylistUri_,
  693. 'Master playlist URI must be set before calling parseManifest_!');
  694. const playlist = this.manifestTextParser_.parsePlaylist(data);
  695. /** @type {!Array.<!shaka.hls.Tag>} */
  696. const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE');
  697. /** @type {!Array.<!shaka.extern.Variant>} */
  698. let variants = [];
  699. /** @type {!Array.<!shaka.extern.Stream>} */
  700. let textStreams = [];
  701. /** @type {!Array.<!shaka.extern.Stream>} */
  702. let imageStreams = [];
  703. // This assert is our own sanity check.
  704. goog.asserts.assert(this.presentationTimeline_ == null,
  705. 'Presentation timeline created early!');
  706. // We don't know if the presentation is VOD or live until we parse at least
  707. // one media playlist, so make a VOD-style presentation timeline for now
  708. // and change the type later if we discover this is live.
  709. // Since the player will load the first variant chosen early in the process,
  710. // there isn't a window during playback where the live-ness is unknown.
  711. this.presentationTimeline_ = new shaka.media.PresentationTimeline(
  712. /* presentationStartTime= */ null, /* delay= */ 0);
  713. this.presentationTimeline_.setStatic(true);
  714. const getUris = () => {
  715. return [uri];
  716. };
  717. /** @type {?string} */
  718. let mediaPlaylistType = null;
  719. /** @type {!Map.<string, string>} */
  720. let mediaVariables = new Map();
  721. // Parsing a media playlist results in a single-variant stream.
  722. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  723. this.needsClosedCaptionsDetection_ = false;
  724. /** @type {!Array.<!shaka.hls.Tag>} */
  725. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  726. 'EXT-X-DEFINE');
  727. mediaVariables = this.parseMediaVariables_(
  728. variablesTags, this.masterPlaylistUri_);
  729. // By default we assume it is video, but in a later step the correct type
  730. // is obtained.
  731. mediaPlaylistType = ContentType.VIDEO;
  732. // These values can be obtained later so these default values are good.
  733. const codecs = '';
  734. const languageValue = '';
  735. const channelsCount = null;
  736. const sampleRate = null;
  737. const closedCaptions = new Map();
  738. const spatialAudio = false;
  739. const characteristics = null;
  740. const forced = false; // Only relevant for text.
  741. const primary = true; // This is the only stream!
  742. const name = 'Media Playlist';
  743. // Make the stream info, with those values.
  744. const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  745. this.globalId_++, mediaVariables, playlist, getUris, uri, codecs,
  746. mediaPlaylistType, languageValue, primary, name, channelsCount,
  747. closedCaptions, characteristics, forced, sampleRate, spatialAudio);
  748. this.uriToStreamInfosMap_.set(uri, streamInfo);
  749. if (streamInfo.stream) {
  750. const qualityInfo =
  751. shaka.media.QualityObserver.createQualityInfo(streamInfo.stream);
  752. streamInfo.stream.segmentIndex.forEachTopLevelReference(
  753. (reference) => {
  754. if (reference.initSegmentReference) {
  755. reference.initSegmentReference.mediaQuality = qualityInfo;
  756. }
  757. });
  758. }
  759. mediaPlaylistType = streamInfo.stream.type;
  760. // Wrap the stream from that stream info with a variant.
  761. variants.push({
  762. id: 0,
  763. language: this.getLanguage_(languageValue),
  764. disabledUntilTime: 0,
  765. primary: true,
  766. audio: mediaPlaylistType == 'audio' ? streamInfo.stream : null,
  767. video: mediaPlaylistType == 'video' ? streamInfo.stream : null,
  768. bandwidth: streamInfo.stream.bandwidth || 0,
  769. allowedByApplication: true,
  770. allowedByKeySystem: true,
  771. decodingInfos: [],
  772. });
  773. } else {
  774. this.parseMasterVariables_(variablesTags);
  775. /** @type {!Array.<!shaka.hls.Tag>} */
  776. const mediaTags = Utils.filterTagsByName(
  777. playlist.tags, 'EXT-X-MEDIA');
  778. /** @type {!Array.<!shaka.hls.Tag>} */
  779. const variantTags = Utils.filterTagsByName(
  780. playlist.tags, 'EXT-X-STREAM-INF');
  781. /** @type {!Array.<!shaka.hls.Tag>} */
  782. const imageTags = Utils.filterTagsByName(
  783. playlist.tags, 'EXT-X-IMAGE-STREAM-INF');
  784. /** @type {!Array.<!shaka.hls.Tag>} */
  785. const iFrameTags = Utils.filterTagsByName(
  786. playlist.tags, 'EXT-X-I-FRAME-STREAM-INF');
  787. /** @type {!Array.<!shaka.hls.Tag>} */
  788. const sessionKeyTags = Utils.filterTagsByName(
  789. playlist.tags, 'EXT-X-SESSION-KEY');
  790. /** @type {!Array.<!shaka.hls.Tag>} */
  791. const sessionDataTags = Utils.filterTagsByName(
  792. playlist.tags, 'EXT-X-SESSION-DATA');
  793. /** @type {!Array.<!shaka.hls.Tag>} */
  794. const contentSteeringTags = Utils.filterTagsByName(
  795. playlist.tags, 'EXT-X-CONTENT-STEERING');
  796. this.processSessionData_(sessionDataTags);
  797. await this.processContentSteering_(contentSteeringTags);
  798. this.parseCodecs_(variantTags);
  799. this.parseClosedCaptions_(mediaTags);
  800. variants = await this.createVariantsForTags_(
  801. variantTags, sessionKeyTags, mediaTags, getUris,
  802. this.globalVariables_);
  803. textStreams = this.parseTexts_(mediaTags);
  804. imageStreams = await this.parseImages_(imageTags, iFrameTags);
  805. }
  806. // Make sure that the parser has not been destroyed.
  807. if (!this.playerInterface_) {
  808. throw new shaka.util.Error(
  809. shaka.util.Error.Severity.CRITICAL,
  810. shaka.util.Error.Category.PLAYER,
  811. shaka.util.Error.Code.OPERATION_ABORTED);
  812. }
  813. // Single-variant streams aren't lazy-loaded, so for them we already have
  814. // enough info here to determine the presentation type and duration.
  815. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  816. if (this.isLive_()) {
  817. this.changePresentationTimelineToLive_(playlist);
  818. const delay = this.getUpdatePlaylistDelay_();
  819. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  820. }
  821. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  822. this.finalizeStreams_(streamInfos);
  823. this.determineDuration_();
  824. goog.asserts.assert(mediaPlaylistType,
  825. 'mediaPlaylistType should be non-null');
  826. this.processDateRangeTags_(
  827. playlist.tags, mediaPlaylistType, mediaVariables, getUris);
  828. }
  829. this.manifest_ = {
  830. presentationTimeline: this.presentationTimeline_,
  831. variants,
  832. textStreams,
  833. imageStreams,
  834. offlineSessionIds: [],
  835. minBufferTime: 0,
  836. sequenceMode: this.config_.hls.sequenceMode,
  837. ignoreManifestTimestampsInSegmentsMode:
  838. this.config_.hls.ignoreManifestTimestampsInSegmentsMode,
  839. type: shaka.media.ManifestParser.HLS,
  840. serviceDescription: null,
  841. nextUrl: null,
  842. periodCount: 1,
  843. gapCount: 0,
  844. isLowLatency: false,
  845. };
  846. // If there is no 'CODECS' attribute in the manifest and codec guessing is
  847. // disabled, we need to create the segment indexes now so that missing info
  848. // can be parsed from the media data and added to the stream objects.
  849. if (!this.codecInfoInManifest_ && this.config_.hls.disableCodecGuessing) {
  850. const createIndexes = [];
  851. for (const variant of this.manifest_.variants) {
  852. if (variant.audio && variant.audio.codecs === '') {
  853. createIndexes.push(variant.audio.createSegmentIndex());
  854. }
  855. if (variant.video && variant.video.codecs === '') {
  856. createIndexes.push(variant.video.createSegmentIndex());
  857. }
  858. }
  859. await Promise.all(createIndexes);
  860. }
  861. this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
  862. if (variants.length == 1) {
  863. const createSegmentIndexPromises = [];
  864. const variant = variants[0];
  865. for (const stream of [variant.video, variant.audio]) {
  866. if (stream && !stream.segmentIndex) {
  867. createSegmentIndexPromises.push(stream.createSegmentIndex());
  868. }
  869. }
  870. if (createSegmentIndexPromises.length > 0) {
  871. await Promise.all(createSegmentIndexPromises);
  872. }
  873. }
  874. }
  875. /**
  876. * @param {!Array.<!shaka.media.SegmentReference>} segments
  877. * @return {!Promise.<shaka.media.SegmentUtils.BasicInfo>}
  878. * @private
  879. */
  880. async getBasicInfoFromSegments_(segments) {
  881. const HlsParser = shaka.hls.HlsParser;
  882. const defaultBasicInfo = shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  883. this.config_.hls.mediaPlaylistFullMimeType);
  884. if (!segments.length) {
  885. return defaultBasicInfo;
  886. }
  887. const {segment, segmentIndex} = this.getAvailableSegment_(segments);
  888. const segmentUris = segment.getUris();
  889. const segmentUri = segmentUris[0];
  890. const parsedUri = new goog.Uri(segmentUri);
  891. const extension = parsedUri.getPath().split('.').pop();
  892. const rawMimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  893. if (rawMimeType) {
  894. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  895. rawMimeType);
  896. }
  897. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  898. let initData = null;
  899. let initMimeType = null;
  900. const initSegmentRef = segment.initSegmentReference;
  901. if (initSegmentRef) {
  902. const initSegmentRequest = shaka.util.Networking.createSegmentRequest(
  903. initSegmentRef.getUris(), initSegmentRef.getStartByte(),
  904. initSegmentRef.getEndByte(), this.config_.retryParameters);
  905. const initType =
  906. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  907. const initResponse = await this.makeNetworkRequest_(
  908. initSegmentRequest, requestType, {type: initType});
  909. initData = initResponse.data;
  910. if (initSegmentRef.aesKey) {
  911. initData = await shaka.media.SegmentUtils.aesDecrypt(
  912. initData, initSegmentRef.aesKey, 0);
  913. }
  914. initMimeType = initResponse.headers['content-type'];
  915. if (initMimeType) {
  916. // Split the MIME type in case the server sent additional parameters.
  917. initMimeType = initMimeType.split(';')[0].toLowerCase();
  918. }
  919. }
  920. const segmentRequest = shaka.util.Networking.createSegmentRequest(
  921. segment.getUris(), segment.getStartByte(), segment.getEndByte(),
  922. this.config_.retryParameters);
  923. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  924. const response = await this.makeNetworkRequest_(
  925. segmentRequest, requestType, {type});
  926. let data = response.data;
  927. if (segment.aesKey) {
  928. data = await shaka.media.SegmentUtils.aesDecrypt(
  929. data, segment.aesKey, segmentIndex);
  930. }
  931. let contentMimeType = response.headers['content-type'];
  932. if (contentMimeType) {
  933. // Split the MIME type in case the server sent additional parameters.
  934. contentMimeType = contentMimeType.split(';')[0].toLowerCase();
  935. }
  936. const validMp4Extensions = [
  937. 'mp4',
  938. 'mp4a',
  939. 'm4s',
  940. 'm4i',
  941. 'm4a',
  942. 'm4f',
  943. 'cmfa',
  944. 'mp4v',
  945. 'm4v',
  946. 'cmfv',
  947. 'fmp4',
  948. ];
  949. const validMp4MimeType = [
  950. 'audio/mp4',
  951. 'video/mp4',
  952. 'video/iso.segment',
  953. ];
  954. if (shaka.util.TsParser.probe(
  955. shaka.util.BufferUtils.toUint8(data))) {
  956. const basicInfo =
  957. shaka.media.SegmentUtils.getBasicInfoFromTs(data);
  958. if (basicInfo) {
  959. return basicInfo;
  960. }
  961. } else if (validMp4Extensions.includes(extension) ||
  962. validMp4MimeType.includes(contentMimeType) ||
  963. (initMimeType && validMp4MimeType.includes(initMimeType))) {
  964. const basicInfo = shaka.media.SegmentUtils.getBasicInfoFromMp4(
  965. initData, data);
  966. if (basicInfo) {
  967. return basicInfo;
  968. }
  969. }
  970. if (contentMimeType) {
  971. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  972. contentMimeType);
  973. }
  974. if (initMimeType) {
  975. return shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  976. initMimeType);
  977. }
  978. return defaultBasicInfo;
  979. }
  980. /** @private */
  981. determineDuration_() {
  982. goog.asserts.assert(this.presentationTimeline_,
  983. 'Presentation timeline not created!');
  984. if (this.isLive_()) {
  985. // The spec says nothing much about seeking in live content, but Safari's
  986. // built-in HLS implementation does not allow it. Therefore we will set
  987. // the availability window equal to the presentation delay. The player
  988. // will be able to buffer ahead three segments, but the seek window will
  989. // be zero-sized.
  990. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  991. if (this.presentationType_ == PresentationType.LIVE) {
  992. let segmentAvailabilityDuration = this.getLiveDuration_();
  993. // This defaults to the presentation delay, which has the effect of
  994. // making the live stream unseekable. This is consistent with Apple's
  995. // HLS implementation.
  996. if (this.config_.hls.useSafariBehaviorForLive) {
  997. segmentAvailabilityDuration = this.presentationTimeline_.getDelay();
  998. }
  999. // The app can override that with a longer duration, to allow seeking.
  1000. if (!isNaN(this.config_.availabilityWindowOverride)) {
  1001. segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
  1002. }
  1003. this.presentationTimeline_.setSegmentAvailabilityDuration(
  1004. segmentAvailabilityDuration);
  1005. }
  1006. } else {
  1007. // Use the minimum duration as the presentation duration.
  1008. this.presentationTimeline_.setDuration(this.getMinDuration_());
  1009. }
  1010. if (!this.presentationTimeline_.isStartTimeLocked()) {
  1011. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  1012. if (!streamInfo.stream.segmentIndex) {
  1013. continue; // Not active.
  1014. }
  1015. if (streamInfo.type != 'audio' && streamInfo.type != 'video') {
  1016. continue;
  1017. }
  1018. const firstReference = streamInfo.stream.segmentIndex.get(0);
  1019. if (firstReference && firstReference.syncTime) {
  1020. const syncTime = firstReference.syncTime;
  1021. this.presentationTimeline_.setInitialProgramDateTime(syncTime);
  1022. }
  1023. }
  1024. }
  1025. // This is the first point where we have a meaningful presentation start
  1026. // time, and we need to tell PresentationTimeline that so that it can
  1027. // maintain consistency from here on.
  1028. this.presentationTimeline_.lockStartTime();
  1029. // This asserts that the live edge is being calculated from segment times.
  1030. // For VOD and event streams, this check should still pass.
  1031. goog.asserts.assert(
  1032. !this.presentationTimeline_.usingPresentationStartTime(),
  1033. 'We should not be using the presentation start time in HLS!');
  1034. }
  1035. /**
  1036. * Get the variables of each variant tag, and store in a map.
  1037. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1038. * @private
  1039. */
  1040. parseMasterVariables_(tags) {
  1041. const queryParams = new goog.Uri(this.masterPlaylistUri_).getQueryData();
  1042. for (const variableTag of tags) {
  1043. const name = variableTag.getAttributeValue('NAME');
  1044. const value = variableTag.getAttributeValue('VALUE');
  1045. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  1046. if (name && value) {
  1047. if (!this.globalVariables_.has(name)) {
  1048. this.globalVariables_.set(name, value);
  1049. }
  1050. }
  1051. if (queryParam) {
  1052. const queryParamValue = queryParams.get(queryParam)[0];
  1053. if (queryParamValue && !this.globalVariables_.has(queryParamValue)) {
  1054. this.globalVariables_.set(queryParam, queryParamValue);
  1055. }
  1056. }
  1057. }
  1058. }
  1059. /**
  1060. * Get the variables of each variant tag, and store in a map.
  1061. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1062. * @param {string} uri Media playlist URI.
  1063. * @return {!Map.<string, string>}
  1064. * @private
  1065. */
  1066. parseMediaVariables_(tags, uri) {
  1067. const queryParams = new goog.Uri(uri).getQueryData();
  1068. const mediaVariables = new Map();
  1069. for (const variableTag of tags) {
  1070. const name = variableTag.getAttributeValue('NAME');
  1071. const value = variableTag.getAttributeValue('VALUE');
  1072. const queryParam = variableTag.getAttributeValue('QUERYPARAM');
  1073. const mediaImport = variableTag.getAttributeValue('IMPORT');
  1074. if (name && value) {
  1075. if (!mediaVariables.has(name)) {
  1076. mediaVariables.set(name, value);
  1077. }
  1078. }
  1079. if (queryParam) {
  1080. const queryParamValue = queryParams.get(queryParam)[0];
  1081. if (queryParamValue && !mediaVariables.has(queryParamValue)) {
  1082. mediaVariables.set(queryParam, queryParamValue);
  1083. }
  1084. }
  1085. if (mediaImport) {
  1086. const globalValue = this.globalVariables_.get(mediaImport);
  1087. if (globalValue) {
  1088. mediaVariables.set(mediaImport, globalValue);
  1089. }
  1090. }
  1091. }
  1092. return mediaVariables;
  1093. }
  1094. /**
  1095. * Get the codecs of each variant tag, and store in a map from
  1096. * audio/video/subtitle group id to the codecs arraylist.
  1097. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1098. * @private
  1099. */
  1100. parseCodecs_(tags) {
  1101. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1102. for (const variantTag of tags) {
  1103. const audioGroupId = variantTag.getAttributeValue('AUDIO');
  1104. const videoGroupId = variantTag.getAttributeValue('VIDEO');
  1105. const subGroupId = variantTag.getAttributeValue('SUBTITLES');
  1106. const allCodecs = this.getCodecsForVariantTag_(variantTag);
  1107. if (subGroupId) {
  1108. const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1109. ContentType.TEXT, allCodecs);
  1110. goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
  1111. this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
  1112. shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
  1113. }
  1114. if (audioGroupId) {
  1115. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1116. ContentType.AUDIO, allCodecs);
  1117. if (!codecs) {
  1118. codecs = this.config_.hls.defaultAudioCodec;
  1119. }
  1120. this.groupIdToCodecsMap_.set(audioGroupId, codecs);
  1121. }
  1122. if (videoGroupId) {
  1123. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1124. ContentType.VIDEO, allCodecs);
  1125. if (!codecs) {
  1126. codecs = this.config_.hls.defaultVideoCodec;
  1127. }
  1128. this.groupIdToCodecsMap_.set(videoGroupId, codecs);
  1129. }
  1130. }
  1131. }
  1132. /**
  1133. * Process EXT-X-SESSION-DATA tags.
  1134. *
  1135. * @param {!Array.<!shaka.hls.Tag>} tags
  1136. * @private
  1137. */
  1138. processSessionData_(tags) {
  1139. for (const tag of tags) {
  1140. const id = tag.getAttributeValue('DATA-ID');
  1141. const uri = tag.getAttributeValue('URI');
  1142. const language = tag.getAttributeValue('LANGUAGE');
  1143. const value = tag.getAttributeValue('VALUE');
  1144. const data = (new Map()).set('id', id);
  1145. if (uri) {
  1146. data.set('uri', shaka.hls.Utils.constructSegmentUris(
  1147. [this.masterPlaylistUri_], uri, this.globalVariables_)[0]);
  1148. }
  1149. if (language) {
  1150. data.set('language', language);
  1151. }
  1152. if (value) {
  1153. data.set('value', value);
  1154. }
  1155. const event = new shaka.util.FakeEvent('sessiondata', data);
  1156. if (this.playerInterface_) {
  1157. this.playerInterface_.onEvent(event);
  1158. }
  1159. }
  1160. }
  1161. /**
  1162. * Process EXT-X-CONTENT-STEERING tags.
  1163. *
  1164. * @param {!Array.<!shaka.hls.Tag>} tags
  1165. * @return {!Promise}
  1166. * @private
  1167. */
  1168. async processContentSteering_(tags) {
  1169. if (!this.playerInterface_ || !this.config_) {
  1170. return;
  1171. }
  1172. let contentSteeringPromise;
  1173. for (const tag of tags) {
  1174. const defaultPathwayId = tag.getAttributeValue('PATHWAY-ID');
  1175. const uri = tag.getAttributeValue('SERVER-URI');
  1176. if (!defaultPathwayId || !uri) {
  1177. continue;
  1178. }
  1179. this.contentSteeringManager_ =
  1180. new shaka.util.ContentSteeringManager(this.playerInterface_);
  1181. this.contentSteeringManager_.configure(this.config_);
  1182. this.contentSteeringManager_.setBaseUris([this.masterPlaylistUri_]);
  1183. this.contentSteeringManager_.setManifestType(
  1184. shaka.media.ManifestParser.HLS);
  1185. this.contentSteeringManager_.setDefaultPathwayId(defaultPathwayId);
  1186. contentSteeringPromise =
  1187. this.contentSteeringManager_.requestInfo(uri);
  1188. break;
  1189. }
  1190. await contentSteeringPromise;
  1191. }
  1192. /**
  1193. * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags.
  1194. * Create text streams for Subtitles, but not Closed Captions.
  1195. *
  1196. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1197. * @return {!Array.<!shaka.extern.Stream>}
  1198. * @private
  1199. */
  1200. parseTexts_(mediaTags) {
  1201. // Create text stream for each Subtitle media tag.
  1202. const subtitleTags =
  1203. shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES');
  1204. const textStreams = subtitleTags.map((tag) => {
  1205. const disableText = this.config_.disableText;
  1206. if (disableText) {
  1207. return null;
  1208. }
  1209. try {
  1210. return this.createStreamInfoFromMediaTags_([tag], new Map()).stream;
  1211. } catch (e) {
  1212. if (this.config_.hls.ignoreTextStreamFailures) {
  1213. return null;
  1214. }
  1215. throw e;
  1216. }
  1217. });
  1218. const type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1219. // Set the codecs for text streams.
  1220. for (const tag of subtitleTags) {
  1221. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1222. const codecs = this.groupIdToCodecsMap_.get(groupId);
  1223. if (codecs) {
  1224. const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId);
  1225. if (textStreamInfos) {
  1226. for (const textStreamInfo of textStreamInfos) {
  1227. textStreamInfo.stream.codecs = codecs;
  1228. textStreamInfo.stream.mimeType =
  1229. this.guessMimeTypeBeforeLoading_(type, codecs) ||
  1230. this.guessMimeTypeFallback_(type);
  1231. this.setFullTypeForStream_(textStreamInfo.stream);
  1232. }
  1233. }
  1234. }
  1235. }
  1236. // Do not create text streams for Closed captions.
  1237. return textStreams.filter((s) => s);
  1238. }
  1239. /**
  1240. * @param {!shaka.extern.Stream} stream
  1241. * @private
  1242. */
  1243. setFullTypeForStream_(stream) {
  1244. const combinations = new Set([shaka.util.MimeUtils.getFullType(
  1245. stream.mimeType, stream.codecs)]);
  1246. if (stream.segmentIndex) {
  1247. stream.segmentIndex.forEachTopLevelReference((reference) => {
  1248. if (reference.mimeType) {
  1249. combinations.add(shaka.util.MimeUtils.getFullType(
  1250. reference.mimeType, stream.codecs));
  1251. }
  1252. });
  1253. }
  1254. stream.fullMimeTypes = combinations;
  1255. }
  1256. /**
  1257. * @param {!Array.<!shaka.hls.Tag>} imageTags from the playlist.
  1258. * @param {!Array.<!shaka.hls.Tag>} iFrameTags from the playlist.
  1259. * @return {!Promise.<!Array.<!shaka.extern.Stream>>}
  1260. * @private
  1261. */
  1262. async parseImages_(imageTags, iFrameTags) {
  1263. // Create image stream for each image tag.
  1264. const imageStreamPromises = imageTags.map(async (tag) => {
  1265. const disableThumbnails = this.config_.disableThumbnails;
  1266. if (disableThumbnails) {
  1267. return null;
  1268. }
  1269. try {
  1270. const streamInfo = await this.createStreamInfoFromImageTag_(tag);
  1271. return streamInfo.stream;
  1272. } catch (e) {
  1273. if (this.config_.hls.ignoreImageStreamFailures) {
  1274. return null;
  1275. }
  1276. throw e;
  1277. }
  1278. }).concat(iFrameTags.map((tag) => {
  1279. const disableThumbnails = this.config_.disableThumbnails;
  1280. if (disableThumbnails) {
  1281. return null;
  1282. }
  1283. try {
  1284. const streamInfo = this.createStreamInfoFromIframeTag_(tag);
  1285. if (streamInfo.stream.codecs !== 'mjpg') {
  1286. return null;
  1287. }
  1288. return streamInfo.stream;
  1289. } catch (e) {
  1290. if (this.config_.hls.ignoreImageStreamFailures) {
  1291. return null;
  1292. }
  1293. throw e;
  1294. }
  1295. }));
  1296. const imageStreams = await Promise.all(imageStreamPromises);
  1297. return imageStreams.filter((s) => s);
  1298. }
  1299. /**
  1300. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  1301. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1302. * @private
  1303. */
  1304. createStreamInfosFromMediaTags_(mediaTags, groupIdPathwayIdMapping) {
  1305. // Filter out subtitles and media tags without uri.
  1306. mediaTags = mediaTags.filter((tag) => {
  1307. const uri = tag.getAttributeValue('URI') || '';
  1308. const type = tag.getAttributeValue('TYPE');
  1309. return type != 'SUBTITLES' && uri != '';
  1310. });
  1311. const groupedTags = {};
  1312. for (const tag of mediaTags) {
  1313. const key = tag.getTagKey(!this.contentSteeringManager_);
  1314. if (!groupedTags[key]) {
  1315. groupedTags[key] = [tag];
  1316. } else {
  1317. groupedTags[key].push(tag);
  1318. }
  1319. }
  1320. for (const key in groupedTags) {
  1321. // Create stream info for each audio / video media grouped tag.
  1322. this.createStreamInfoFromMediaTags_(
  1323. groupedTags[key], groupIdPathwayIdMapping);
  1324. }
  1325. }
  1326. /**
  1327. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  1328. * @param {!Array.<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags
  1329. * from the playlist.
  1330. * @param {!Array.<!shaka.hls.Tag>} mediaTags EXT-X-MEDIA tags from the
  1331. * playlist.
  1332. * @param {function():!Array.<string>} getUris
  1333. * @param {?Map.<string, string>=} variables
  1334. * @return {!Promise.<!Array.<!shaka.extern.Variant>>}
  1335. * @private
  1336. */
  1337. async createVariantsForTags_(tags, sessionKeyTags, mediaTags, getUris,
  1338. variables) {
  1339. // EXT-X-SESSION-KEY processing
  1340. const drmInfos = [];
  1341. const keyIds = new Set();
  1342. if (sessionKeyTags.length > 0) {
  1343. for (const drmTag of sessionKeyTags) {
  1344. const method = drmTag.getRequiredAttrValue('METHOD');
  1345. // According to the HLS spec, KEYFORMAT is optional and implicitly
  1346. // defaults to "identity".
  1347. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  1348. const keyFormat =
  1349. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  1350. let drmInfo = null;
  1351. if (method == 'NONE') {
  1352. continue;
  1353. } else if (this.isAesMethod_(method)) {
  1354. const keyUris = shaka.hls.Utils.constructSegmentUris(
  1355. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  1356. const keyMapKey = keyUris.sort().join('');
  1357. if (!this.aesKeyMap_.has(keyMapKey)) {
  1358. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  1359. const request = shaka.net.NetworkingEngine.makeRequest(
  1360. keyUris, this.config_.retryParameters);
  1361. const keyResponse = this.makeNetworkRequest_(request, requestType);
  1362. this.aesKeyMap_.set(keyMapKey, keyResponse);
  1363. }
  1364. continue;
  1365. } else if (keyFormat == 'identity') {
  1366. // eslint-disable-next-line no-await-in-loop
  1367. drmInfo = await this.identityDrmParser_(
  1368. drmTag, /* mimeType= */ '', getUris,
  1369. /* initSegmentRef= */ null, variables);
  1370. } else {
  1371. const drmParser =
  1372. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  1373. drmInfo = drmParser ?
  1374. drmParser(drmTag, /* mimeType= */ '') : null;
  1375. }
  1376. if (drmInfo) {
  1377. if (drmInfo.keyIds) {
  1378. for (const keyId of drmInfo.keyIds) {
  1379. keyIds.add(keyId);
  1380. }
  1381. }
  1382. drmInfos.push(drmInfo);
  1383. } else {
  1384. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  1385. }
  1386. }
  1387. }
  1388. const groupedTags = {};
  1389. for (const tag of tags) {
  1390. const key = tag.getTagKey(!this.contentSteeringManager_);
  1391. if (!groupedTags[key]) {
  1392. groupedTags[key] = [tag];
  1393. } else {
  1394. groupedTags[key].push(tag);
  1395. }
  1396. }
  1397. const allVariants = [];
  1398. // Create variants for each group of variant tag.
  1399. for (const key in groupedTags) {
  1400. const tags = groupedTags[key];
  1401. const firstTag = tags[0];
  1402. const frameRate = firstTag.getAttributeValue('FRAME-RATE');
  1403. const bandwidth =
  1404. Number(firstTag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
  1405. Number(firstTag.getRequiredAttrValue('BANDWIDTH'));
  1406. const resolution = firstTag.getAttributeValue('RESOLUTION');
  1407. const [width, height] = resolution ? resolution.split('x') : [null, null];
  1408. const videoRange = firstTag.getAttributeValue('VIDEO-RANGE');
  1409. let videoLayout = firstTag.getAttributeValue('REQ-VIDEO-LAYOUT');
  1410. if (videoLayout && videoLayout.includes(',')) {
  1411. // If multiple video layout strings are present, pick the first valid
  1412. // one.
  1413. const layoutStrings = videoLayout.split(',').filter((layoutString) => {
  1414. return layoutString == 'CH-STEREO' || layoutString == 'CH-MONO';
  1415. });
  1416. videoLayout = layoutStrings[0];
  1417. }
  1418. // According to the HLS spec:
  1419. // By default a video variant is monoscopic, so an attribute
  1420. // consisting entirely of REQ-VIDEO-LAYOUT="CH-MONO" is unnecessary
  1421. // and SHOULD NOT be present.
  1422. videoLayout = videoLayout || 'CH-MONO';
  1423. const streamInfos = this.createStreamInfosForVariantTags_(tags,
  1424. mediaTags, resolution, frameRate);
  1425. goog.asserts.assert(streamInfos.audio.length ||
  1426. streamInfos.video.length, 'We should have created a stream!');
  1427. allVariants.push(...this.createVariants_(
  1428. streamInfos.audio,
  1429. streamInfos.video,
  1430. bandwidth,
  1431. width,
  1432. height,
  1433. frameRate,
  1434. videoRange,
  1435. videoLayout,
  1436. drmInfos,
  1437. keyIds));
  1438. }
  1439. return allVariants.filter((variant) => variant != null);
  1440. }
  1441. /**
  1442. * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
  1443. * related media tags.
  1444. *
  1445. * @param {!Array.<!shaka.hls.Tag>} tags
  1446. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1447. * @param {?string} resolution
  1448. * @param {?string} frameRate
  1449. * @return {!shaka.hls.HlsParser.StreamInfos}
  1450. * @private
  1451. */
  1452. createStreamInfosForVariantTags_(tags, mediaTags, resolution, frameRate) {
  1453. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1454. /** @type {shaka.hls.HlsParser.StreamInfos} */
  1455. const res = {
  1456. audio: [],
  1457. video: [],
  1458. };
  1459. const groupIdPathwayIdMapping = new Map();
  1460. const globalGroupIds = [];
  1461. let isAudioGroup = false;
  1462. let isVideoGroup = false;
  1463. for (const tag of tags) {
  1464. const audioGroupId = tag.getAttributeValue('AUDIO');
  1465. const videoGroupId = tag.getAttributeValue('VIDEO');
  1466. goog.asserts.assert(audioGroupId == null || videoGroupId == null,
  1467. 'Unexpected: both video and audio described by media tags!');
  1468. const groupId = audioGroupId || videoGroupId;
  1469. if (!groupId) {
  1470. continue;
  1471. }
  1472. if (!globalGroupIds.includes(groupId)) {
  1473. globalGroupIds.push(groupId);
  1474. }
  1475. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  1476. if (pathwayId) {
  1477. groupIdPathwayIdMapping.set(groupId, pathwayId);
  1478. }
  1479. if (audioGroupId) {
  1480. isAudioGroup = true;
  1481. } else if (videoGroupId) {
  1482. isVideoGroup = true;
  1483. }
  1484. // Make an educated guess about the stream type.
  1485. shaka.log.debug('Guessing stream type for', tag.toString());
  1486. }
  1487. if (globalGroupIds.length && mediaTags.length) {
  1488. const mediaTagsForVariant = mediaTags.filter((tag) => {
  1489. return globalGroupIds.includes(tag.getRequiredAttrValue('GROUP-ID'));
  1490. });
  1491. this.createStreamInfosFromMediaTags_(
  1492. mediaTagsForVariant, groupIdPathwayIdMapping);
  1493. }
  1494. const globalGroupId = globalGroupIds.sort().join(',');
  1495. const streamInfos =
  1496. (globalGroupId && this.groupIdToStreamInfosMap_.has(globalGroupId)) ?
  1497. this.groupIdToStreamInfosMap_.get(globalGroupId) : [];
  1498. if (isAudioGroup) {
  1499. res.audio.push(...streamInfos);
  1500. } else if (isVideoGroup) {
  1501. res.video.push(...streamInfos);
  1502. }
  1503. let type;
  1504. let ignoreStream = false;
  1505. // The Microsoft HLS manifest generators will make audio-only variants
  1506. // that link to their URI both directly and through an audio tag.
  1507. // In that case, ignore the local URI and use the version in the
  1508. // AUDIO tag, so you inherit its language.
  1509. // As an example, see the manifest linked in issue #860.
  1510. const allStreamUris = tags.map((tag) => tag.getRequiredAttrValue('URI'));
  1511. const hasSameUri = res.audio.find((audio) => {
  1512. return audio && audio.getUris().find((uri) => {
  1513. return allStreamUris.includes(uri);
  1514. });
  1515. });
  1516. /** @type {!Array.<string>} */
  1517. let allCodecs = this.getCodecsForVariantTag_(tags[0]);
  1518. const videoCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1519. ContentType.VIDEO, allCodecs);
  1520. const audioCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  1521. ContentType.AUDIO, allCodecs);
  1522. if (audioCodecs && !videoCodecs) {
  1523. // There are no associated media tags, and there's only audio codec,
  1524. // and no video codec, so it should be audio.
  1525. type = ContentType.AUDIO;
  1526. shaka.log.debug('Guessing audio-only.');
  1527. ignoreStream = res.audio.length > 0;
  1528. } else if (!res.audio.length && !res.video.length &&
  1529. audioCodecs && videoCodecs) {
  1530. // There are both audio and video codecs, so assume multiplexed content.
  1531. // Note that the default used when CODECS is missing assumes multiple
  1532. // (and therefore multiplexed).
  1533. // Recombine the codec strings into one so that MediaSource isn't
  1534. // lied to later. (That would trigger an error in Chrome.)
  1535. shaka.log.debug('Guessing multiplexed audio+video.');
  1536. type = ContentType.VIDEO;
  1537. allCodecs = [[videoCodecs, audioCodecs].join(',')];
  1538. } else if (res.audio.length && hasSameUri) {
  1539. shaka.log.debug('Guessing audio-only.');
  1540. type = ContentType.AUDIO;
  1541. ignoreStream = true;
  1542. } else if (res.video.length && !res.audio.length) {
  1543. // There are associated video streams. Assume this is audio.
  1544. shaka.log.debug('Guessing audio-only.');
  1545. type = ContentType.AUDIO;
  1546. } else {
  1547. shaka.log.debug('Guessing video-only.');
  1548. type = ContentType.VIDEO;
  1549. }
  1550. if (!ignoreStream) {
  1551. let language = null;
  1552. let name = null;
  1553. let channelsCount = null;
  1554. let spatialAudio = false;
  1555. let characteristics = null;
  1556. let sampleRate = null;
  1557. if (!streamInfos.length) {
  1558. const mediaTag = mediaTags.find((tag) => {
  1559. const uri = tag.getAttributeValue('URI') || '';
  1560. const type = tag.getAttributeValue('TYPE');
  1561. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1562. return type != 'SUBTITLES' && uri == '' &&
  1563. globalGroupIds.includes(groupId);
  1564. });
  1565. if (mediaTag) {
  1566. language = mediaTag.getAttributeValue('LANGUAGE');
  1567. name = mediaTag.getAttributeValue('NAME');
  1568. channelsCount = this.getChannelsCount_(mediaTag);
  1569. spatialAudio = this.isSpatialAudio_(mediaTag);
  1570. characteristics = mediaTag.getAttributeValue('CHARACTERISTICS');
  1571. sampleRate = this.getSampleRate_(mediaTag);
  1572. }
  1573. }
  1574. const streamInfo = this.createStreamInfoFromVariantTags_(
  1575. tags, allCodecs, type, language, name, channelsCount,
  1576. characteristics, sampleRate, spatialAudio);
  1577. if (globalGroupId) {
  1578. streamInfo.stream.groupId = globalGroupId;
  1579. }
  1580. res[streamInfo.stream.type] = [streamInfo];
  1581. }
  1582. return res;
  1583. }
  1584. /**
  1585. * Get the codecs from the 'EXT-X-STREAM-INF' tag.
  1586. *
  1587. * @param {!shaka.hls.Tag} tag
  1588. * @return {!Array.<string>} codecs
  1589. * @private
  1590. */
  1591. getCodecsForVariantTag_(tag) {
  1592. let codecsString = tag.getAttributeValue('CODECS') || '';
  1593. const supplementalCodecsString =
  1594. tag.getAttributeValue('SUPPLEMENTAL-CODECS');
  1595. this.codecInfoInManifest_ = codecsString.length > 0;
  1596. if (!this.codecInfoInManifest_ && !this.config_.hls.disableCodecGuessing) {
  1597. // These are the default codecs to assume if none are specified.
  1598. const defaultCodecsArray = [];
  1599. if (!this.config_.disableVideo) {
  1600. defaultCodecsArray.push(this.config_.hls.defaultVideoCodec);
  1601. }
  1602. if (!this.config_.disableAudio) {
  1603. defaultCodecsArray.push(this.config_.hls.defaultAudioCodec);
  1604. }
  1605. codecsString = defaultCodecsArray.join(',');
  1606. }
  1607. // Strip out internal whitespace while splitting on commas:
  1608. /** @type {!Array.<string>} */
  1609. const codecs = codecsString.split(/\s*,\s*/);
  1610. if (supplementalCodecsString) {
  1611. const supplementalCodecs = supplementalCodecsString.split(/\s*,\s*/)
  1612. .map((codec) => {
  1613. return codec.split('/')[0];
  1614. });
  1615. codecs.push(...supplementalCodecs);
  1616. }
  1617. return shaka.media.SegmentUtils.codecsFiltering(codecs);
  1618. }
  1619. /**
  1620. * Get the channel count information for an HLS audio track.
  1621. * CHANNELS specifies an ordered, "/" separated list of parameters.
  1622. * If the type is audio, the first parameter will be a decimal integer
  1623. * specifying the number of independent, simultaneous audio channels.
  1624. * No other channels parameters are currently defined.
  1625. *
  1626. * @param {!shaka.hls.Tag} tag
  1627. * @return {?number}
  1628. * @private
  1629. */
  1630. getChannelsCount_(tag) {
  1631. const channels = tag.getAttributeValue('CHANNELS');
  1632. if (!channels) {
  1633. return null;
  1634. }
  1635. const channelcountstring = channels.split('/')[0];
  1636. const count = parseInt(channelcountstring, 10);
  1637. return count;
  1638. }
  1639. /**
  1640. * Get the sample rate information for an HLS audio track.
  1641. *
  1642. * @param {!shaka.hls.Tag} tag
  1643. * @return {?number}
  1644. * @private
  1645. */
  1646. getSampleRate_(tag) {
  1647. const sampleRate = tag.getAttributeValue('SAMPLE-RATE');
  1648. if (!sampleRate) {
  1649. return null;
  1650. }
  1651. return parseInt(sampleRate, 10);
  1652. }
  1653. /**
  1654. * Get the spatial audio information for an HLS audio track.
  1655. * In HLS the channels field indicates the number of audio channels that the
  1656. * stream has (eg: 2). In the case of Dolby Atmos, the complexity is
  1657. * expressed with the number of channels followed by the word JOC
  1658. * (eg: 16/JOC), so 16 would be the number of channels (eg: 7.3.6 layout),
  1659. * and JOC indicates that the stream has spatial audio.
  1660. * @see https://developer.apple.com/documentation/http_live_streaming/hls_authoring_specification_for_apple_devices/hls_authoring_specification_for_apple_devices_appendixes
  1661. *
  1662. * @param {!shaka.hls.Tag} tag
  1663. * @return {boolean}
  1664. * @private
  1665. */
  1666. isSpatialAudio_(tag) {
  1667. const channels = tag.getAttributeValue('CHANNELS');
  1668. if (!channels) {
  1669. return false;
  1670. }
  1671. return channels.includes('/JOC');
  1672. }
  1673. /**
  1674. * Get the closed captions map information for the EXT-X-STREAM-INF tag, to
  1675. * create the stream info.
  1676. * @param {!shaka.hls.Tag} tag
  1677. * @param {string} type
  1678. * @return {Map.<string, string>} closedCaptions
  1679. * @private
  1680. */
  1681. getClosedCaptions_(tag, type) {
  1682. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1683. // The attribute of closed captions is optional, and the value may be
  1684. // 'NONE'.
  1685. const closedCaptionsAttr = tag.getAttributeValue('CLOSED-CAPTIONS');
  1686. // EXT-X-STREAM-INF tags may have CLOSED-CAPTIONS attributes.
  1687. // The value can be either a quoted-string or an enumerated-string with
  1688. // the value NONE. If the value is a quoted-string, it MUST match the
  1689. // value of the GROUP-ID attribute of an EXT-X-MEDIA tag elsewhere in the
  1690. // Playlist whose TYPE attribute is CLOSED-CAPTIONS.
  1691. if (type == ContentType.VIDEO ) {
  1692. if (this.config_.disableText) {
  1693. this.needsClosedCaptionsDetection_ = false;
  1694. return null;
  1695. }
  1696. if (closedCaptionsAttr) {
  1697. if (closedCaptionsAttr != 'NONE') {
  1698. return this.groupIdToClosedCaptionsMap_.get(closedCaptionsAttr);
  1699. }
  1700. this.needsClosedCaptionsDetection_ = false;
  1701. } else if (!closedCaptionsAttr && this.groupIdToClosedCaptionsMap_.size) {
  1702. for (const key of this.groupIdToClosedCaptionsMap_.keys()) {
  1703. return this.groupIdToClosedCaptionsMap_.get(key);
  1704. }
  1705. }
  1706. }
  1707. return null;
  1708. }
  1709. /**
  1710. * Get the normalized language value.
  1711. *
  1712. * @param {?string} languageValue
  1713. * @return {string}
  1714. * @private
  1715. */
  1716. getLanguage_(languageValue) {
  1717. const LanguageUtils = shaka.util.LanguageUtils;
  1718. return LanguageUtils.normalize(languageValue || 'und');
  1719. }
  1720. /**
  1721. * Get the type value.
  1722. * Shaka recognizes the content types 'audio', 'video', 'text', and 'image'.
  1723. * The HLS 'subtitles' type needs to be mapped to 'text'.
  1724. * @param {!shaka.hls.Tag} tag
  1725. * @return {string}
  1726. * @private
  1727. */
  1728. getType_(tag) {
  1729. let type = tag.getRequiredAttrValue('TYPE').toLowerCase();
  1730. if (type == 'subtitles') {
  1731. type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1732. }
  1733. return type;
  1734. }
  1735. /**
  1736. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} audioInfos
  1737. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} videoInfos
  1738. * @param {number} bandwidth
  1739. * @param {?string} width
  1740. * @param {?string} height
  1741. * @param {?string} frameRate
  1742. * @param {?string} videoRange
  1743. * @param {?string} videoLayout
  1744. * @param {!Array.<shaka.extern.DrmInfo>} drmInfos
  1745. * @param {!Set.<string>} keyIds
  1746. * @return {!Array.<!shaka.extern.Variant>}
  1747. * @private
  1748. */
  1749. createVariants_(
  1750. audioInfos, videoInfos, bandwidth, width, height, frameRate, videoRange,
  1751. videoLayout, drmInfos, keyIds) {
  1752. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1753. const DrmUtils = shaka.util.DrmUtils;
  1754. for (const info of videoInfos) {
  1755. this.addVideoAttributes_(
  1756. info.stream, width, height, frameRate, videoRange, videoLayout,
  1757. /** colorGamut= */ null);
  1758. }
  1759. // In case of audio-only or video-only content or the audio/video is
  1760. // disabled by the config, we create an array of one item containing
  1761. // a null. This way, the double-loop works for all kinds of content.
  1762. // NOTE: we currently don't have support for audio-only content.
  1763. const disableAudio = this.config_.disableAudio;
  1764. if (!audioInfos.length || disableAudio) {
  1765. audioInfos = [null];
  1766. }
  1767. const disableVideo = this.config_.disableVideo;
  1768. if (!videoInfos.length || disableVideo) {
  1769. videoInfos = [null];
  1770. }
  1771. const variants = [];
  1772. for (const audioInfo of audioInfos) {
  1773. for (const videoInfo of videoInfos) {
  1774. const audioStream = audioInfo ? audioInfo.stream : null;
  1775. if (audioStream) {
  1776. audioStream.drmInfos = drmInfos;
  1777. audioStream.keyIds = keyIds;
  1778. }
  1779. const videoStream = videoInfo ? videoInfo.stream : null;
  1780. if (videoStream) {
  1781. videoStream.drmInfos = drmInfos;
  1782. videoStream.keyIds = keyIds;
  1783. }
  1784. if (videoStream && !audioStream) {
  1785. videoStream.bandwidth = bandwidth;
  1786. }
  1787. if (!videoStream && audioStream) {
  1788. audioStream.bandwidth = bandwidth;
  1789. }
  1790. const audioDrmInfos = audioInfo ? audioInfo.stream.drmInfos : null;
  1791. const videoDrmInfos = videoInfo ? videoInfo.stream.drmInfos : null;
  1792. const videoStreamUri =
  1793. videoInfo ? videoInfo.getUris().sort().join(',') : '';
  1794. const audioStreamUri =
  1795. audioInfo ? audioInfo.getUris().sort().join(',') : '';
  1796. const variantUriKey = videoStreamUri + ' - ' + audioStreamUri;
  1797. if (audioStream && videoStream) {
  1798. if (!DrmUtils.areDrmCompatible(audioDrmInfos, videoDrmInfos)) {
  1799. shaka.log.warning(
  1800. 'Incompatible DRM info in HLS variant. Skipping.');
  1801. continue;
  1802. }
  1803. }
  1804. if (this.variantUriSet_.has(variantUriKey)) {
  1805. // This happens when two variants only differ in their text streams.
  1806. shaka.log.debug(
  1807. 'Skipping variant which only differs in text streams.');
  1808. continue;
  1809. }
  1810. // Since both audio and video are of the same type, this assertion will
  1811. // catch certain mistakes at runtime that the compiler would miss.
  1812. goog.asserts.assert(!audioStream ||
  1813. audioStream.type == ContentType.AUDIO, 'Audio parameter mismatch!');
  1814. goog.asserts.assert(!videoStream ||
  1815. videoStream.type == ContentType.VIDEO, 'Video parameter mismatch!');
  1816. const variant = {
  1817. id: this.globalId_++,
  1818. language: audioStream ? audioStream.language : 'und',
  1819. disabledUntilTime: 0,
  1820. primary: (!!audioStream && audioStream.primary) ||
  1821. (!!videoStream && videoStream.primary),
  1822. audio: audioStream,
  1823. video: videoStream,
  1824. bandwidth,
  1825. allowedByApplication: true,
  1826. allowedByKeySystem: true,
  1827. decodingInfos: [],
  1828. };
  1829. variants.push(variant);
  1830. this.variantUriSet_.add(variantUriKey);
  1831. }
  1832. }
  1833. return variants;
  1834. }
  1835. /**
  1836. * Parses an array of EXT-X-MEDIA tags, then stores the values of all tags
  1837. * with TYPE="CLOSED-CAPTIONS" into a map of group id to closed captions.
  1838. *
  1839. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1840. * @private
  1841. */
  1842. parseClosedCaptions_(mediaTags) {
  1843. const closedCaptionsTags =
  1844. shaka.hls.Utils.filterTagsByType(mediaTags, 'CLOSED-CAPTIONS');
  1845. this.needsClosedCaptionsDetection_ = closedCaptionsTags.length == 0;
  1846. for (const tag of closedCaptionsTags) {
  1847. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1848. 'Should only be called on media tags!');
  1849. const languageValue = tag.getAttributeValue('LANGUAGE');
  1850. let language = this.getLanguage_(languageValue);
  1851. if (!languageValue) {
  1852. const nameValue = tag.getAttributeValue('NAME');
  1853. if (nameValue) {
  1854. language = nameValue;
  1855. }
  1856. }
  1857. // The GROUP-ID value is a quoted-string that specifies the group to which
  1858. // the Rendition belongs.
  1859. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1860. // The value of INSTREAM-ID is a quoted-string that specifies a Rendition
  1861. // within the segments in the Media Playlist. This attribute is REQUIRED
  1862. // if the TYPE attribute is CLOSED-CAPTIONS.
  1863. // We need replace SERVICE string by our internal svc string.
  1864. const instreamId = tag.getRequiredAttrValue('INSTREAM-ID')
  1865. .replace('SERVICE', 'svc');
  1866. if (!this.groupIdToClosedCaptionsMap_.get(groupId)) {
  1867. this.groupIdToClosedCaptionsMap_.set(groupId, new Map());
  1868. }
  1869. this.groupIdToClosedCaptionsMap_.get(groupId).set(instreamId, language);
  1870. }
  1871. }
  1872. /**
  1873. * Parse EXT-X-MEDIA media tag into a Stream object.
  1874. *
  1875. * @param {!Array.<!shaka.hls.Tag>} tags
  1876. * @param {!Map.<string, string>} groupIdPathwayIdMapping
  1877. * @return {!shaka.hls.HlsParser.StreamInfo}
  1878. * @private
  1879. */
  1880. createStreamInfoFromMediaTags_(tags, groupIdPathwayIdMapping) {
  1881. const verbatimMediaPlaylistUris = [];
  1882. const globalGroupIds = [];
  1883. const groupIdUriMappping = new Map();
  1884. for (const tag of tags) {
  1885. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1886. 'Should only be called on media tags!');
  1887. const uri = tag.getRequiredAttrValue('URI');
  1888. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1889. verbatimMediaPlaylistUris.push(uri);
  1890. globalGroupIds.push(groupId);
  1891. groupIdUriMappping.set(groupId, uri);
  1892. }
  1893. const globalGroupId = globalGroupIds.sort().join(',');
  1894. const firstTag = tags[0];
  1895. let codecs = '';
  1896. /** @type {string} */
  1897. const type = this.getType_(firstTag);
  1898. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  1899. codecs = firstTag.getAttributeValue('CODECS') || '';
  1900. } else {
  1901. for (const groupId of globalGroupIds) {
  1902. if (this.groupIdToCodecsMap_.has(groupId)) {
  1903. codecs = this.groupIdToCodecsMap_.get(groupId);
  1904. break;
  1905. }
  1906. }
  1907. }
  1908. // Check if the stream has already been created as part of another Variant
  1909. // and return it if it has.
  1910. const key = verbatimMediaPlaylistUris.sort().join(',');
  1911. if (this.uriToStreamInfosMap_.has(key)) {
  1912. return this.uriToStreamInfosMap_.get(key);
  1913. }
  1914. const streamId = this.globalId_++;
  1915. if (this.contentSteeringManager_) {
  1916. for (const [groupId, uri] of groupIdUriMappping) {
  1917. const pathwayId = groupIdPathwayIdMapping.get(groupId);
  1918. if (pathwayId) {
  1919. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  1920. }
  1921. }
  1922. }
  1923. const language = firstTag.getAttributeValue('LANGUAGE');
  1924. const name = firstTag.getAttributeValue('NAME');
  1925. // NOTE: According to the HLS spec, "DEFAULT=YES" requires "AUTOSELECT=YES".
  1926. // However, we don't bother to validate "AUTOSELECT", since we don't
  1927. // actually use it in our streaming model, and we treat everything as
  1928. // "AUTOSELECT=YES". A value of "AUTOSELECT=NO" would imply that it may
  1929. // only be selected explicitly by the user, and we don't have a way to
  1930. // represent that in our model.
  1931. const defaultAttrValue = firstTag.getAttributeValue('DEFAULT');
  1932. const primary = defaultAttrValue == 'YES';
  1933. const channelsCount =
  1934. type == 'audio' ? this.getChannelsCount_(firstTag) : null;
  1935. const spatialAudio =
  1936. type == 'audio' ? this.isSpatialAudio_(firstTag) : false;
  1937. const characteristics = firstTag.getAttributeValue('CHARACTERISTICS');
  1938. const forcedAttrValue = firstTag.getAttributeValue('FORCED');
  1939. const forced = forcedAttrValue == 'YES';
  1940. const sampleRate = type == 'audio' ? this.getSampleRate_(firstTag) : null;
  1941. // TODO: Should we take into account some of the currently ignored
  1942. // attributes: INSTREAM-ID, Attribute descriptions: https://bit.ly/2lpjOhj
  1943. const streamInfo = this.createStreamInfo_(
  1944. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  1945. primary, name, channelsCount, /* closedCaptions= */ null,
  1946. characteristics, forced, sampleRate, spatialAudio);
  1947. if (streamInfo.stream) {
  1948. streamInfo.stream.groupId = globalGroupId;
  1949. }
  1950. if (this.groupIdToStreamInfosMap_.has(globalGroupId)) {
  1951. this.groupIdToStreamInfosMap_.get(globalGroupId).push(streamInfo);
  1952. } else {
  1953. this.groupIdToStreamInfosMap_.set(globalGroupId, [streamInfo]);
  1954. }
  1955. this.uriToStreamInfosMap_.set(key, streamInfo);
  1956. return streamInfo;
  1957. }
  1958. /**
  1959. * Parse EXT-X-IMAGE-STREAM-INF media tag into a Stream object.
  1960. *
  1961. * @param {shaka.hls.Tag} tag
  1962. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  1963. * @private
  1964. */
  1965. async createStreamInfoFromImageTag_(tag) {
  1966. goog.asserts.assert(tag.name == 'EXT-X-IMAGE-STREAM-INF',
  1967. 'Should only be called on image tags!');
  1968. /** @type {string} */
  1969. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  1970. const verbatimImagePlaylistUri = tag.getRequiredAttrValue('URI');
  1971. const codecs = tag.getAttributeValue('CODECS', 'jpeg') || '';
  1972. // Check if the stream has already been created as part of another Variant
  1973. // and return it if it has.
  1974. if (this.uriToStreamInfosMap_.has(verbatimImagePlaylistUri)) {
  1975. return this.uriToStreamInfosMap_.get(verbatimImagePlaylistUri);
  1976. }
  1977. const language = tag.getAttributeValue('LANGUAGE');
  1978. const name = tag.getAttributeValue('NAME');
  1979. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  1980. const streamInfo = this.createStreamInfo_(
  1981. this.globalId_++, [verbatimImagePlaylistUri], codecs, type, language,
  1982. /* primary= */ false, name, /* channelsCount= */ null,
  1983. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  1984. /* sampleRate= */ null, /* spatialAudio= */ false);
  1985. // Parse misc attributes.
  1986. const resolution = tag.getAttributeValue('RESOLUTION');
  1987. if (resolution) {
  1988. // The RESOLUTION tag represents the resolution of a single thumbnail, not
  1989. // of the entire sheet at once (like we expect in the output).
  1990. // So multiply by the layout size.
  1991. // Since we need to have generated the segment index for this, we can't
  1992. // lazy-load in this situation.
  1993. await streamInfo.stream.createSegmentIndex();
  1994. const reference = streamInfo.stream.segmentIndex.get(0);
  1995. const layout = reference.getTilesLayout();
  1996. if (layout) {
  1997. streamInfo.stream.width =
  1998. Number(resolution.split('x')[0]) * Number(layout.split('x')[0]);
  1999. streamInfo.stream.height =
  2000. Number(resolution.split('x')[1]) * Number(layout.split('x')[1]);
  2001. // TODO: What happens if there are multiple grids, with different
  2002. // layout sizes, inside this image stream?
  2003. }
  2004. }
  2005. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  2006. if (bandwidth) {
  2007. streamInfo.stream.bandwidth = Number(bandwidth);
  2008. }
  2009. this.uriToStreamInfosMap_.set(verbatimImagePlaylistUri, streamInfo);
  2010. return streamInfo;
  2011. }
  2012. /**
  2013. * Parse EXT-X-I-FRAME-STREAM-INF media tag into a Stream object.
  2014. *
  2015. * @param {shaka.hls.Tag} tag
  2016. * @return {!shaka.hls.HlsParser.StreamInfo}
  2017. * @private
  2018. */
  2019. createStreamInfoFromIframeTag_(tag) {
  2020. goog.asserts.assert(tag.name == 'EXT-X-I-FRAME-STREAM-INF',
  2021. 'Should only be called on iframe tags!');
  2022. /** @type {string} */
  2023. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  2024. const verbatimIFramePlaylistUri = tag.getRequiredAttrValue('URI');
  2025. const codecs = tag.getAttributeValue('CODECS') || '';
  2026. // Check if the stream has already been created as part of another Variant
  2027. // and return it if it has.
  2028. if (this.uriToStreamInfosMap_.has(verbatimIFramePlaylistUri)) {
  2029. return this.uriToStreamInfosMap_.get(verbatimIFramePlaylistUri);
  2030. }
  2031. const language = tag.getAttributeValue('LANGUAGE');
  2032. const name = tag.getAttributeValue('NAME');
  2033. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  2034. const streamInfo = this.createStreamInfo_(
  2035. this.globalId_++, [verbatimIFramePlaylistUri], codecs, type, language,
  2036. /* primary= */ false, name, /* channelsCount= */ null,
  2037. /* closedCaptions= */ null, characteristics, /* forced= */ false,
  2038. /* sampleRate= */ null, /* spatialAudio= */ false);
  2039. // Parse misc attributes.
  2040. const resolution = tag.getAttributeValue('RESOLUTION');
  2041. const [width, height] = resolution ? resolution.split('x') : [null, null];
  2042. streamInfo.stream.width = Number(width) || undefined;
  2043. streamInfo.stream.height = Number(height) || undefined;
  2044. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  2045. if (bandwidth) {
  2046. streamInfo.stream.bandwidth = Number(bandwidth);
  2047. }
  2048. this.uriToStreamInfosMap_.set(verbatimIFramePlaylistUri, streamInfo);
  2049. return streamInfo;
  2050. }
  2051. /**
  2052. * Parse an EXT-X-STREAM-INF media tag into a Stream object.
  2053. *
  2054. * @param {!Array.<!shaka.hls.Tag>} tags
  2055. * @param {!Array.<string>} allCodecs
  2056. * @param {string} type
  2057. * @param {?string} language
  2058. * @param {?string} name
  2059. * @param {?number} channelsCount
  2060. * @param {?string} characteristics
  2061. * @param {?number} sampleRate
  2062. * @param {boolean} spatialAudio
  2063. * @return {!shaka.hls.HlsParser.StreamInfo}
  2064. * @private
  2065. */
  2066. createStreamInfoFromVariantTags_(tags, allCodecs, type, language, name,
  2067. channelsCount, characteristics, sampleRate, spatialAudio) {
  2068. const streamId = this.globalId_++;
  2069. const verbatimMediaPlaylistUris = [];
  2070. for (const tag of tags) {
  2071. goog.asserts.assert(tag.name == 'EXT-X-STREAM-INF',
  2072. 'Should only be called on variant tags!');
  2073. const uri = tag.getRequiredAttrValue('URI');
  2074. const pathwayId = tag.getAttributeValue('PATHWAY-ID');
  2075. if (this.contentSteeringManager_ && pathwayId) {
  2076. this.contentSteeringManager_.addLocation(streamId, pathwayId, uri);
  2077. }
  2078. verbatimMediaPlaylistUris.push(uri);
  2079. }
  2080. const key = verbatimMediaPlaylistUris.sort().join(',');
  2081. if (this.uriToStreamInfosMap_.has(key)) {
  2082. return this.uriToStreamInfosMap_.get(key);
  2083. }
  2084. const closedCaptions = this.getClosedCaptions_(tags[0], type);
  2085. const codecs = shaka.util.ManifestParserUtils.guessCodecs(type, allCodecs);
  2086. const streamInfo = this.createStreamInfo_(
  2087. streamId, verbatimMediaPlaylistUris, codecs, type, language,
  2088. /* primary= */ false, name, channelsCount, closedCaptions,
  2089. characteristics, /* forced= */ false, sampleRate,
  2090. /* spatialAudio= */ false);
  2091. this.uriToStreamInfosMap_.set(key, streamInfo);
  2092. return streamInfo;
  2093. }
  2094. /**
  2095. * @param {number} streamId
  2096. * @param {!Array.<string>} verbatimMediaPlaylistUris
  2097. * @param {string} codecs
  2098. * @param {string} type
  2099. * @param {?string} languageValue
  2100. * @param {boolean} primary
  2101. * @param {?string} name
  2102. * @param {?number} channelsCount
  2103. * @param {Map.<string, string>} closedCaptions
  2104. * @param {?string} characteristics
  2105. * @param {boolean} forced
  2106. * @param {?number} sampleRate
  2107. * @param {boolean} spatialAudio
  2108. * @return {!shaka.hls.HlsParser.StreamInfo}
  2109. * @private
  2110. */
  2111. createStreamInfo_(streamId, verbatimMediaPlaylistUris, codecs, type,
  2112. languageValue, primary, name, channelsCount, closedCaptions,
  2113. characteristics, forced, sampleRate, spatialAudio) {
  2114. // TODO: Refactor, too many parameters
  2115. // This stream is lazy-loaded inside the createSegmentIndex function.
  2116. // So we start out with a stream object that does not contain the actual
  2117. // segment index, then download when createSegmentIndex is called.
  2118. const stream = this.makeStreamObject_(streamId, codecs, type,
  2119. languageValue, primary, name, channelsCount, closedCaptions,
  2120. characteristics, forced, sampleRate, spatialAudio);
  2121. const redirectUris = [];
  2122. const getUris = () => {
  2123. if (this.contentSteeringManager_ &&
  2124. verbatimMediaPlaylistUris.length > 1) {
  2125. return this.contentSteeringManager_.getLocations(streamId);
  2126. }
  2127. return redirectUris.concat(shaka.hls.Utils.constructUris(
  2128. [this.masterPlaylistUri_], verbatimMediaPlaylistUris,
  2129. this.globalVariables_));
  2130. };
  2131. const streamInfo = {
  2132. stream,
  2133. type,
  2134. redirectUris,
  2135. getUris,
  2136. // These values are filled out or updated after lazy-loading:
  2137. minTimestamp: 0,
  2138. maxTimestamp: 0,
  2139. mediaSequenceToStartTime: new Map(),
  2140. canSkipSegments: false,
  2141. canBlockReload: false,
  2142. hasEndList: false,
  2143. firstSequenceNumber: -1,
  2144. nextMediaSequence: -1,
  2145. nextPart: -1,
  2146. loadedOnce: false,
  2147. };
  2148. /** @param {!AbortSignal} abortSignal */
  2149. const downloadSegmentIndex = async (abortSignal) => {
  2150. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2151. const uris = streamInfo.getUris();
  2152. // Download the actual manifest.
  2153. let response;
  2154. try {
  2155. response = await this.requestManifest_(
  2156. streamInfo.getUris(), /* isPlaylist= */ true);
  2157. } catch (e) {
  2158. stream.closeSegmentIndex();
  2159. const handled = this.playerInterface_.disableStream(stream);
  2160. if (handled) {
  2161. return;
  2162. } else {
  2163. throw e;
  2164. }
  2165. }
  2166. if (abortSignal.aborted) {
  2167. return;
  2168. }
  2169. // Record the final URI after redirects.
  2170. const responseUri = response.uri;
  2171. if (responseUri != response.originalUri && !uris.includes(responseUri)) {
  2172. redirectUris.push(responseUri);
  2173. }
  2174. // Record the redirected, final URI of this media playlist when we parse
  2175. // it.
  2176. /** @type {!shaka.hls.Playlist} */
  2177. const playlist = this.manifestTextParser_.parsePlaylist(response.data);
  2178. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  2179. throw new shaka.util.Error(
  2180. shaka.util.Error.Severity.CRITICAL,
  2181. shaka.util.Error.Category.MANIFEST,
  2182. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  2183. }
  2184. /** @type {!Array.<!shaka.hls.Tag>} */
  2185. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  2186. 'EXT-X-DEFINE');
  2187. const mediaVariables =
  2188. this.parseMediaVariables_(variablesTags, responseUri);
  2189. const mimeType = undefined;
  2190. let requestBasicInfo = false;
  2191. // If no codec info was provided in the manifest and codec guessing is
  2192. // disabled we try to get necessary info from the media data.
  2193. if ((!this.codecInfoInManifest_ &&
  2194. this.config_.hls.disableCodecGuessing) ||
  2195. (this.needsClosedCaptionsDetection_ && type == ContentType.VIDEO &&
  2196. !this.config_.hls.disableClosedCaptionsDetection)) {
  2197. if (playlist.segments.length > 0) {
  2198. this.needsClosedCaptionsDetection_ = false;
  2199. requestBasicInfo = true;
  2200. }
  2201. }
  2202. const allowOverrideMimeType = !this.codecInfoInManifest_ &&
  2203. this.config_.hls.disableCodecGuessing;
  2204. const wasLive = this.isLive_();
  2205. const realStreamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  2206. streamId, mediaVariables, playlist, getUris, responseUri, codecs,
  2207. type, languageValue, primary, name, channelsCount, closedCaptions,
  2208. characteristics, forced, sampleRate, spatialAudio, mimeType,
  2209. requestBasicInfo, allowOverrideMimeType);
  2210. if (abortSignal.aborted) {
  2211. return;
  2212. }
  2213. const realStream = realStreamInfo.stream;
  2214. if (this.isLive_() && !wasLive) {
  2215. // Now that we know that the presentation is live, convert the timeline
  2216. // to live.
  2217. this.changePresentationTimelineToLive_(playlist);
  2218. }
  2219. // Copy values from the real stream info to our initial one.
  2220. streamInfo.minTimestamp = realStreamInfo.minTimestamp;
  2221. streamInfo.maxTimestamp = realStreamInfo.maxTimestamp;
  2222. streamInfo.canSkipSegments = realStreamInfo.canSkipSegments;
  2223. streamInfo.canBlockReload = realStreamInfo.canBlockReload;
  2224. streamInfo.hasEndList = realStreamInfo.hasEndList;
  2225. streamInfo.mediaSequenceToStartTime =
  2226. realStreamInfo.mediaSequenceToStartTime;
  2227. streamInfo.nextMediaSequence = realStreamInfo.nextMediaSequence;
  2228. streamInfo.nextPart = realStreamInfo.nextPart;
  2229. streamInfo.loadedOnce = true;
  2230. stream.segmentIndex = realStream.segmentIndex;
  2231. stream.encrypted = realStream.encrypted;
  2232. stream.drmInfos = realStream.drmInfos;
  2233. stream.keyIds = realStream.keyIds;
  2234. stream.mimeType = realStream.mimeType;
  2235. stream.bandwidth = stream.bandwidth || realStream.bandwidth;
  2236. stream.codecs = stream.codecs || realStream.codecs;
  2237. stream.closedCaptions =
  2238. stream.closedCaptions || realStream.closedCaptions;
  2239. stream.width = stream.width || realStream.width;
  2240. stream.height = stream.height || realStream.height;
  2241. stream.hdr = stream.hdr || realStream.hdr;
  2242. stream.colorGamut = stream.colorGamut || realStream.colorGamut;
  2243. if (stream.language == 'und' && realStream.language != 'und') {
  2244. stream.language = realStream.language;
  2245. }
  2246. stream.language = stream.language || realStream.language;
  2247. stream.channelsCount = stream.channelsCount || realStream.channelsCount;
  2248. stream.audioSamplingRate =
  2249. stream.audioSamplingRate || realStream.audioSamplingRate;
  2250. this.setFullTypeForStream_(stream);
  2251. // Since we lazy-loaded this content, the player may need to create new
  2252. // sessions for the DRM info in this stream.
  2253. if (stream.drmInfos.length) {
  2254. this.playerInterface_.newDrmInfo(stream);
  2255. }
  2256. let closedCaptionsUpdated = false;
  2257. if ((!closedCaptions && stream.closedCaptions) ||
  2258. (closedCaptions && stream.closedCaptions &&
  2259. closedCaptions.size != stream.closedCaptions.size)) {
  2260. closedCaptionsUpdated = true;
  2261. }
  2262. if (this.manifest_ && closedCaptionsUpdated) {
  2263. this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
  2264. }
  2265. if (type == ContentType.VIDEO || type == ContentType.AUDIO) {
  2266. for (const otherStreamInfo of this.uriToStreamInfosMap_.values()) {
  2267. if (!otherStreamInfo.loadedOnce && otherStreamInfo.type == type) {
  2268. // To aid manifest filtering, assume before loading that all video
  2269. // renditions have the same MIME type. (And likewise for audio.)
  2270. otherStreamInfo.stream.mimeType = realStream.mimeType;
  2271. this.setFullTypeForStream_(otherStreamInfo.stream);
  2272. }
  2273. }
  2274. }
  2275. if (type == ContentType.TEXT) {
  2276. const firstSegment = realStream.segmentIndex.get(0);
  2277. if (firstSegment && firstSegment.initSegmentReference) {
  2278. stream.mimeType = 'application/mp4';
  2279. this.setFullTypeForStream_(stream);
  2280. }
  2281. }
  2282. const qualityInfo =
  2283. shaka.media.QualityObserver.createQualityInfo(stream);
  2284. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2285. if (reference.initSegmentReference) {
  2286. reference.initSegmentReference.mediaQuality = qualityInfo;
  2287. }
  2288. });
  2289. // Add finishing touches to the stream that can only be done once we have
  2290. // more full context on the media as a whole.
  2291. if (this.hasEnoughInfoToFinalizeStreams_()) {
  2292. if (!this.streamsFinalized_) {
  2293. // Mark this manifest as having been finalized, so we don't go through
  2294. // this whole process of finishing touches a second time.
  2295. this.streamsFinalized_ = true;
  2296. // Finalize all of the currently-loaded streams.
  2297. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  2298. const activeStreamInfos =
  2299. streamInfos.filter((s) => s.stream.segmentIndex);
  2300. this.finalizeStreams_(activeStreamInfos);
  2301. // With the addition of this new stream, we now have enough info to
  2302. // figure out how long the streams should be. So process all streams
  2303. // we have downloaded up until this point.
  2304. this.determineDuration_();
  2305. // Finally, start the update timer, if this asset has been determined
  2306. // to be a livestream.
  2307. const delay = this.getUpdatePlaylistDelay_();
  2308. if (delay > 0) {
  2309. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  2310. }
  2311. } else {
  2312. // We don't need to go through the full process; just finalize this
  2313. // single stream.
  2314. this.finalizeStreams_([streamInfo]);
  2315. }
  2316. }
  2317. this.processDateRangeTags_(
  2318. playlist.tags, stream.type, mediaVariables, getUris);
  2319. };
  2320. /** @type {Promise} */
  2321. let creationPromise = null;
  2322. /** @type {!AbortController} */
  2323. let abortController = new AbortController();
  2324. const safeCreateSegmentIndex = () => {
  2325. // An operation is already in progress. The second and subsequent
  2326. // callers receive the same Promise as the first caller, and only one
  2327. // download operation will occur.
  2328. if (creationPromise) {
  2329. return creationPromise;
  2330. }
  2331. // Create a new AbortController to be able to cancel this specific
  2332. // download.
  2333. abortController = new AbortController();
  2334. // Create a Promise tied to the outcome of downloadSegmentIndex(). If
  2335. // downloadSegmentIndex is rejected, creationPromise will also be
  2336. // rejected.
  2337. creationPromise = new Promise((resolve) => {
  2338. resolve(downloadSegmentIndex(abortController.signal));
  2339. });
  2340. return creationPromise;
  2341. };
  2342. stream.createSegmentIndex = safeCreateSegmentIndex;
  2343. stream.closeSegmentIndex = () => {
  2344. // If we're mid-creation, cancel it.
  2345. if (creationPromise && !stream.segmentIndex) {
  2346. abortController.abort();
  2347. }
  2348. // If we have a segment index, release it.
  2349. if (stream.segmentIndex) {
  2350. stream.segmentIndex.release();
  2351. stream.segmentIndex = null;
  2352. }
  2353. // Clear the creation Promise so that a new operation can begin.
  2354. creationPromise = null;
  2355. };
  2356. return streamInfo;
  2357. }
  2358. /**
  2359. * @return {number}
  2360. * @private
  2361. */
  2362. getMinDuration_() {
  2363. let minDuration = Infinity;
  2364. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2365. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2366. // Since everything is already offset to 0 (either by sync or by being
  2367. // VOD), only maxTimestamp is necessary to compute the duration.
  2368. minDuration = Math.min(minDuration, streamInfo.maxTimestamp);
  2369. }
  2370. }
  2371. return minDuration;
  2372. }
  2373. /**
  2374. * @return {number}
  2375. * @private
  2376. */
  2377. getLiveDuration_() {
  2378. let maxTimestamp = Infinity;
  2379. let minTimestamp = Infinity;
  2380. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  2381. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  2382. maxTimestamp = Math.min(maxTimestamp, streamInfo.maxTimestamp);
  2383. minTimestamp = Math.min(minTimestamp, streamInfo.minTimestamp);
  2384. }
  2385. }
  2386. return maxTimestamp - minTimestamp;
  2387. }
  2388. /**
  2389. * @param {!Array.<!shaka.extern.Stream>} streams
  2390. * @private
  2391. */
  2392. notifySegmentsForStreams_(streams) {
  2393. const references = [];
  2394. for (const stream of streams) {
  2395. if (!stream.segmentIndex) {
  2396. // The stream was closed since the list of streams was built.
  2397. continue;
  2398. }
  2399. stream.segmentIndex.forEachTopLevelReference((reference) => {
  2400. references.push(reference);
  2401. });
  2402. }
  2403. this.presentationTimeline_.notifySegments(references);
  2404. }
  2405. /**
  2406. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  2407. * @private
  2408. */
  2409. finalizeStreams_(streamInfos) {
  2410. if (!this.isLive_()) {
  2411. const minDuration = this.getMinDuration_();
  2412. for (const streamInfo of streamInfos) {
  2413. streamInfo.stream.segmentIndex.fit(/* periodStart= */ 0, minDuration);
  2414. }
  2415. }
  2416. this.notifySegmentsForStreams_(streamInfos.map((s) => s.stream));
  2417. if (this.config_.hls.ignoreManifestProgramDateTime) {
  2418. this.syncStreamsWithSequenceNumber_(streamInfos);
  2419. } else {
  2420. this.syncStreamsWithProgramDateTime_(streamInfos);
  2421. if (this.config_.hls.ignoreManifestProgramDateTimeForTypes.length > 0) {
  2422. this.syncStreamsWithSequenceNumber_(streamInfos);
  2423. }
  2424. }
  2425. }
  2426. /**
  2427. * @param {string} type
  2428. * @return {boolean}
  2429. * @private
  2430. */
  2431. ignoreManifestProgramDateTimeFor_(type) {
  2432. if (this.config_.hls.ignoreManifestProgramDateTime) {
  2433. return true;
  2434. }
  2435. const forTypes = this.config_.hls.ignoreManifestProgramDateTimeForTypes;
  2436. return forTypes.includes(type);
  2437. }
  2438. /**
  2439. * There are some values on streams that can only be set once we know about
  2440. * both the video and audio content, if present.
  2441. * This checks if there is at least one video downloaded (if the media has
  2442. * video), and that there is at least one audio downloaded (if the media has
  2443. * audio).
  2444. * @return {boolean}
  2445. * @private
  2446. */
  2447. hasEnoughInfoToFinalizeStreams_() {
  2448. if (!this.manifest_) {
  2449. return false;
  2450. }
  2451. const videos = [];
  2452. const audios = [];
  2453. for (const variant of this.manifest_.variants) {
  2454. if (variant.video) {
  2455. videos.push(variant.video);
  2456. }
  2457. if (variant.audio) {
  2458. audios.push(variant.audio);
  2459. }
  2460. }
  2461. if (videos.length > 0 && !videos.some((stream) => stream.segmentIndex)) {
  2462. return false;
  2463. }
  2464. if (audios.length > 0 && !audios.some((stream) => stream.segmentIndex)) {
  2465. return false;
  2466. }
  2467. return true;
  2468. }
  2469. /**
  2470. * @param {number} streamId
  2471. * @param {!shaka.hls.Playlist} playlist
  2472. * @param {function():!Array.<string>} getUris
  2473. * @param {string} responseUri
  2474. * @param {string} codecs
  2475. * @param {string} type
  2476. * @param {?string} languageValue
  2477. * @param {boolean} primary
  2478. * @param {?string} name
  2479. * @param {?number} channelsCount
  2480. * @param {Map.<string, string>} closedCaptions
  2481. * @param {?string} characteristics
  2482. * @param {boolean} forced
  2483. * @param {?number} sampleRate
  2484. * @param {boolean} spatialAudio
  2485. * @param {(string|undefined)} mimeType
  2486. * @param {boolean=} requestBasicInfo
  2487. * @param {boolean=} allowOverrideMimeType
  2488. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  2489. * @private
  2490. */
  2491. async convertParsedPlaylistIntoStreamInfo_(streamId, variables, playlist,
  2492. getUris, responseUri, codecs, type, languageValue, primary, name,
  2493. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2494. spatialAudio, mimeType = undefined, requestBasicInfo = true,
  2495. allowOverrideMimeType = true) {
  2496. goog.asserts.assert(playlist.segments != null,
  2497. 'Media playlist should have segments!');
  2498. this.determinePresentationType_(playlist);
  2499. if (this.isLive_()) {
  2500. this.determineLastTargetDuration_(playlist);
  2501. }
  2502. const mediaSequenceToStartTime = this.isLive_() ?
  2503. this.mediaSequenceToStartTimeByType_.get(type) : new Map();
  2504. const {segments, bandwidth} = this.createSegments_(
  2505. playlist, mediaSequenceToStartTime, variables, getUris, type);
  2506. let width = null;
  2507. let height = null;
  2508. let videoRange = null;
  2509. let colorGamut = null;
  2510. if (segments.length > 0 && requestBasicInfo) {
  2511. const basicInfo = await this.getBasicInfoFromSegments_(segments);
  2512. type = basicInfo.type;
  2513. languageValue = basicInfo.language;
  2514. channelsCount = basicInfo.channelCount;
  2515. sampleRate = basicInfo.sampleRate;
  2516. if (!this.config_.disableText) {
  2517. closedCaptions = basicInfo.closedCaptions;
  2518. }
  2519. height = basicInfo.height;
  2520. width = basicInfo.width;
  2521. videoRange = basicInfo.videoRange;
  2522. colorGamut = basicInfo.colorGamut;
  2523. if (allowOverrideMimeType) {
  2524. mimeType = basicInfo.mimeType;
  2525. codecs = basicInfo.codecs;
  2526. }
  2527. }
  2528. if (!mimeType) {
  2529. mimeType = await this.guessMimeType_(type, codecs, segments);
  2530. }
  2531. const {drmInfos, keyIds, encrypted, aesEncrypted} =
  2532. await this.parseDrmInfo_(playlist, mimeType, getUris, variables);
  2533. if (encrypted && !drmInfos.length && !aesEncrypted) {
  2534. throw new shaka.util.Error(
  2535. shaka.util.Error.Severity.CRITICAL,
  2536. shaka.util.Error.Category.MANIFEST,
  2537. shaka.util.Error.Code.HLS_KEYFORMATS_NOT_SUPPORTED);
  2538. }
  2539. const stream = this.makeStreamObject_(streamId, codecs, type,
  2540. languageValue, primary, name, channelsCount, closedCaptions,
  2541. characteristics, forced, sampleRate, spatialAudio);
  2542. stream.encrypted = encrypted;
  2543. stream.drmInfos = drmInfos;
  2544. stream.keyIds = keyIds;
  2545. stream.mimeType = mimeType;
  2546. if (bandwidth) {
  2547. stream.bandwidth = bandwidth;
  2548. }
  2549. this.setFullTypeForStream_(stream);
  2550. if (type == shaka.util.ManifestParserUtils.ContentType.VIDEO &&
  2551. (width || height || videoRange || colorGamut)) {
  2552. this.addVideoAttributes_(stream, width, height,
  2553. /* frameRate= */ null, videoRange, /* videoLayout= */ null,
  2554. colorGamut);
  2555. }
  2556. // This new calculation is necessary for Low Latency streams.
  2557. if (this.isLive_()) {
  2558. this.determineLastTargetDuration_(playlist);
  2559. }
  2560. const firstStartTime = segments[0].startTime;
  2561. const lastSegment = segments[segments.length - 1];
  2562. const lastEndTime = lastSegment.endTime;
  2563. /** @type {!shaka.media.SegmentIndex} */
  2564. const segmentIndex = new shaka.media.SegmentIndex(segments);
  2565. stream.segmentIndex = segmentIndex;
  2566. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2567. playlist.tags, 'EXT-X-SERVER-CONTROL');
  2568. const canSkipSegments = serverControlTag ?
  2569. serverControlTag.getAttribute('CAN-SKIP-UNTIL') != null : false;
  2570. const canBlockReload = serverControlTag ?
  2571. serverControlTag.getAttribute('CAN-BLOCK-RELOAD') != null : false;
  2572. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2573. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2574. const {nextMediaSequence, nextPart} =
  2575. this.getNextMediaSequenceAndPart_(mediaSequenceNumber, segments);
  2576. return {
  2577. stream,
  2578. type,
  2579. redirectUris: [],
  2580. getUris,
  2581. minTimestamp: firstStartTime,
  2582. maxTimestamp: lastEndTime,
  2583. canSkipSegments,
  2584. canBlockReload,
  2585. hasEndList: false,
  2586. firstSequenceNumber: -1,
  2587. nextMediaSequence,
  2588. nextPart,
  2589. mediaSequenceToStartTime,
  2590. loadedOnce: false,
  2591. };
  2592. }
  2593. /**
  2594. * Get the next msn and part
  2595. *
  2596. * @param {number} mediaSequenceNumber
  2597. * @param {!Array.<!shaka.media.SegmentReference>} segments
  2598. * @return {{nextMediaSequence: number, nextPart:number}}}
  2599. * @private
  2600. */
  2601. getNextMediaSequenceAndPart_(mediaSequenceNumber, segments) {
  2602. const currentMediaSequence = mediaSequenceNumber + segments.length - 1;
  2603. let nextMediaSequence = currentMediaSequence;
  2604. let nextPart = -1;
  2605. if (!segments.length) {
  2606. nextMediaSequence++;
  2607. return {
  2608. nextMediaSequence,
  2609. nextPart,
  2610. };
  2611. }
  2612. const lastSegment = segments[segments.length - 1];
  2613. const partialReferences = lastSegment.partialReferences;
  2614. if (!lastSegment.partialReferences.length) {
  2615. nextMediaSequence++;
  2616. if (lastSegment.hasByterangeOptimization()) {
  2617. nextPart = 0;
  2618. }
  2619. return {
  2620. nextMediaSequence,
  2621. nextPart,
  2622. };
  2623. }
  2624. nextPart = partialReferences.length - 1;
  2625. const lastPartialReference =
  2626. partialReferences[partialReferences.length - 1];
  2627. if (!lastPartialReference.isPreload()) {
  2628. nextMediaSequence++;
  2629. nextPart = 0;
  2630. }
  2631. return {
  2632. nextMediaSequence,
  2633. nextPart,
  2634. };
  2635. }
  2636. /**
  2637. * Creates a stream object with the given parameters.
  2638. * The parameters that are passed into here are only the things that can be
  2639. * known without downloading the media playlist; other values must be set
  2640. * manually on the object after creation.
  2641. * @param {number} id
  2642. * @param {string} codecs
  2643. * @param {string} type
  2644. * @param {?string} languageValue
  2645. * @param {boolean} primary
  2646. * @param {?string} name
  2647. * @param {?number} channelsCount
  2648. * @param {Map.<string, string>} closedCaptions
  2649. * @param {?string} characteristics
  2650. * @param {boolean} forced
  2651. * @param {?number} sampleRate
  2652. * @param {boolean} spatialAudio
  2653. * @return {!shaka.extern.Stream}
  2654. * @private
  2655. */
  2656. makeStreamObject_(id, codecs, type, languageValue, primary, name,
  2657. channelsCount, closedCaptions, characteristics, forced, sampleRate,
  2658. spatialAudio) {
  2659. // Fill out a "best-guess" mimeType, for now. It will be replaced once the
  2660. // stream is lazy-loaded.
  2661. const mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) ||
  2662. this.guessMimeTypeFallback_(type);
  2663. const roles = [];
  2664. if (characteristics) {
  2665. for (const characteristic of characteristics.split(',')) {
  2666. roles.push(characteristic);
  2667. }
  2668. }
  2669. let kind = undefined;
  2670. let accessibilityPurpose = null;
  2671. if (type == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2672. if (roles.includes('public.accessibility.transcribes-spoken-dialog') &&
  2673. roles.includes('public.accessibility.describes-music-and-sound')) {
  2674. kind = shaka.util.ManifestParserUtils.TextStreamKind.CLOSED_CAPTION;
  2675. } else {
  2676. kind = shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE;
  2677. }
  2678. } else {
  2679. if (roles.includes('public.accessibility.describes-video')) {
  2680. accessibilityPurpose =
  2681. shaka.media.ManifestParser.AccessibilityPurpose.VISUALLY_IMPAIRED;
  2682. }
  2683. }
  2684. // If there are no roles, and we have defaulted to the subtitle "kind" for
  2685. // this track, add the implied subtitle role.
  2686. if (!roles.length &&
  2687. kind === shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE) {
  2688. roles.push(shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE);
  2689. }
  2690. const stream = {
  2691. id: this.globalId_++,
  2692. originalId: name,
  2693. groupId: null,
  2694. createSegmentIndex: () => Promise.resolve(),
  2695. segmentIndex: null,
  2696. mimeType,
  2697. codecs,
  2698. kind: (type == shaka.util.ManifestParserUtils.ContentType.TEXT) ?
  2699. shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE : undefined,
  2700. encrypted: false,
  2701. drmInfos: [],
  2702. keyIds: new Set(),
  2703. language: this.getLanguage_(languageValue),
  2704. originalLanguage: languageValue,
  2705. label: name, // For historical reasons, since before "originalId".
  2706. type,
  2707. primary,
  2708. // TODO: trick mode
  2709. trickModeVideo: null,
  2710. emsgSchemeIdUris: null,
  2711. frameRate: undefined,
  2712. pixelAspectRatio: undefined,
  2713. width: undefined,
  2714. height: undefined,
  2715. bandwidth: undefined,
  2716. roles,
  2717. forced,
  2718. channelsCount,
  2719. audioSamplingRate: sampleRate,
  2720. spatialAudio,
  2721. closedCaptions,
  2722. hdr: undefined,
  2723. colorGamut: undefined,
  2724. videoLayout: undefined,
  2725. tilesLayout: undefined,
  2726. accessibilityPurpose: accessibilityPurpose,
  2727. external: false,
  2728. fastSwitching: false,
  2729. fullMimeTypes: new Set(),
  2730. };
  2731. this.setFullTypeForStream_(stream);
  2732. return stream;
  2733. }
  2734. /**
  2735. * @param {!shaka.hls.Playlist} playlist
  2736. * @param {string} mimeType
  2737. * @param {function():!Array.<string>} getUris
  2738. * @param {?Map.<string, string>=} variables
  2739. * @return {Promise.<{
  2740. * drmInfos: !Array.<shaka.extern.DrmInfo>,
  2741. * keyIds: !Set.<string>,
  2742. * encrypted: boolean,
  2743. * aesEncrypted: boolean
  2744. * }>}
  2745. * @private
  2746. */
  2747. async parseDrmInfo_(playlist, mimeType, getUris, variables) {
  2748. /** @type {!Map<!shaka.hls.Tag, ?shaka.media.InitSegmentReference>} */
  2749. const drmTagsMap = new Map();
  2750. if (playlist.segments) {
  2751. for (const segment of playlist.segments) {
  2752. const segmentKeyTags = shaka.hls.Utils.filterTagsByName(segment.tags,
  2753. 'EXT-X-KEY');
  2754. let initSegmentRef = null;
  2755. if (segmentKeyTags.length) {
  2756. initSegmentRef = this.getInitSegmentReference_(playlist,
  2757. segment.tags, getUris, variables);
  2758. for (const segmentKeyTag of segmentKeyTags) {
  2759. drmTagsMap.set(segmentKeyTag, initSegmentRef);
  2760. }
  2761. }
  2762. }
  2763. }
  2764. let encrypted = false;
  2765. let aesEncrypted = false;
  2766. /** @type {!Array.<shaka.extern.DrmInfo>}*/
  2767. const drmInfos = [];
  2768. const keyIds = new Set();
  2769. for (const [key, value] of drmTagsMap) {
  2770. const drmTag = /** @type {!shaka.hls.Tag} */ (key);
  2771. const initSegmentRef =
  2772. /** @type {?shaka.media.InitSegmentReference} */ (value);
  2773. const method = drmTag.getRequiredAttrValue('METHOD');
  2774. if (method != 'NONE') {
  2775. encrypted = true;
  2776. // According to the HLS spec, KEYFORMAT is optional and implicitly
  2777. // defaults to "identity".
  2778. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  2779. const keyFormat =
  2780. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  2781. let drmInfo = null;
  2782. if (this.isAesMethod_(method)) {
  2783. // These keys are handled separately.
  2784. aesEncrypted = true;
  2785. continue;
  2786. } else if (keyFormat == 'identity') {
  2787. // eslint-disable-next-line no-await-in-loop
  2788. drmInfo = await this.identityDrmParser_(
  2789. drmTag, mimeType, getUris, initSegmentRef, variables);
  2790. } else {
  2791. const drmParser =
  2792. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  2793. drmInfo = drmParser ? drmParser(drmTag, mimeType) : null;
  2794. }
  2795. if (drmInfo) {
  2796. if (drmInfo.keyIds) {
  2797. for (const keyId of drmInfo.keyIds) {
  2798. keyIds.add(keyId);
  2799. }
  2800. }
  2801. drmInfos.push(drmInfo);
  2802. } else {
  2803. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  2804. }
  2805. }
  2806. }
  2807. return {drmInfos, keyIds, encrypted, aesEncrypted};
  2808. }
  2809. /**
  2810. * @param {!shaka.hls.Tag} drmTag
  2811. * @param {!shaka.hls.Playlist} playlist
  2812. * @param {function():!Array.<string>} getUris
  2813. * @param {?Map.<string, string>=} variables
  2814. * @return {!shaka.extern.aesKey}
  2815. * @private
  2816. */
  2817. parseAESDrmTag_(drmTag, playlist, getUris, variables) {
  2818. // Check if the Web Crypto API is available.
  2819. if (!window.crypto || !window.crypto.subtle) {
  2820. shaka.log.alwaysWarn('Web Crypto API is not available to decrypt ' +
  2821. 'AES. (Web Crypto only exists in secure origins like https)');
  2822. throw new shaka.util.Error(
  2823. shaka.util.Error.Severity.CRITICAL,
  2824. shaka.util.Error.Category.MANIFEST,
  2825. shaka.util.Error.Code.NO_WEB_CRYPTO_API);
  2826. }
  2827. // HLS RFC 8216 Section 5.2:
  2828. // An EXT-X-KEY tag with a KEYFORMAT of "identity" that does not have an IV
  2829. // attribute indicates that the Media Sequence Number is to be used as the
  2830. // IV when decrypting a Media Segment, by putting its big-endian binary
  2831. // representation into a 16-octet (128-bit) buffer and padding (on the left)
  2832. // with zeros.
  2833. let firstMediaSequenceNumber = 0;
  2834. let iv;
  2835. const ivHex = drmTag.getAttributeValue('IV', '');
  2836. if (!ivHex) {
  2837. // Media Sequence Number will be used as IV.
  2838. firstMediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2839. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2840. } else {
  2841. // Exclude 0x at the start of string.
  2842. iv = shaka.util.Uint8ArrayUtils.fromHex(ivHex.substr(2));
  2843. if (iv.byteLength != 16) {
  2844. throw new shaka.util.Error(
  2845. shaka.util.Error.Severity.CRITICAL,
  2846. shaka.util.Error.Category.MANIFEST,
  2847. shaka.util.Error.Code.AES_128_INVALID_IV_LENGTH);
  2848. }
  2849. }
  2850. const aesKeyInfoKey = `${drmTag.toString()}-${firstMediaSequenceNumber}`;
  2851. if (!this.aesKeyInfoMap_.has(aesKeyInfoKey)) {
  2852. // Default AES-128
  2853. const keyInfo = {
  2854. bitsKey: 128,
  2855. blockCipherMode: 'CBC',
  2856. iv,
  2857. firstMediaSequenceNumber,
  2858. };
  2859. const method = drmTag.getRequiredAttrValue('METHOD');
  2860. switch (method) {
  2861. case 'AES-256':
  2862. keyInfo.bitsKey = 256;
  2863. break;
  2864. case 'AES-256-CTR':
  2865. keyInfo.bitsKey = 256;
  2866. keyInfo.blockCipherMode = 'CTR';
  2867. break;
  2868. }
  2869. // Don't download the key object until the segment is parsed, to avoid a
  2870. // startup delay for long manifests with lots of keys.
  2871. keyInfo.fetchKey = async () => {
  2872. const keyUris = shaka.hls.Utils.constructSegmentUris(
  2873. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  2874. const keyMapKey = keyUris.sort().join('');
  2875. if (!this.aesKeyMap_.has(keyMapKey)) {
  2876. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  2877. const request = shaka.net.NetworkingEngine.makeRequest(
  2878. keyUris, this.config_.retryParameters);
  2879. const keyResponse = this.makeNetworkRequest_(request, requestType);
  2880. this.aesKeyMap_.set(keyMapKey, keyResponse);
  2881. }
  2882. const keyResponse = await this.aesKeyMap_.get(keyMapKey);
  2883. // keyResponse.status is undefined when URI is "data:text/plain;base64,"
  2884. if (!keyResponse.data ||
  2885. keyResponse.data.byteLength != (keyInfo.bitsKey / 8)) {
  2886. throw new shaka.util.Error(
  2887. shaka.util.Error.Severity.CRITICAL,
  2888. shaka.util.Error.Category.MANIFEST,
  2889. shaka.util.Error.Code.AES_128_INVALID_KEY_LENGTH);
  2890. }
  2891. const algorithm = {
  2892. name: keyInfo.blockCipherMode == 'CTR' ? 'AES-CTR' : 'AES-CBC',
  2893. length: keyInfo.bitsKey,
  2894. };
  2895. keyInfo.cryptoKey = await window.crypto.subtle.importKey(
  2896. 'raw', keyResponse.data, algorithm, true, ['decrypt']);
  2897. keyInfo.fetchKey = undefined; // No longer needed.
  2898. };
  2899. this.aesKeyInfoMap_.set(aesKeyInfoKey, keyInfo);
  2900. }
  2901. return this.aesKeyInfoMap_.get(aesKeyInfoKey);
  2902. }
  2903. /**
  2904. * @param {!shaka.hls.Playlist} playlist
  2905. * @private
  2906. */
  2907. determinePresentationType_(playlist) {
  2908. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  2909. const presentationTypeTag =
  2910. shaka.hls.Utils.getFirstTagWithName(playlist.tags,
  2911. 'EXT-X-PLAYLIST-TYPE');
  2912. const endListTag =
  2913. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  2914. const isVod = (presentationTypeTag && presentationTypeTag.value == 'VOD') ||
  2915. endListTag;
  2916. const isEvent = presentationTypeTag &&
  2917. presentationTypeTag.value == 'EVENT' && !isVod;
  2918. const isLive = !isVod && !isEvent;
  2919. if (isVod) {
  2920. this.setPresentationType_(PresentationType.VOD);
  2921. } else {
  2922. // If it's not VOD, it must be presentation type LIVE or an ongoing EVENT.
  2923. if (isLive) {
  2924. this.setPresentationType_(PresentationType.LIVE);
  2925. } else {
  2926. this.setPresentationType_(PresentationType.EVENT);
  2927. }
  2928. }
  2929. }
  2930. /**
  2931. * @param {!shaka.hls.Playlist} playlist
  2932. * @private
  2933. */
  2934. determineLastTargetDuration_(playlist) {
  2935. let lastTargetDuration = Infinity;
  2936. const segments = playlist.segments;
  2937. if (segments.length) {
  2938. let segmentIndex = segments.length - 1;
  2939. while (segmentIndex >= 0) {
  2940. const segment = segments[segmentIndex];
  2941. const extinfTag =
  2942. shaka.hls.Utils.getFirstTagWithName(segment.tags, 'EXTINF');
  2943. if (extinfTag) {
  2944. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  2945. // We're interested in the duration part.
  2946. const extinfValues = extinfTag.value.split(',');
  2947. lastTargetDuration = Number(extinfValues[0]);
  2948. break;
  2949. }
  2950. segmentIndex--;
  2951. }
  2952. }
  2953. const targetDurationTag = this.getRequiredTag_(playlist.tags,
  2954. 'EXT-X-TARGETDURATION');
  2955. const targetDuration = Number(targetDurationTag.value);
  2956. const partialTargetDurationTag =
  2957. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-PART-INF');
  2958. if (partialTargetDurationTag) {
  2959. this.partialTargetDuration_ = Number(
  2960. partialTargetDurationTag.getRequiredAttrValue('PART-TARGET'));
  2961. }
  2962. // Get the server-recommended min distance from the live edge.
  2963. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  2964. playlist.tags, 'EXT-X-SERVER-CONTROL');
  2965. // According to the HLS spec, updates should not happen more often than
  2966. // once in targetDuration. It also requires us to only update the active
  2967. // variant. We might implement that later, but for now every variant
  2968. // will be updated. To get the update period, choose the smallest
  2969. // targetDuration value across all playlists.
  2970. // 1. Update the shortest one to use as update period and segment
  2971. // availability time (for LIVE).
  2972. if (this.lowLatencyMode_ && this.partialTargetDuration_) {
  2973. // For low latency streaming, use the partial segment target duration.
  2974. if (this.lowLatencyByterangeOptimization_) {
  2975. // We always have at least 1 partial segment part, and most servers
  2976. // allow you to make a request with _HLS_msn=X&_HLS_part=0 with a
  2977. // distance of 4 partial segments. With this we ensure that we
  2978. // obtain the minimum latency in this type of case.
  2979. if (this.partialTargetDuration_ * 5 <= lastTargetDuration) {
  2980. this.lastTargetDuration_ = Math.min(
  2981. this.partialTargetDuration_, this.lastTargetDuration_);
  2982. } else {
  2983. this.lastTargetDuration_ = Math.min(
  2984. lastTargetDuration, this.lastTargetDuration_);
  2985. }
  2986. } else {
  2987. this.lastTargetDuration_ = Math.min(
  2988. this.partialTargetDuration_, this.lastTargetDuration_);
  2989. }
  2990. // Use 'PART-HOLD-BACK' as the presentation delay for low latency mode.
  2991. this.lowLatencyPresentationDelay_ = serverControlTag ? Number(
  2992. serverControlTag.getRequiredAttrValue('PART-HOLD-BACK')) : 0;
  2993. } else {
  2994. this.lastTargetDuration_ = Math.min(
  2995. lastTargetDuration, this.lastTargetDuration_);
  2996. // Use 'HOLD-BACK' as the presentation delay for default if defined.
  2997. const holdBack = serverControlTag ?
  2998. serverControlTag.getAttribute('HOLD-BACK') : null;
  2999. this.presentationDelay_ = holdBack ? Number(holdBack.value) : 0;
  3000. }
  3001. // 2. Update the longest target duration if need be to use as a
  3002. // presentation delay later.
  3003. this.maxTargetDuration_ = Math.max(
  3004. targetDuration, this.maxTargetDuration_);
  3005. }
  3006. /**
  3007. * @param {!shaka.hls.Playlist} playlist
  3008. * @private
  3009. */
  3010. changePresentationTimelineToLive_(playlist) {
  3011. // The live edge will be calculated from segments, so we don't need to
  3012. // set a presentation start time. We will assert later that this is
  3013. // working as expected.
  3014. // The HLS spec (RFC 8216) states in 6.3.3:
  3015. //
  3016. // "The client SHALL choose which Media Segment to play first ... the
  3017. // client SHOULD NOT choose a segment that starts less than three target
  3018. // durations from the end of the Playlist file. Doing so can trigger
  3019. // playback stalls."
  3020. //
  3021. // We accomplish this in our DASH-y model by setting a presentation
  3022. // delay of configured value, or 3 segments duration if not configured.
  3023. // This will be the "live edge" of the presentation.
  3024. let presentationDelay;
  3025. if (this.config_.defaultPresentationDelay) {
  3026. presentationDelay = this.config_.defaultPresentationDelay;
  3027. } else if (this.lowLatencyPresentationDelay_) {
  3028. presentationDelay = this.lowLatencyPresentationDelay_;
  3029. } else if (this.presentationDelay_) {
  3030. presentationDelay = this.presentationDelay_;
  3031. } else {
  3032. const playlistSegments = playlist.segments.length;
  3033. let delaySegments = this.config_.hls.liveSegmentsDelay;
  3034. if (delaySegments > (playlistSegments - 2)) {
  3035. delaySegments = Math.max(1, playlistSegments - 2);
  3036. }
  3037. presentationDelay = this.maxTargetDuration_ * delaySegments;
  3038. }
  3039. this.presentationTimeline_.setPresentationStartTime(0);
  3040. this.presentationTimeline_.setDelay(presentationDelay);
  3041. this.presentationTimeline_.setStatic(false);
  3042. }
  3043. /**
  3044. * Get the InitSegmentReference for a segment if it has a EXT-X-MAP tag.
  3045. * @param {!shaka.hls.Playlist} playlist
  3046. * @param {!Array.<!shaka.hls.Tag>} tags Segment tags
  3047. * @param {function():!Array.<string>} getUris
  3048. * @param {?Map.<string, string>=} variables
  3049. * @return {shaka.media.InitSegmentReference}
  3050. * @private
  3051. */
  3052. getInitSegmentReference_(playlist, tags, getUris, variables) {
  3053. /** @type {?shaka.hls.Tag} */
  3054. const mapTag = shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-MAP');
  3055. if (!mapTag) {
  3056. return null;
  3057. }
  3058. // Map tag example: #EXT-X-MAP:URI="main.mp4",BYTERANGE="720@0"
  3059. const verbatimInitSegmentUri = mapTag.getRequiredAttrValue('URI');
  3060. const absoluteInitSegmentUris = shaka.hls.Utils.constructSegmentUris(
  3061. getUris(), verbatimInitSegmentUri, variables);
  3062. const mapTagKey = [
  3063. absoluteInitSegmentUris.toString(),
  3064. mapTag.getAttributeValue('BYTERANGE', ''),
  3065. ].join('-');
  3066. if (!this.mapTagToInitSegmentRefMap_.has(mapTagKey)) {
  3067. /** @type {shaka.extern.aesKey|undefined} */
  3068. let aesKey = undefined;
  3069. let byteRangeTag = null;
  3070. for (const tag of tags) {
  3071. if (tag.name == 'EXT-X-KEY') {
  3072. if (this.isAesMethod_(tag.getRequiredAttrValue('METHOD')) &&
  3073. tag.id < mapTag.id) {
  3074. aesKey =
  3075. this.parseAESDrmTag_(tag, playlist, getUris, variables);
  3076. }
  3077. } else if (tag.name == 'EXT-X-BYTERANGE' && tag.id < mapTag.id) {
  3078. byteRangeTag = tag;
  3079. }
  3080. }
  3081. const initSegmentRef = this.createInitSegmentReference_(
  3082. absoluteInitSegmentUris, mapTag, byteRangeTag, aesKey);
  3083. this.mapTagToInitSegmentRefMap_.set(mapTagKey, initSegmentRef);
  3084. }
  3085. return this.mapTagToInitSegmentRefMap_.get(mapTagKey);
  3086. }
  3087. /**
  3088. * Create an InitSegmentReference object for the EXT-X-MAP tag in the media
  3089. * playlist.
  3090. * @param {!Array.<string>} absoluteInitSegmentUris
  3091. * @param {!shaka.hls.Tag} mapTag EXT-X-MAP
  3092. * @param {shaka.hls.Tag=} byteRangeTag EXT-X-BYTERANGE
  3093. * @param {shaka.extern.aesKey=} aesKey
  3094. * @return {!shaka.media.InitSegmentReference}
  3095. * @private
  3096. */
  3097. createInitSegmentReference_(absoluteInitSegmentUris, mapTag, byteRangeTag,
  3098. aesKey) {
  3099. let startByte = 0;
  3100. let endByte = null;
  3101. let byterange = mapTag.getAttributeValue('BYTERANGE');
  3102. if (!byterange && byteRangeTag) {
  3103. byterange = byteRangeTag.value;
  3104. }
  3105. // If a BYTERANGE attribute is not specified, the segment consists
  3106. // of the entire resource.
  3107. if (byterange) {
  3108. const blocks = byterange.split('@');
  3109. const byteLength = Number(blocks[0]);
  3110. startByte = Number(blocks[1]);
  3111. endByte = startByte + byteLength - 1;
  3112. if (aesKey) {
  3113. // MAP segment encrypted with method AES, when served with
  3114. // HTTP Range, has the unencrypted size specified in the range.
  3115. // See: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
  3116. const length = (endByte + 1) - startByte;
  3117. if (length % 16) {
  3118. endByte += (16 - (length % 16));
  3119. }
  3120. }
  3121. }
  3122. const initSegmentRef = new shaka.media.InitSegmentReference(
  3123. () => absoluteInitSegmentUris,
  3124. startByte,
  3125. endByte,
  3126. /* mediaQuality= */ null,
  3127. /* timescale= */ null,
  3128. /* segmentData= */ null,
  3129. aesKey);
  3130. return initSegmentRef;
  3131. }
  3132. /**
  3133. * Parses one shaka.hls.Segment object into a shaka.media.SegmentReference.
  3134. *
  3135. * @param {shaka.media.InitSegmentReference} initSegmentReference
  3136. * @param {shaka.media.SegmentReference} previousReference
  3137. * @param {!shaka.hls.Segment} hlsSegment
  3138. * @param {number} startTime
  3139. * @param {!Map.<string, string>} variables
  3140. * @param {!shaka.hls.Playlist} playlist
  3141. * @param {string} type
  3142. * @param {function():!Array.<string>} getUris
  3143. * @param {shaka.extern.aesKey=} aesKey
  3144. * @return {shaka.media.SegmentReference}
  3145. * @private
  3146. */
  3147. createSegmentReference_(
  3148. initSegmentReference, previousReference, hlsSegment, startTime,
  3149. variables, playlist, type, getUris, aesKey) {
  3150. const HlsParser = shaka.hls.HlsParser;
  3151. const getMimeType = (uri) => {
  3152. const parsedUri = new goog.Uri(uri);
  3153. const extension = parsedUri.getPath().split('.').pop();
  3154. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[type];
  3155. let mimeType = map[extension];
  3156. if (!mimeType) {
  3157. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  3158. }
  3159. return mimeType;
  3160. };
  3161. const tags = hlsSegment.tags;
  3162. const extinfTag =
  3163. shaka.hls.Utils.getFirstTagWithName(tags, 'EXTINF');
  3164. let endTime = 0;
  3165. let startByte = 0;
  3166. let endByte = null;
  3167. if (hlsSegment.partialSegments.length) {
  3168. this.manifest_.isLowLatency = true;
  3169. }
  3170. let syncTime = null;
  3171. if (!this.config_.hls.ignoreManifestProgramDateTime) {
  3172. const dateTimeTag =
  3173. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-PROGRAM-DATE-TIME');
  3174. if (dateTimeTag && dateTimeTag.value) {
  3175. syncTime = shaka.util.TXml.parseDate(dateTimeTag.value);
  3176. goog.asserts.assert(syncTime != null,
  3177. 'EXT-X-PROGRAM-DATE-TIME format not valid');
  3178. }
  3179. }
  3180. let status = shaka.media.SegmentReference.Status.AVAILABLE;
  3181. if (shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-GAP')) {
  3182. this.manifest_.gapCount++;
  3183. status = shaka.media.SegmentReference.Status.MISSING;
  3184. }
  3185. if (!extinfTag) {
  3186. if (hlsSegment.partialSegments.length == 0) {
  3187. // EXTINF tag must be available if the segment has no partial segments.
  3188. throw new shaka.util.Error(
  3189. shaka.util.Error.Severity.CRITICAL,
  3190. shaka.util.Error.Category.MANIFEST,
  3191. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, 'EXTINF');
  3192. } else if (!this.lowLatencyMode_) {
  3193. // Without EXTINF and without low-latency mode, partial segments get
  3194. // ignored.
  3195. return null;
  3196. }
  3197. }
  3198. // Create SegmentReferences for the partial segments.
  3199. let partialSegmentRefs = [];
  3200. // Optimization for LL-HLS with byterange
  3201. // More info in https://tinyurl.com/hls-open-byte-range
  3202. let segmentWithByteRangeOptimization = false;
  3203. let getUrisOptimization = null;
  3204. let somePartialSegmentWithGap = false;
  3205. let isPreloadSegment = false;
  3206. if (this.lowLatencyMode_ && hlsSegment.partialSegments.length) {
  3207. const byterangeOptimizationSupport =
  3208. initSegmentReference && window.ReadableStream &&
  3209. this.config_.hls.allowLowLatencyByteRangeOptimization;
  3210. let partialSyncTime = syncTime;
  3211. for (let i = 0; i < hlsSegment.partialSegments.length; i++) {
  3212. const item = hlsSegment.partialSegments[i];
  3213. const pPreviousReference = i == 0 ?
  3214. previousReference : partialSegmentRefs[partialSegmentRefs.length - 1];
  3215. const pStartTime = (i == 0) ? startTime : pPreviousReference.endTime;
  3216. // If DURATION is missing from this partial segment, use the target
  3217. // partial duration from the top of the playlist, which is a required
  3218. // attribute for content with partial segments.
  3219. const pDuration = Number(item.getAttributeValue('DURATION')) ||
  3220. this.partialTargetDuration_;
  3221. // If for some reason we have neither an explicit duration, nor a target
  3222. // partial duration, we should SKIP this partial segment to avoid
  3223. // duplicating content in the presentation timeline.
  3224. if (!pDuration) {
  3225. continue;
  3226. }
  3227. const pEndTime = pStartTime + pDuration;
  3228. let pStartByte = 0;
  3229. let pEndByte = null;
  3230. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3231. // A preload hinted partial segment may have byterange start info.
  3232. const pByterangeStart = item.getAttributeValue('BYTERANGE-START');
  3233. pStartByte = pByterangeStart ? Number(pByterangeStart) : 0;
  3234. // A preload hinted partial segment may have byterange length info.
  3235. const pByterangeLength = item.getAttributeValue('BYTERANGE-LENGTH');
  3236. if (pByterangeLength) {
  3237. pEndByte = pStartByte + Number(pByterangeLength) - 1;
  3238. } else if (pStartByte) {
  3239. // If we have a non-zero start byte, but no end byte, follow the
  3240. // recommendation of https://tinyurl.com/hls-open-byte-range and
  3241. // set the end byte explicitly to a large integer.
  3242. pEndByte = Number.MAX_SAFE_INTEGER;
  3243. }
  3244. } else {
  3245. const pByterange = item.getAttributeValue('BYTERANGE');
  3246. [pStartByte, pEndByte] =
  3247. this.parseByteRange_(pPreviousReference, pByterange);
  3248. }
  3249. const pUri = item.getAttributeValue('URI');
  3250. if (!pUri) {
  3251. continue;
  3252. }
  3253. let partialStatus = shaka.media.SegmentReference.Status.AVAILABLE;
  3254. if (item.getAttributeValue('GAP') == 'YES') {
  3255. this.manifest_.gapCount++;
  3256. partialStatus = shaka.media.SegmentReference.Status.MISSING;
  3257. somePartialSegmentWithGap = true;
  3258. }
  3259. let uris = null;
  3260. const getPartialUris = () => {
  3261. if (uris == null) {
  3262. goog.asserts.assert(pUri, 'Partial uri should be defined!');
  3263. uris = shaka.hls.Utils.constructSegmentUris(
  3264. getUris(), pUri, variables);
  3265. }
  3266. return uris;
  3267. };
  3268. if (byterangeOptimizationSupport &&
  3269. pStartByte >= 0 && pEndByte != null) {
  3270. getUrisOptimization = getPartialUris;
  3271. segmentWithByteRangeOptimization = true;
  3272. }
  3273. const partial = new shaka.media.SegmentReference(
  3274. pStartTime,
  3275. pEndTime,
  3276. getPartialUris,
  3277. pStartByte,
  3278. pEndByte,
  3279. initSegmentReference,
  3280. /* timestampOffset= */ 0,
  3281. /* appendWindowStart= */ 0,
  3282. /* appendWindowEnd= */ Infinity,
  3283. /* partialReferences= */ [],
  3284. /* tilesLayout= */ '',
  3285. /* tileDuration= */ null,
  3286. partialSyncTime,
  3287. partialStatus,
  3288. aesKey);
  3289. if (item.name == 'EXT-X-PRELOAD-HINT') {
  3290. partial.markAsPreload();
  3291. isPreloadSegment = true;
  3292. }
  3293. // The spec doesn't say that we can assume INDEPENDENT=YES for the
  3294. // first partial segment. It does call the flag "optional", though, and
  3295. // that cases where there are no such flags on any partial segments, it
  3296. // is sensible to assume the first one is independent.
  3297. if (item.getAttributeValue('INDEPENDENT') != 'YES' && i > 0) {
  3298. partial.markAsNonIndependent();
  3299. }
  3300. const pMimeType = getMimeType(pUri);
  3301. if (pMimeType) {
  3302. partial.mimeType = pMimeType;
  3303. if (HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_.has(pMimeType)) {
  3304. partial.initSegmentReference = null;
  3305. }
  3306. }
  3307. partialSegmentRefs.push(partial);
  3308. if (partialSyncTime) {
  3309. partialSyncTime += pDuration;
  3310. }
  3311. } // for-loop of hlsSegment.partialSegments
  3312. }
  3313. // If the segment has EXTINF tag, set the segment's end time, start byte
  3314. // and end byte based on the duration and byterange information.
  3315. // Otherwise, calculate the end time, start / end byte based on its partial
  3316. // segments.
  3317. // Note that the sum of partial segments durations may be slightly different
  3318. // from the parent segment's duration. In this case, use the duration from
  3319. // the parent segment tag.
  3320. if (extinfTag) {
  3321. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  3322. // We're interested in the duration part.
  3323. const extinfValues = extinfTag.value.split(',');
  3324. const duration = Number(extinfValues[0]);
  3325. // Skip segments without duration
  3326. if (duration == 0) {
  3327. return null;
  3328. }
  3329. endTime = startTime + duration;
  3330. } else if (partialSegmentRefs.length) {
  3331. endTime = partialSegmentRefs[partialSegmentRefs.length - 1].endTime;
  3332. } else {
  3333. // Skip segments without duration and without partialsegments
  3334. return null;
  3335. }
  3336. if (segmentWithByteRangeOptimization) {
  3337. // We cannot optimize segments with gaps, or with a start byte that is
  3338. // not 0.
  3339. if (somePartialSegmentWithGap || partialSegmentRefs[0].startByte != 0) {
  3340. segmentWithByteRangeOptimization = false;
  3341. getUrisOptimization = null;
  3342. } else {
  3343. partialSegmentRefs = [];
  3344. }
  3345. }
  3346. // If the segment has EXT-X-BYTERANGE tag, set the start byte and end byte
  3347. // base on the byterange information. If segment has no EXT-X-BYTERANGE tag
  3348. // and has partial segments, set the start byte and end byte base on the
  3349. // partial segments.
  3350. const byterangeTag =
  3351. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-BYTERANGE');
  3352. if (byterangeTag) {
  3353. [startByte, endByte] =
  3354. this.parseByteRange_(previousReference, byterangeTag.value);
  3355. } else if (partialSegmentRefs.length) {
  3356. startByte = partialSegmentRefs[0].startByte;
  3357. endByte = partialSegmentRefs[partialSegmentRefs.length - 1].endByte;
  3358. }
  3359. let tilesLayout = '';
  3360. let tileDuration = null;
  3361. if (type == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3362. // By default in HLS the tilesLayout is 1x1
  3363. tilesLayout = '1x1';
  3364. const tilesTag =
  3365. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-TILES');
  3366. if (tilesTag) {
  3367. tilesLayout = tilesTag.getRequiredAttrValue('LAYOUT');
  3368. const duration = tilesTag.getAttributeValue('DURATION');
  3369. if (duration) {
  3370. tileDuration = Number(duration);
  3371. }
  3372. }
  3373. }
  3374. let uris = null;
  3375. const getSegmentUris = () => {
  3376. if (getUrisOptimization) {
  3377. return getUrisOptimization();
  3378. }
  3379. if (uris == null) {
  3380. uris = shaka.hls.Utils.constructSegmentUris(getUris(),
  3381. hlsSegment.verbatimSegmentUri, variables);
  3382. }
  3383. return uris || [];
  3384. };
  3385. const allPartialSegments = partialSegmentRefs.length > 0 &&
  3386. !!hlsSegment.verbatimSegmentUri;
  3387. const reference = new shaka.media.SegmentReference(
  3388. startTime,
  3389. endTime,
  3390. getSegmentUris,
  3391. startByte,
  3392. endByte,
  3393. initSegmentReference,
  3394. /* timestampOffset= */ 0,
  3395. /* appendWindowStart= */ 0,
  3396. /* appendWindowEnd= */ Infinity,
  3397. partialSegmentRefs,
  3398. tilesLayout,
  3399. tileDuration,
  3400. syncTime,
  3401. status,
  3402. aesKey,
  3403. allPartialSegments,
  3404. );
  3405. const mimeType = getMimeType(hlsSegment.verbatimSegmentUri);
  3406. if (mimeType) {
  3407. reference.mimeType = mimeType;
  3408. if (HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_.has(mimeType)) {
  3409. reference.initSegmentReference = null;
  3410. }
  3411. }
  3412. if (segmentWithByteRangeOptimization) {
  3413. this.lowLatencyByterangeOptimization_ = true;
  3414. reference.markAsByterangeOptimization();
  3415. if (isPreloadSegment) {
  3416. reference.markAsPreload();
  3417. }
  3418. }
  3419. return reference;
  3420. }
  3421. /**
  3422. * Parse the startByte and endByte.
  3423. * @param {shaka.media.SegmentReference} previousReference
  3424. * @param {?string} byterange
  3425. * @return {!Array.<number>} An array with the start byte and end byte.
  3426. * @private
  3427. */
  3428. parseByteRange_(previousReference, byterange) {
  3429. let startByte = 0;
  3430. let endByte = null;
  3431. // If BYTERANGE is not specified, the segment consists of the entire
  3432. // resource.
  3433. if (byterange) {
  3434. const blocks = byterange.split('@');
  3435. const byteLength = Number(blocks[0]);
  3436. if (blocks[1]) {
  3437. startByte = Number(blocks[1]);
  3438. } else {
  3439. goog.asserts.assert(previousReference,
  3440. 'Cannot refer back to previous HLS segment!');
  3441. startByte = previousReference.endByte + 1;
  3442. }
  3443. endByte = startByte + byteLength - 1;
  3444. }
  3445. return [startByte, endByte];
  3446. }
  3447. /**
  3448. * @param {!Array.<!shaka.hls.Tag>} tags
  3449. * @param {string} contentType
  3450. * @param {!Map.<string, string>} variables
  3451. * @param {function():!Array.<string>} getUris
  3452. * @private
  3453. */
  3454. processDateRangeTags_(tags, contentType, variables, getUris) {
  3455. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  3456. if (contentType != ContentType.VIDEO && contentType != ContentType.AUDIO) {
  3457. // DATE-RANGE should only appear in AUDIO or VIDEO playlists.
  3458. // We ignore those that appear in other playlists.
  3459. return;
  3460. }
  3461. const Utils = shaka.hls.Utils;
  3462. const initialProgramDateTime =
  3463. this.presentationTimeline_.getInitialProgramDateTime();
  3464. if (!initialProgramDateTime ||
  3465. this.ignoreManifestProgramDateTimeFor_(contentType)) {
  3466. return;
  3467. }
  3468. let dateRangeTags =
  3469. shaka.hls.Utils.filterTagsByName(tags, 'EXT-X-DATERANGE');
  3470. dateRangeTags = dateRangeTags.sort((a, b) => {
  3471. const aStartDateValue = a.getRequiredAttrValue('START-DATE');
  3472. const bStartDateValue = b.getRequiredAttrValue('START-DATE');
  3473. if (aStartDateValue < bStartDateValue) {
  3474. return -1;
  3475. }
  3476. if (aStartDateValue > bStartDateValue) {
  3477. return 1;
  3478. }
  3479. return 0;
  3480. });
  3481. for (let i = 0; i < dateRangeTags.length; i++) {
  3482. const tag = dateRangeTags[i];
  3483. const id = tag.getRequiredAttrValue('ID');
  3484. if (this.dateRangeIdsEmitted_.has(id)) {
  3485. continue;
  3486. }
  3487. const startDateValue = tag.getRequiredAttrValue('START-DATE');
  3488. const startDate = shaka.util.TXml.parseDate(startDateValue);
  3489. if (isNaN(startDate)) {
  3490. // Invalid START-DATE
  3491. continue;
  3492. }
  3493. goog.asserts.assert(startDate != null, 'Start date should not be null!');
  3494. const startTime = Math.max(0, startDate - initialProgramDateTime);
  3495. let endTime = null;
  3496. const endDateValue = tag.getAttributeValue('END-DATE');
  3497. if (endDateValue) {
  3498. const endDate = shaka.util.TXml.parseDate(endDateValue);
  3499. if (!isNaN(endDate)) {
  3500. goog.asserts.assert(endDate != null, 'End date should not be null!');
  3501. endTime = endDate - initialProgramDateTime;
  3502. if (endTime < 0) {
  3503. // Date range in the past
  3504. continue;
  3505. }
  3506. }
  3507. }
  3508. if (endTime == null) {
  3509. const durationValue = tag.getAttributeValue('DURATION') ||
  3510. tag.getAttributeValue('PLANNED-DURATION');
  3511. if (durationValue) {
  3512. const duration = parseFloat(durationValue);
  3513. if (!isNaN(duration)) {
  3514. endTime = startTime + duration;
  3515. }
  3516. const realEndTime = startDate - initialProgramDateTime + duration;
  3517. if (realEndTime < 0) {
  3518. // Date range in the past
  3519. continue;
  3520. }
  3521. }
  3522. }
  3523. const type = tag.getAttributeValue('CLASS') || 'com.apple.quicktime.HLS';
  3524. const endOnNext = tag.getAttributeValue('END-ON-NEXT') == 'YES';
  3525. if (endTime == null && endOnNext) {
  3526. for (let j = i + 1; j < dateRangeTags.length; j++) {
  3527. const otherDateRangeType =
  3528. dateRangeTags[j].getAttributeValue('CLASS') ||
  3529. 'com.apple.quicktime.HLS';
  3530. if (type != otherDateRangeType) {
  3531. continue;
  3532. }
  3533. const otherDateRangeStartDateValue =
  3534. dateRangeTags[j].getRequiredAttrValue('START-DATE');
  3535. const otherDateRangeStartDate =
  3536. shaka.util.TXml.parseDate(otherDateRangeStartDateValue);
  3537. if (isNaN(otherDateRangeStartDate)) {
  3538. // Invalid START-DATE
  3539. continue;
  3540. }
  3541. if (otherDateRangeStartDate && otherDateRangeStartDate > startDate) {
  3542. endTime = Math.max(0,
  3543. otherDateRangeStartDate - initialProgramDateTime);
  3544. break;
  3545. }
  3546. }
  3547. if (endTime == null) {
  3548. // Since we cannot know when it ends, we omit it for now and in the
  3549. // future with an update we will be able to have more information.
  3550. continue;
  3551. }
  3552. }
  3553. // Exclude these attributes from the metadata since they already go into
  3554. // other fields (eg: startTime or endTime) or are not necessary..
  3555. const excludedAttributes = [
  3556. 'ID',
  3557. 'CLASS',
  3558. 'START-DATE',
  3559. 'END-DATE',
  3560. 'DURATION',
  3561. 'END-ON-NEXT',
  3562. ];
  3563. /* @type {!Array.<shaka.extern.MetadataFrame>} */
  3564. const values = [];
  3565. for (const attribute of tag.attributes) {
  3566. if (excludedAttributes.includes(attribute.name)) {
  3567. continue;
  3568. }
  3569. let data = Utils.variableSubstitution(attribute.value, variables);
  3570. if (attribute.name == 'X-ASSET-URI' ||
  3571. attribute.name == 'X-ASSET-LIST') {
  3572. data = Utils.constructSegmentUris(
  3573. getUris(), attribute.value, variables)[0];
  3574. }
  3575. const metadataFrame = {
  3576. key: attribute.name,
  3577. description: '',
  3578. data,
  3579. mimeType: null,
  3580. pictureType: null,
  3581. };
  3582. values.push(metadataFrame);
  3583. }
  3584. if (values.length) {
  3585. this.playerInterface_.onMetadata(type, startTime, endTime, values);
  3586. }
  3587. this.dateRangeIdsEmitted_.add(id);
  3588. }
  3589. }
  3590. /**
  3591. * Parses shaka.hls.Segment objects into shaka.media.SegmentReferences and
  3592. * get the bandwidth necessary for this segments If it's defined in the
  3593. * playlist.
  3594. *
  3595. * @param {!shaka.hls.Playlist} playlist
  3596. * @param {!Map.<number, number>} mediaSequenceToStartTime
  3597. * @param {!Map.<string, string>} variables
  3598. * @param {function():!Array.<string>} getUris
  3599. * @param {string} type
  3600. * @return {{segments: !Array.<!shaka.media.SegmentReference>,
  3601. * bandwidth: (number|undefined)}}
  3602. * @private
  3603. */
  3604. createSegments_(playlist, mediaSequenceToStartTime, variables,
  3605. getUris, type) {
  3606. /** @type {Array.<!shaka.hls.Segment>} */
  3607. const hlsSegments = playlist.segments;
  3608. goog.asserts.assert(hlsSegments.length, 'Playlist should have segments!');
  3609. /** @type {shaka.media.InitSegmentReference} */
  3610. let initSegmentRef;
  3611. /** @type {shaka.extern.aesKey|undefined} */
  3612. let aesKey = undefined;
  3613. let discontinuitySequence = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3614. playlist.tags, 'EXT-X-DISCONTINUITY-SEQUENCE', 0);
  3615. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3616. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  3617. const skipTag = shaka.hls.Utils.getFirstTagWithName(
  3618. playlist.tags, 'EXT-X-SKIP');
  3619. const skippedSegments =
  3620. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  3621. let position = mediaSequenceNumber + skippedSegments;
  3622. let firstStartTime = 0;
  3623. // For live stream, use the cached value in the mediaSequenceToStartTime
  3624. // map if available.
  3625. if (this.isLive_() && mediaSequenceToStartTime.has(position)) {
  3626. firstStartTime = mediaSequenceToStartTime.get(position);
  3627. }
  3628. // This is for recovering from disconnects.
  3629. if (firstStartTime === 0 &&
  3630. this.presentationType_ == shaka.hls.HlsParser.PresentationType_.LIVE &&
  3631. mediaSequenceToStartTime.size > 0 &&
  3632. !mediaSequenceToStartTime.has(position)) {
  3633. firstStartTime = this.presentationTimeline_.getSegmentAvailabilityStart();
  3634. }
  3635. /** @type {!Array.<!shaka.media.SegmentReference>} */
  3636. const references = [];
  3637. let previousReference = null;
  3638. /** @type {!Array.<{bitrate: number, duration: number}>} */
  3639. const bitrates = [];
  3640. for (let i = 0; i < hlsSegments.length; i++) {
  3641. const item = hlsSegments[i];
  3642. const startTime =
  3643. (i == 0) ? firstStartTime : previousReference.endTime;
  3644. position = mediaSequenceNumber + skippedSegments + i;
  3645. const discontinuityTag = shaka.hls.Utils.getFirstTagWithName(
  3646. item.tags, 'EXT-X-DISCONTINUITY');
  3647. if (discontinuityTag) {
  3648. discontinuitySequence++;
  3649. }
  3650. // Apply new AES tags as you see them, keeping a running total.
  3651. for (const drmTag of item.tags) {
  3652. if (drmTag.name == 'EXT-X-KEY') {
  3653. if (this.isAesMethod_(drmTag.getRequiredAttrValue('METHOD'))) {
  3654. aesKey =
  3655. this.parseAESDrmTag_(drmTag, playlist, getUris, variables);
  3656. } else {
  3657. aesKey = undefined;
  3658. }
  3659. }
  3660. }
  3661. mediaSequenceToStartTime.set(position, startTime);
  3662. initSegmentRef = this.getInitSegmentReference_(playlist,
  3663. item.tags, getUris, variables);
  3664. // If the stream is low latency and the user has not configured the
  3665. // lowLatencyMode, but if it has been configured to activate the
  3666. // lowLatencyMode if a stream of this type is detected, we automatically
  3667. // activate the lowLatencyMode.
  3668. if (!this.lowLatencyMode_) {
  3669. const autoLowLatencyMode = this.playerInterface_.isAutoLowLatencyMode();
  3670. if (autoLowLatencyMode) {
  3671. this.playerInterface_.enableLowLatencyMode();
  3672. this.lowLatencyMode_ = this.playerInterface_.isLowLatencyMode();
  3673. }
  3674. }
  3675. const reference = this.createSegmentReference_(
  3676. initSegmentRef,
  3677. previousReference,
  3678. item,
  3679. startTime,
  3680. variables,
  3681. playlist,
  3682. type,
  3683. getUris,
  3684. aesKey);
  3685. if (reference) {
  3686. const bitrate = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  3687. item.tags, 'EXT-X-BITRATE');
  3688. if (bitrate) {
  3689. bitrates.push({
  3690. bitrate,
  3691. duration: reference.endTime - reference.startTime,
  3692. });
  3693. } else if (bitrates.length) {
  3694. // It applies to every segment between it and the next EXT-X-BITRATE,
  3695. // so we use the latest bitrate value
  3696. const prevBitrate = bitrates.pop();
  3697. prevBitrate.duration += reference.endTime - reference.startTime;
  3698. bitrates.push(prevBitrate);
  3699. }
  3700. previousReference = reference;
  3701. reference.discontinuitySequence = discontinuitySequence;
  3702. if (this.ignoreManifestProgramDateTimeFor_(type) &&
  3703. this.minSequenceNumber_ != null &&
  3704. position < this.minSequenceNumber_) {
  3705. // This segment is ignored as part of our fallback synchronization
  3706. // method.
  3707. } else {
  3708. references.push(reference);
  3709. }
  3710. }
  3711. }
  3712. let bandwidth = undefined;
  3713. if (bitrates.length) {
  3714. const duration = bitrates.reduce((sum, value) => {
  3715. return sum + value.duration;
  3716. }, 0);
  3717. bandwidth = Math.round(bitrates.reduce((sum, value) => {
  3718. return sum + value.bitrate * value.duration;
  3719. }, 0) / duration * 1000);
  3720. }
  3721. // If some segments have sync times, but not all, extrapolate the sync
  3722. // times of the ones with none.
  3723. const someSyncTime = references.some((ref) => ref.syncTime != null);
  3724. if (someSyncTime) {
  3725. for (let i = 0; i < references.length; i++) {
  3726. const reference = references[i];
  3727. if (reference.syncTime != null) {
  3728. // No need to extrapolate.
  3729. continue;
  3730. }
  3731. // Find the nearest segment with syncTime, in either direction.
  3732. // This looks forward and backward simultaneously, keeping track of what
  3733. // to offset the syncTime it finds by as it goes.
  3734. let forwardAdd = 0;
  3735. let forwardI = i;
  3736. /**
  3737. * Look forwards one reference at a time, summing all durations as we
  3738. * go, until we find a reference with a syncTime to use as a basis.
  3739. * This DOES count the original reference, but DOESN'T count the first
  3740. * reference with a syncTime (as we approach it from behind).
  3741. * @return {?number}
  3742. */
  3743. const lookForward = () => {
  3744. const other = references[forwardI];
  3745. if (other) {
  3746. if (other.syncTime != null) {
  3747. return other.syncTime + forwardAdd;
  3748. }
  3749. forwardAdd -= other.endTime - other.startTime;
  3750. forwardI += 1;
  3751. }
  3752. return null;
  3753. };
  3754. let backwardAdd = 0;
  3755. let backwardI = i;
  3756. /**
  3757. * Look backwards one reference at a time, summing all durations as we
  3758. * go, until we find a reference with a syncTime to use as a basis.
  3759. * This DOESN'T count the original reference, but DOES count the first
  3760. * reference with a syncTime (as we approach it from ahead).
  3761. * @return {?number}
  3762. */
  3763. const lookBackward = () => {
  3764. const other = references[backwardI];
  3765. if (other) {
  3766. if (other != reference) {
  3767. backwardAdd += other.endTime - other.startTime;
  3768. }
  3769. if (other.syncTime != null) {
  3770. return other.syncTime + backwardAdd;
  3771. }
  3772. backwardI -= 1;
  3773. }
  3774. return null;
  3775. };
  3776. while (reference.syncTime == null) {
  3777. reference.syncTime = lookBackward();
  3778. if (reference.syncTime == null) {
  3779. reference.syncTime = lookForward();
  3780. }
  3781. }
  3782. }
  3783. }
  3784. // Split the sync times properly among partial segments.
  3785. if (someSyncTime) {
  3786. for (const reference of references) {
  3787. let syncTime = reference.syncTime;
  3788. for (const partial of reference.partialReferences) {
  3789. partial.syncTime = syncTime;
  3790. syncTime += partial.endTime - partial.startTime;
  3791. }
  3792. }
  3793. }
  3794. // lowestSyncTime is a value from a previous playlist update. Use it to
  3795. // set reference start times. If this is the first playlist parse, we will
  3796. // skip this step, and wait until we have sync time across stream types.
  3797. const lowestSyncTime = this.lowestSyncTime_;
  3798. if (someSyncTime && lowestSyncTime != Infinity) {
  3799. if (!this.ignoreManifestProgramDateTimeFor_(type)) {
  3800. for (const reference of references) {
  3801. reference.syncAgainst(lowestSyncTime);
  3802. }
  3803. }
  3804. }
  3805. return {
  3806. segments: references,
  3807. bandwidth,
  3808. };
  3809. }
  3810. /**
  3811. * Attempts to guess stream's mime type based on content type and URI.
  3812. *
  3813. * @param {string} contentType
  3814. * @param {string} codecs
  3815. * @return {?string}
  3816. * @private
  3817. */
  3818. guessMimeTypeBeforeLoading_(contentType, codecs) {
  3819. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3820. if (codecs == 'vtt' || codecs == 'wvtt') {
  3821. // If codecs is 'vtt', it's WebVTT.
  3822. return 'text/vtt';
  3823. } else if (codecs && codecs !== '') {
  3824. // Otherwise, assume MP4-embedded text, since text-based formats tend
  3825. // not to have a codecs string at all.
  3826. return 'application/mp4';
  3827. }
  3828. }
  3829. if (contentType == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  3830. if (!codecs || codecs == 'jpeg') {
  3831. return 'image/jpeg';
  3832. }
  3833. }
  3834. if (contentType == shaka.util.ManifestParserUtils.ContentType.AUDIO) {
  3835. // See: https://bugs.chromium.org/p/chromium/issues/detail?id=489520
  3836. if (codecs == 'mp4a.40.34') {
  3837. return 'audio/mpeg';
  3838. }
  3839. }
  3840. if (codecs == 'mjpg') {
  3841. return 'application/mp4';
  3842. }
  3843. // Not enough information to guess from the content type and codecs.
  3844. return null;
  3845. }
  3846. /**
  3847. * Get a fallback mime type for the content. Used if all the better methods
  3848. * for determining the mime type have failed.
  3849. *
  3850. * @param {string} contentType
  3851. * @return {string}
  3852. * @private
  3853. */
  3854. guessMimeTypeFallback_(contentType) {
  3855. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  3856. // If there was no codecs string and no content-type, assume HLS text
  3857. // streams are WebVTT.
  3858. return 'text/vtt';
  3859. }
  3860. // If the HLS content is lacking in both MIME type metadata and
  3861. // segment file extensions, we fall back to assuming it's MP4.
  3862. const map = shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  3863. return map['mp4'];
  3864. }
  3865. /**
  3866. * @param {!Array.<!shaka.media.SegmentReference>} segments
  3867. * @return {{segment: !shaka.media.SegmentReference, segmentIndex: number}}
  3868. * @private
  3869. */
  3870. getAvailableSegment_(segments) {
  3871. goog.asserts.assert(segments.length, 'Should have segments!');
  3872. // If you wait long enough, requesting the first segment can fail
  3873. // because it has fallen off the left edge of DVR, so to be safer,
  3874. // let's request the middle segment.
  3875. let segmentIndex = this.isLive_() ?
  3876. Math.trunc((segments.length - 1) / 2) : 0;
  3877. let segment = segments[segmentIndex];
  3878. while (segment.getStatus() == shaka.media.SegmentReference.Status.MISSING &&
  3879. (segmentIndex + 1) < segments.length) {
  3880. segmentIndex ++;
  3881. segment = segments[segmentIndex];
  3882. }
  3883. return {segment, segmentIndex};
  3884. }
  3885. /**
  3886. * Attempts to guess stream's mime type.
  3887. *
  3888. * @param {string} contentType
  3889. * @param {string} codecs
  3890. * @param {!Array.<!shaka.media.SegmentReference>} segments
  3891. * @return {!Promise.<string>}
  3892. * @private
  3893. */
  3894. async guessMimeType_(contentType, codecs, segments) {
  3895. const HlsParser = shaka.hls.HlsParser;
  3896. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  3897. const {segment} = this.getAvailableSegment_(segments);
  3898. if (segment.status == shaka.media.SegmentReference.Status.MISSING) {
  3899. return this.guessMimeTypeFallback_(contentType);
  3900. }
  3901. const segmentUris = segment.getUris();
  3902. const parsedUri = new goog.Uri(segmentUris[0]);
  3903. const extension = parsedUri.getPath().split('.').pop();
  3904. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  3905. let mimeType = map[extension];
  3906. if (mimeType) {
  3907. return mimeType;
  3908. }
  3909. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  3910. if (mimeType) {
  3911. return mimeType;
  3912. }
  3913. // The extension map didn't work, so guess based on codecs.
  3914. mimeType = this.guessMimeTypeBeforeLoading_(contentType, codecs);
  3915. if (mimeType) {
  3916. return mimeType;
  3917. }
  3918. // If unable to guess mime type, request a segment and try getting it
  3919. // from the response.
  3920. let contentMimeType;
  3921. const type = shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_SEGMENT;
  3922. const headRequest = shaka.net.NetworkingEngine.makeRequest(
  3923. segmentUris, this.config_.retryParameters);
  3924. try {
  3925. headRequest.method = 'HEAD';
  3926. const response = await this.makeNetworkRequest_(
  3927. headRequest, requestType, {type});
  3928. contentMimeType = response.headers['content-type'];
  3929. } catch (error) {
  3930. if (error &&
  3931. (error.code == shaka.util.Error.Code.HTTP_ERROR ||
  3932. error.code == shaka.util.Error.Code.BAD_HTTP_STATUS)) {
  3933. headRequest.method = 'GET';
  3934. const response = await this.makeNetworkRequest_(
  3935. headRequest, requestType, {type});
  3936. contentMimeType = response.headers['content-type'];
  3937. }
  3938. }
  3939. if (contentMimeType) {
  3940. // Split the MIME type in case the server sent additional parameters.
  3941. return contentMimeType.toLowerCase().split(';')[0];
  3942. }
  3943. return this.guessMimeTypeFallback_(contentType);
  3944. }
  3945. /**
  3946. * Returns a tag with a given name.
  3947. * Throws an error if tag was not found.
  3948. *
  3949. * @param {!Array.<shaka.hls.Tag>} tags
  3950. * @param {string} tagName
  3951. * @return {!shaka.hls.Tag}
  3952. * @private
  3953. */
  3954. getRequiredTag_(tags, tagName) {
  3955. const tag = shaka.hls.Utils.getFirstTagWithName(tags, tagName);
  3956. if (!tag) {
  3957. throw new shaka.util.Error(
  3958. shaka.util.Error.Severity.CRITICAL,
  3959. shaka.util.Error.Category.MANIFEST,
  3960. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, tagName);
  3961. }
  3962. return tag;
  3963. }
  3964. /**
  3965. * @param {shaka.extern.Stream} stream
  3966. * @param {?string} width
  3967. * @param {?string} height
  3968. * @param {?string} frameRate
  3969. * @param {?string} videoRange
  3970. * @param {?string} videoLayout
  3971. * @param {?string} colorGamut
  3972. * @private
  3973. */
  3974. addVideoAttributes_(stream, width, height, frameRate, videoRange,
  3975. videoLayout, colorGamut) {
  3976. if (stream) {
  3977. stream.width = Number(width) || undefined;
  3978. stream.height = Number(height) || undefined;
  3979. stream.frameRate = Number(frameRate) || undefined;
  3980. stream.hdr = videoRange || undefined;
  3981. stream.videoLayout = videoLayout || undefined;
  3982. stream.colorGamut = colorGamut || undefined;
  3983. }
  3984. }
  3985. /**
  3986. * Makes a network request for the manifest and returns a Promise
  3987. * with the resulting data.
  3988. *
  3989. * @param {!Array.<string>} uris
  3990. * @param {boolean=} isPlaylist
  3991. * @return {!Promise.<!shaka.extern.Response>}
  3992. * @private
  3993. */
  3994. requestManifest_(uris, isPlaylist) {
  3995. const requestType = shaka.net.NetworkingEngine.RequestType.MANIFEST;
  3996. const request = shaka.net.NetworkingEngine.makeRequest(
  3997. uris, this.config_.retryParameters);
  3998. const type = isPlaylist ?
  3999. shaka.net.NetworkingEngine.AdvancedRequestType.MEDIA_PLAYLIST :
  4000. shaka.net.NetworkingEngine.AdvancedRequestType.MASTER_PLAYLIST;
  4001. return this.makeNetworkRequest_(request, requestType, {type});
  4002. }
  4003. /**
  4004. * Called when the update timer ticks. Because parsing a manifest is async,
  4005. * this method is async. To work with this, this method will schedule the next
  4006. * update when it finished instead of using a repeating-start.
  4007. *
  4008. * @return {!Promise}
  4009. * @private
  4010. */
  4011. async onUpdate_() {
  4012. shaka.log.info('Updating manifest...');
  4013. goog.asserts.assert(
  4014. this.getUpdatePlaylistDelay_() > 0,
  4015. 'We should only call |onUpdate_| when we are suppose to be updating.');
  4016. // Detect a call to stop()
  4017. if (!this.playerInterface_) {
  4018. return;
  4019. }
  4020. try {
  4021. const startTime = Date.now();
  4022. await this.update();
  4023. // Keep track of how long the longest manifest update took.
  4024. const endTime = Date.now();
  4025. // This may have converted to VOD, in which case we stop updating.
  4026. if (this.isLive_()) {
  4027. const updateDuration = (endTime - startTime) / 1000.0;
  4028. this.averageUpdateDuration_.sample(1, updateDuration);
  4029. const delay = this.getUpdatePlaylistDelay_();
  4030. const finalDelay = Math.max(0,
  4031. delay - this.averageUpdateDuration_.getEstimate());
  4032. this.updatePlaylistTimer_.tickAfter(/* seconds= */ finalDelay);
  4033. }
  4034. } catch (error) {
  4035. // Detect a call to stop() during this.update()
  4036. if (!this.playerInterface_) {
  4037. return;
  4038. }
  4039. goog.asserts.assert(error instanceof shaka.util.Error,
  4040. 'Should only receive a Shaka error');
  4041. if (this.config_.raiseFatalErrorOnManifestUpdateRequestFailure) {
  4042. this.playerInterface_.onError(error);
  4043. return;
  4044. }
  4045. // We will retry updating, so override the severity of the error.
  4046. error.severity = shaka.util.Error.Severity.RECOVERABLE;
  4047. this.playerInterface_.onError(error);
  4048. // Try again very soon.
  4049. this.updatePlaylistTimer_.tickAfter(/* seconds= */ 0.1);
  4050. }
  4051. // Detect a call to stop()
  4052. if (!this.playerInterface_) {
  4053. return;
  4054. }
  4055. this.playerInterface_.onManifestUpdated();
  4056. }
  4057. /**
  4058. * @return {boolean}
  4059. * @private
  4060. */
  4061. isLive_() {
  4062. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  4063. return this.presentationType_ != PresentationType.VOD;
  4064. }
  4065. /**
  4066. * @return {number}
  4067. * @private
  4068. */
  4069. getUpdatePlaylistDelay_() {
  4070. // The HLS spec (RFC 8216) states in 6.3.4:
  4071. // "the client MUST wait for at least the target duration before
  4072. // attempting to reload the Playlist file again".
  4073. // For LL-HLS, the server must add a new partial segment to the Playlist
  4074. // every part target duration.
  4075. return this.lastTargetDuration_;
  4076. }
  4077. /**
  4078. * @param {shaka.hls.HlsParser.PresentationType_} type
  4079. * @private
  4080. */
  4081. setPresentationType_(type) {
  4082. this.presentationType_ = type;
  4083. if (this.presentationTimeline_) {
  4084. this.presentationTimeline_.setStatic(!this.isLive_());
  4085. }
  4086. // If this manifest is not for live content, then we have no reason to
  4087. // update it.
  4088. if (!this.isLive_()) {
  4089. this.updatePlaylistTimer_.stop();
  4090. }
  4091. }
  4092. /**
  4093. * Create a networking request. This will manage the request using the
  4094. * parser's operation manager. If the parser has already been stopped, the
  4095. * request will not be made.
  4096. *
  4097. * @param {shaka.extern.Request} request
  4098. * @param {shaka.net.NetworkingEngine.RequestType} type
  4099. * @param {shaka.extern.RequestContext=} context
  4100. * @return {!Promise.<shaka.extern.Response>}
  4101. * @private
  4102. */
  4103. makeNetworkRequest_(request, type, context) {
  4104. if (!this.operationManager_) {
  4105. throw new shaka.util.Error(
  4106. shaka.util.Error.Severity.CRITICAL,
  4107. shaka.util.Error.Category.PLAYER,
  4108. shaka.util.Error.Code.OPERATION_ABORTED);
  4109. }
  4110. const op = this.playerInterface_.networkingEngine.request(
  4111. type, request, context);
  4112. this.operationManager_.manage(op);
  4113. return op.promise;
  4114. }
  4115. /**
  4116. * @param {string} method
  4117. * @return {boolean}
  4118. * @private
  4119. */
  4120. isAesMethod_(method) {
  4121. return method == 'AES-128' ||
  4122. method == 'AES-256' ||
  4123. method == 'AES-256-CTR';
  4124. }
  4125. /**
  4126. * @param {!shaka.hls.Tag} drmTag
  4127. * @param {string} mimeType
  4128. * @return {?shaka.extern.DrmInfo}
  4129. * @private
  4130. */
  4131. static fairplayDrmParser_(drmTag, mimeType) {
  4132. if (mimeType == 'video/mp2t') {
  4133. throw new shaka.util.Error(
  4134. shaka.util.Error.Severity.CRITICAL,
  4135. shaka.util.Error.Category.MANIFEST,
  4136. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  4137. }
  4138. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  4139. throw new shaka.util.Error(
  4140. shaka.util.Error.Severity.CRITICAL,
  4141. shaka.util.Error.Category.MANIFEST,
  4142. shaka.util.Error.Code
  4143. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  4144. }
  4145. const method = drmTag.getRequiredAttrValue('METHOD');
  4146. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4147. if (!VALID_METHODS.includes(method)) {
  4148. shaka.log.error('FairPlay in HLS is only supported with [',
  4149. VALID_METHODS.join(', '), '], not', method);
  4150. return null;
  4151. }
  4152. let encryptionScheme = 'cenc';
  4153. if (method == 'SAMPLE-AES') {
  4154. // It should be 'cbcs-1-9' but Safari doesn't support it.
  4155. // See: https://github.com/WebKit/WebKit/blob/main/Source/WebCore/Modules/encryptedmedia/MediaKeyEncryptionScheme.idl
  4156. encryptionScheme = 'cbcs';
  4157. }
  4158. /*
  4159. * Even if we're not able to construct initData through the HLS tag, adding
  4160. * a DRMInfo will allow DRM Engine to request a media key system access
  4161. * with the correct keySystem and initDataType
  4162. */
  4163. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4164. 'com.apple.fps', encryptionScheme, [
  4165. {initDataType: 'sinf', initData: new Uint8Array(0), keyId: null},
  4166. ], drmTag.getRequiredAttrValue('URI'));
  4167. return drmInfo;
  4168. }
  4169. /**
  4170. * @param {!shaka.hls.Tag} drmTag
  4171. * @return {?shaka.extern.DrmInfo}
  4172. * @private
  4173. */
  4174. static widevineDrmParser_(drmTag) {
  4175. const method = drmTag.getRequiredAttrValue('METHOD');
  4176. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4177. if (!VALID_METHODS.includes(method)) {
  4178. shaka.log.error('Widevine in HLS is only supported with [',
  4179. VALID_METHODS.join(', '), '], not', method);
  4180. return null;
  4181. }
  4182. let encryptionScheme = 'cenc';
  4183. if (method == 'SAMPLE-AES') {
  4184. encryptionScheme = 'cbcs';
  4185. }
  4186. const uri = drmTag.getRequiredAttrValue('URI');
  4187. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
  4188. // The data encoded in the URI is a PSSH box to be used as init data.
  4189. const pssh = shaka.util.BufferUtils.toUint8(parsedData.data);
  4190. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4191. 'com.widevine.alpha', encryptionScheme, [
  4192. {initDataType: 'cenc', initData: pssh},
  4193. ]);
  4194. const keyId = drmTag.getAttributeValue('KEYID');
  4195. if (keyId) {
  4196. const keyIdLowerCase = keyId.toLowerCase();
  4197. // This value should begin with '0x':
  4198. goog.asserts.assert(
  4199. keyIdLowerCase.startsWith('0x'), 'Incorrect KEYID format!');
  4200. // But the output should not contain the '0x':
  4201. drmInfo.keyIds = new Set([keyIdLowerCase.substr(2)]);
  4202. }
  4203. return drmInfo;
  4204. }
  4205. /**
  4206. * See: https://docs.microsoft.com/en-us/playready/packaging/mp4-based-formats-supported-by-playready-clients?tabs=case4
  4207. *
  4208. * @param {!shaka.hls.Tag} drmTag
  4209. * @return {?shaka.extern.DrmInfo}
  4210. * @private
  4211. */
  4212. static playreadyDrmParser_(drmTag) {
  4213. const method = drmTag.getRequiredAttrValue('METHOD');
  4214. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4215. if (!VALID_METHODS.includes(method)) {
  4216. shaka.log.error('PlayReady in HLS is only supported with [',
  4217. VALID_METHODS.join(', '), '], not', method);
  4218. return null;
  4219. }
  4220. let encryptionScheme = 'cenc';
  4221. if (method == 'SAMPLE-AES') {
  4222. encryptionScheme = 'cbcs';
  4223. }
  4224. const uri = drmTag.getRequiredAttrValue('URI');
  4225. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri.split('?')[0]);
  4226. // The data encoded in the URI is a PlayReady Pro Object, so we need
  4227. // convert it to pssh.
  4228. const data = shaka.util.BufferUtils.toUint8(parsedData.data);
  4229. const systemId = new Uint8Array([
  4230. 0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86,
  4231. 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95,
  4232. ]);
  4233. const keyIds = new Set();
  4234. const psshVersion = 0;
  4235. const pssh =
  4236. shaka.util.Pssh.createPssh(data, systemId, keyIds, psshVersion);
  4237. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  4238. 'com.microsoft.playready', encryptionScheme, [
  4239. {initDataType: 'cenc', initData: pssh},
  4240. ]);
  4241. return drmInfo;
  4242. }
  4243. /**
  4244. * See: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-5.1
  4245. *
  4246. * @param {!shaka.hls.Tag} drmTag
  4247. * @param {string} mimeType
  4248. * @param {function():!Array.<string>} getUris
  4249. * @param {?shaka.media.InitSegmentReference} initSegmentRef
  4250. * @param {?Map.<string, string>=} variables
  4251. * @return {!Promise.<?shaka.extern.DrmInfo>}
  4252. * @private
  4253. */
  4254. async identityDrmParser_(drmTag, mimeType, getUris, initSegmentRef,
  4255. variables) {
  4256. if (mimeType == 'video/mp2t') {
  4257. throw new shaka.util.Error(
  4258. shaka.util.Error.Severity.CRITICAL,
  4259. shaka.util.Error.Category.MANIFEST,
  4260. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  4261. }
  4262. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  4263. throw new shaka.util.Error(
  4264. shaka.util.Error.Severity.CRITICAL,
  4265. shaka.util.Error.Category.MANIFEST,
  4266. shaka.util.Error.Code
  4267. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  4268. }
  4269. const method = drmTag.getRequiredAttrValue('METHOD');
  4270. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  4271. if (!VALID_METHODS.includes(method)) {
  4272. shaka.log.error('Identity (ClearKey) in HLS is only supported with [',
  4273. VALID_METHODS.join(', '), '], not', method);
  4274. return null;
  4275. }
  4276. const keyUris = shaka.hls.Utils.constructSegmentUris(
  4277. getUris(), drmTag.getRequiredAttrValue('URI'), variables);
  4278. let key;
  4279. if (keyUris[0].startsWith('data:text/plain;base64,')) {
  4280. key = shaka.util.Uint8ArrayUtils.toHex(
  4281. shaka.util.Uint8ArrayUtils.fromBase64(
  4282. keyUris[0].split('data:text/plain;base64,').pop()));
  4283. } else {
  4284. const keyMapKey = keyUris.sort().join('');
  4285. if (!this.identityKeyMap_.has(keyMapKey)) {
  4286. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  4287. const request = shaka.net.NetworkingEngine.makeRequest(
  4288. keyUris, this.config_.retryParameters);
  4289. const keyResponse = this.makeNetworkRequest_(request, requestType);
  4290. this.identityKeyMap_.set(keyMapKey, keyResponse);
  4291. }
  4292. const keyResponse = await this.identityKeyMap_.get(keyMapKey);
  4293. key = shaka.util.Uint8ArrayUtils.toHex(keyResponse.data);
  4294. }
  4295. // NOTE: The ClearKey CDM requires a key-id to key mapping. HLS doesn't
  4296. // provide a key ID anywhere. So although we could use the 'URI' attribute
  4297. // to fetch the actual 16-byte key, without a key ID, we can't provide this
  4298. // automatically to the ClearKey CDM. By default we assume that keyId is 0,
  4299. // but we will try to get key ID from Init Segment.
  4300. // If the application want override this behavior will have to use
  4301. // player.configure('drm.clearKeys', { ... }) to provide the key IDs
  4302. // and keys or player.configure('drm.servers.org\.w3\.clearkey', ...) to
  4303. // provide a ClearKey license server URI.
  4304. let keyId = '00000000000000000000000000000000';
  4305. if (initSegmentRef) {
  4306. let defaultKID;
  4307. if (this.identityKidMap_.has(initSegmentRef)) {
  4308. defaultKID = this.identityKidMap_.get(initSegmentRef);
  4309. } else {
  4310. const initSegmentRequest = shaka.util.Networking.createSegmentRequest(
  4311. initSegmentRef.getUris(),
  4312. initSegmentRef.getStartByte(),
  4313. initSegmentRef.getEndByte(),
  4314. this.config_.retryParameters);
  4315. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  4316. const initType =
  4317. shaka.net.NetworkingEngine.AdvancedRequestType.INIT_SEGMENT;
  4318. const initResponse = await this.makeNetworkRequest_(
  4319. initSegmentRequest, requestType, {type: initType});
  4320. defaultKID = shaka.media.SegmentUtils.getDefaultKID(
  4321. initResponse.data);
  4322. this.identityKidMap_.set(initSegmentRef, defaultKID);
  4323. }
  4324. if (defaultKID) {
  4325. keyId = defaultKID;
  4326. }
  4327. }
  4328. const clearkeys = new Map();
  4329. clearkeys.set(keyId, key);
  4330. let encryptionScheme = 'cenc';
  4331. if (method == 'SAMPLE-AES') {
  4332. encryptionScheme = 'cbcs';
  4333. }
  4334. return shaka.util.ManifestParserUtils.createDrmInfoFromClearKeys(
  4335. clearkeys, encryptionScheme);
  4336. }
  4337. };
  4338. /**
  4339. * @typedef {{
  4340. * stream: !shaka.extern.Stream,
  4341. * type: string,
  4342. * redirectUris: !Array.<string>,
  4343. * getUris: function():!Array.<string>,
  4344. * minTimestamp: number,
  4345. * maxTimestamp: number,
  4346. * mediaSequenceToStartTime: !Map.<number, number>,
  4347. * canSkipSegments: boolean,
  4348. * canBlockReload: boolean,
  4349. * hasEndList: boolean,
  4350. * firstSequenceNumber: number,
  4351. * nextMediaSequence: number,
  4352. * nextPart: number,
  4353. * loadedOnce: boolean
  4354. * }}
  4355. *
  4356. * @description
  4357. * Contains a stream and information about it.
  4358. *
  4359. * @property {!shaka.extern.Stream} stream
  4360. * The Stream itself.
  4361. * @property {string} type
  4362. * The type value. Could be 'video', 'audio', 'text', or 'image'.
  4363. * @property {!Array.<string>} redirectUris
  4364. * The redirect URIs.
  4365. * @property {function():!Array.<string>} getUris
  4366. * The verbatim media playlist URIs, as it appeared in the master playlist.
  4367. * @property {number} minTimestamp
  4368. * The minimum timestamp found in the stream.
  4369. * @property {number} maxTimestamp
  4370. * The maximum timestamp found in the stream.
  4371. * @property {!Map.<number, number>} mediaSequenceToStartTime
  4372. * A map of media sequence numbers to media start times.
  4373. * Only used for VOD content.
  4374. * @property {boolean} canSkipSegments
  4375. * True if the server supports delta playlist updates, and we can send a
  4376. * request for a playlist that can skip older media segments.
  4377. * @property {boolean} canBlockReload
  4378. * True if the server supports blocking playlist reload, and we can send a
  4379. * request for a playlist that can block reload until some segments are
  4380. * present.
  4381. * @property {boolean} hasEndList
  4382. * True if the stream has an EXT-X-ENDLIST tag.
  4383. * @property {number} firstSequenceNumber
  4384. * The sequence number of the first reference. Only calculated if needed.
  4385. * @property {number} nextMediaSequence
  4386. * The next media sequence.
  4387. * @property {number} nextPart
  4388. * The next part.
  4389. * @property {boolean} loadedOnce
  4390. * True if the stream has been loaded at least once.
  4391. */
  4392. shaka.hls.HlsParser.StreamInfo;
  4393. /**
  4394. * @typedef {{
  4395. * audio: !Array.<shaka.hls.HlsParser.StreamInfo>,
  4396. * video: !Array.<shaka.hls.HlsParser.StreamInfo>
  4397. * }}
  4398. *
  4399. * @description Audio and video stream infos.
  4400. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} audio
  4401. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} video
  4402. */
  4403. shaka.hls.HlsParser.StreamInfos;
  4404. /**
  4405. * @const {!Object.<string, string>}
  4406. * @private
  4407. */
  4408. shaka.hls.HlsParser.RAW_FORMATS_TO_MIME_TYPES_ = {
  4409. 'aac': 'audio/aac',
  4410. 'ac3': 'audio/ac3',
  4411. 'ec3': 'audio/ec3',
  4412. 'mp3': 'audio/mpeg',
  4413. };
  4414. /**
  4415. * @const {!Object.<string, string>}
  4416. * @private
  4417. */
  4418. shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_ = {
  4419. 'mp4': 'audio/mp4',
  4420. 'mp4a': 'audio/mp4',
  4421. 'm4s': 'audio/mp4',
  4422. 'm4i': 'audio/mp4',
  4423. 'm4a': 'audio/mp4',
  4424. 'm4f': 'audio/mp4',
  4425. 'cmfa': 'audio/mp4',
  4426. // MPEG2-TS also uses video/ for audio: https://bit.ly/TsMse
  4427. 'ts': 'video/mp2t',
  4428. 'tsa': 'video/mp2t',
  4429. };
  4430. /**
  4431. * @const {!Object.<string, string>}
  4432. * @private
  4433. */
  4434. shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_ = {
  4435. 'mp4': 'video/mp4',
  4436. 'mp4v': 'video/mp4',
  4437. 'm4s': 'video/mp4',
  4438. 'm4i': 'video/mp4',
  4439. 'm4v': 'video/mp4',
  4440. 'm4f': 'video/mp4',
  4441. 'cmfv': 'video/mp4',
  4442. 'ts': 'video/mp2t',
  4443. 'tsv': 'video/mp2t',
  4444. };
  4445. /**
  4446. * @const {!Object.<string, string>}
  4447. * @private
  4448. */
  4449. shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_ = {
  4450. 'mp4': 'application/mp4',
  4451. 'm4s': 'application/mp4',
  4452. 'm4i': 'application/mp4',
  4453. 'm4f': 'application/mp4',
  4454. 'cmft': 'application/mp4',
  4455. 'vtt': 'text/vtt',
  4456. 'webvtt': 'text/vtt',
  4457. 'ttml': 'application/ttml+xml',
  4458. };
  4459. /**
  4460. * @const {!Object.<string, string>}
  4461. * @private
  4462. */
  4463. shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_ = {
  4464. 'jpg': 'image/jpeg',
  4465. 'png': 'image/png',
  4466. 'svg': 'image/svg+xml',
  4467. 'webp': 'image/webp',
  4468. 'avif': 'image/avif',
  4469. };
  4470. /**
  4471. * @const {!Object.<string, !Object.<string, string>>}
  4472. * @private
  4473. */
  4474. shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_ = {
  4475. 'audio': shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_,
  4476. 'video': shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_,
  4477. 'text': shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_,
  4478. 'image': shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_,
  4479. };
  4480. /**
  4481. * MIME types without init segment.
  4482. *
  4483. * @const {!Set.<string>}
  4484. * @private
  4485. */
  4486. shaka.hls.HlsParser.MIME_TYPES_WITHOUT_INIT_SEGMENT_ = new Set([
  4487. 'video/mp2t',
  4488. // Containerless types
  4489. ...shaka.util.MimeUtils.RAW_FORMATS,
  4490. ]);
  4491. /**
  4492. * @typedef {function(!shaka.hls.Tag, string):?shaka.extern.DrmInfo}
  4493. * @private
  4494. */
  4495. shaka.hls.HlsParser.DrmParser_;
  4496. /**
  4497. * @const {!Object.<string, shaka.hls.HlsParser.DrmParser_>}
  4498. * @private
  4499. */
  4500. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_ = {
  4501. 'com.apple.streamingkeydelivery':
  4502. shaka.hls.HlsParser.fairplayDrmParser_,
  4503. 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed':
  4504. shaka.hls.HlsParser.widevineDrmParser_,
  4505. 'com.microsoft.playready':
  4506. shaka.hls.HlsParser.playreadyDrmParser_,
  4507. };
  4508. /**
  4509. * @enum {string}
  4510. * @private
  4511. */
  4512. shaka.hls.HlsParser.PresentationType_ = {
  4513. VOD: 'VOD',
  4514. EVENT: 'EVENT',
  4515. LIVE: 'LIVE',
  4516. };
  4517. shaka.media.ManifestParser.registerParserByMime(
  4518. 'application/x-mpegurl', () => new shaka.hls.HlsParser());
  4519. shaka.media.ManifestParser.registerParserByMime(
  4520. 'application/vnd.apple.mpegurl', () => new shaka.hls.HlsParser());