Source: lib/media/segment_utils.js

/*! @license
 * Shaka Player
 * Copyright 2016 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

goog.provide('shaka.media.SegmentUtils');

goog.require('goog.asserts');
goog.require('shaka.log');
goog.require('shaka.media.Capabilities');
goog.require('shaka.media.ClosedCaptionParser');
goog.require('shaka.util.BufferUtils');
goog.require('shaka.util.ManifestParserUtils');
goog.require('shaka.util.MimeUtils');
goog.require('shaka.util.Mp4BoxParsers');
goog.require('shaka.util.Mp4Parser');

/**
 * @summary Utility functions for segment parsing.
 */
shaka.media.SegmentUtils = class {
  /**
   * @param {string} mimeType
   * @return {shaka.media.SegmentUtils.BasicInfo}
   */
  static getBasicInfoFromMimeType(mimeType) {
    const baseMimeType = shaka.util.MimeUtils.getBasicType(mimeType);
    const type = baseMimeType.split('/')[0];
    const codecs = shaka.util.MimeUtils.getCodecs(mimeType);
    return {
      type: type,
      mimeType: baseMimeType,
      codecs: codecs,
      language: null,
      height: null,
      width: null,
      channelCount: null,
      sampleRate: null,
      closedCaptions: new Map(),
      videoRange: null,
      colorGamut: null,
    };
  }
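
  // Illustrative usage sketch (not part of the library): for a plain MIME
  // string only the type/codec fields are filled in, and the media-specific
  // fields stay null, e.g.
  //
  //   const info = shaka.media.SegmentUtils.getBasicInfoFromMimeType(
  //       'video/mp4; codecs="avc1.42E01E, mp4a.40.2"');
  //   // info.type == 'video'
  //   // info.mimeType == 'video/mp4'
  //   // info.codecs == 'avc1.42E01E, mp4a.40.2'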

  /**
   * @param {!BufferSource} data
   * @return {?shaka.media.SegmentUtils.BasicInfo}
   */
  static getBasicInfoFromTs(data) {
    const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
    const tsParser = new shaka.util.TsParser().parse(uint8ArrayData);
    const tsCodecs = tsParser.getCodecs();
    const videoInfo = tsParser.getVideoInfo();
    const codecs = [];
    let hasAudio = false;
    let hasVideo = false;
    switch (tsCodecs.audio) {
      case 'aac':
      case 'aac-loas':
        codecs.push('mp4a.40.2');
        hasAudio = true;
        break;
      case 'mp3':
        codecs.push('mp4a.40.34');
        hasAudio = true;
        break;
      case 'ac3':
        codecs.push('ac-3');
        hasAudio = true;
        break;
      case 'ec3':
        codecs.push('ec-3');
        hasAudio = true;
        break;
      case 'opus':
        codecs.push('opus');
        hasAudio = true;
        break;
    }
    switch (tsCodecs.video) {
      case 'avc':
        if (videoInfo.codec) {
          codecs.push(videoInfo.codec);
        } else {
          codecs.push('avc1.42E01E');
        }
        hasVideo = true;
        break;
      case 'hvc':
        if (videoInfo.codec) {
          codecs.push(videoInfo.codec);
        } else {
          codecs.push('hvc1.1.6.L93.90');
        }
        hasVideo = true;
        break;
      case 'av1':
        codecs.push('av01.0.01M.08');
        hasVideo = true;
        break;
    }
    if (!codecs.length) {
      return null;
    }
    const onlyAudio = hasAudio && !hasVideo;
    const closedCaptions = new Map();
    if (hasVideo) {
      const captionParser = new shaka.media.ClosedCaptionParser('video/mp2t');
      captionParser.parseFrom(data);
      for (const stream of captionParser.getStreams()) {
        closedCaptions.set(stream, stream);
      }
      captionParser.reset();
    }
    return {
      type: onlyAudio ? 'audio' : 'video',
      mimeType: 'video/mp2t',
      codecs: codecs.join(', '),
      language: null,
      height: videoInfo.height,
      width: videoInfo.width,
      channelCount: null,
      sampleRate: null,
      closedCaptions: closedCaptions,
      videoRange: null,
      colorGamut: null,
    };
  }
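
  // Sketch of how a caller might sniff a transport-stream segment
  // (illustrative only; `segmentData` is a hypothetical BufferSource holding
  // an MPEG-2 TS segment):
  //
  //   const info = shaka.media.SegmentUtils.getBasicInfoFromTs(segmentData);
  //   // For a muxed AAC + H.264 segment this typically yields
  //   // info.mimeType == 'video/mp2t' and info.codecs containing both an
  //   // 'mp4a.40.2' and an 'avc1.*' entry; a null return means no
  //   // recognized audio or video codec was found in the stream.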

  /**
   * @param {?BufferSource} initData
   * @param {!BufferSource} data
   * @return {?shaka.media.SegmentUtils.BasicInfo}
   */
  static getBasicInfoFromMp4(initData, data) {
    const Mp4Parser = shaka.util.Mp4Parser;
    const SegmentUtils = shaka.media.SegmentUtils;

    const audioCodecs = [];
    let videoCodecs = [];

    let hasAudio = false;
    let hasVideo = false;

    const addCodec = (codec) => {
      const codecLC = codec.toLowerCase();
      switch (codecLC) {
        case 'avc1':
        case 'avc3':
          videoCodecs.push(codecLC + '.42E01E');
          hasVideo = true;
          break;
        case 'hev1':
        case 'hvc1':
          videoCodecs.push(codecLC + '.1.6.L93.90');
          hasVideo = true;
          break;
        case 'dvh1':
        case 'dvhe':
          videoCodecs.push(codecLC + '.05.04');
          hasVideo = true;
          break;
        case 'vp09':
          videoCodecs.push(codecLC + '.00.10.08');
          hasVideo = true;
          break;
        case 'av01':
          videoCodecs.push(codecLC + '.0.01M.08');
          hasVideo = true;
          break;
        case 'mp4a':
          // We assume AAC, but this can be wrong since mp4a supports
          // other codecs.
          audioCodecs.push('mp4a.40.2');
          hasAudio = true;
          break;
        case 'ac-3':
        case 'ec-3':
        case 'ac-4':
        case 'opus':
        case 'flac':
          audioCodecs.push(codecLC);
          hasAudio = true;
          break;
      }
    };

    const codecBoxParser = (box) => addCodec(box.name);

    /** @type {?string} */
    let language = null;
    /** @type {?string} */
    let height = null;
    /** @type {?string} */
    let width = null;
    /** @type {?number} */
    let channelCount = null;
    /** @type {?number} */
    let sampleRate = null;
    /** @type {?string} */
    let realVideoRange = null;
    /** @type {?string} */
    let realColorGamut = null;

    /** @type {?string} */
    let baseBox;

    const genericAudioBox = (box) => {
      const parsedAudioSampleEntryBox =
          shaka.util.Mp4BoxParsers.audioSampleEntry(box.reader);
      channelCount = parsedAudioSampleEntryBox.channelCount;
      sampleRate = parsedAudioSampleEntryBox.sampleRate;
      codecBoxParser(box);
    };

    const genericVideoBox = (box) => {
      baseBox = box.name;
      const parsedVisualSampleEntryBox =
          shaka.util.Mp4BoxParsers.visualSampleEntry(box.reader);
      width = String(parsedVisualSampleEntryBox.width);
      height = String(parsedVisualSampleEntryBox.height);
      if (box.reader.hasMoreData()) {
        Mp4Parser.children(box);
      }
    };

    new Mp4Parser()
        .box('moov', Mp4Parser.children)
        .box('trak', Mp4Parser.children)
        .box('mdia', Mp4Parser.children)
        .fullBox('mdhd', (box) => {
          goog.asserts.assert(
              box.version != null,
              'MDHD is a full box and should have a valid version.');
          const parsedMDHDBox = shaka.util.Mp4BoxParsers.parseMDHD(
              box.reader, box.version);
          language = parsedMDHDBox.language;
        })
        .box('minf', Mp4Parser.children)
        .box('stbl', Mp4Parser.children)
        .fullBox('stsd', Mp4Parser.sampleDescription)
        // AUDIO
        // These are the various boxes that signal a codec.
        .box('mp4a', (box) => {
          const parsedAudioSampleEntryBox =
              shaka.util.Mp4BoxParsers.audioSampleEntry(box.reader);
          channelCount = parsedAudioSampleEntryBox.channelCount;
          sampleRate = parsedAudioSampleEntryBox.sampleRate;
          if (box.reader.hasMoreData()) {
            Mp4Parser.children(box);
          } else {
            codecBoxParser(box);
          }
        })
        .box('esds', (box) => {
          const parsedESDSBox = shaka.util.Mp4BoxParsers.parseESDS(box.reader);
          audioCodecs.push(parsedESDSBox.codec);
          hasAudio = true;
        })
        .box('ac-3', genericAudioBox)
        .box('ec-3', genericAudioBox)
        .box('ac-4', genericAudioBox)
        .box('Opus', genericAudioBox)
        .box('fLaC', genericAudioBox)
        // VIDEO
        // These are the various boxes that signal a codec.
        .box('avc1', genericVideoBox)
        .box('avc3', genericVideoBox)
        .box('hev1', genericVideoBox)
        .box('hvc1', genericVideoBox)
        .box('dva1', genericVideoBox)
        .box('dvav', genericVideoBox)
        .box('dvh1', genericVideoBox)
        .box('dvhe', genericVideoBox)
        .box('vp09', genericVideoBox)
        .box('av01', genericVideoBox)
        .box('avcC', (box) => {
          let codecBase = baseBox || '';
          switch (baseBox) {
            case 'dvav':
              codecBase = 'avc3';
              break;
            case 'dva1':
              codecBase = 'avc1';
              break;
          }
          const parsedAVCCBox = shaka.util.Mp4BoxParsers.parseAVCC(
              codecBase, box.reader, box.name);
          videoCodecs.push(parsedAVCCBox.codec);
          hasVideo = true;
        })
        .box('hvcC', (box) => {
          let codecBase = baseBox || '';
          switch (baseBox) {
            case 'dvh1':
              codecBase = 'hvc1';
              break;
            case 'dvhe':
              codecBase = 'hev1';
              break;
          }
          const parsedHVCCBox = shaka.util.Mp4BoxParsers.parseHVCC(
              codecBase, box.reader, box.name);
          videoCodecs.push(parsedHVCCBox.codec);
          hasVideo = true;
        })
        .box('dvcC', (box) => {
          let codecBase = baseBox || '';
          switch (baseBox) {
            case 'hvc1':
              codecBase = 'dvh1';
              break;
            case 'hev1':
              codecBase = 'dvhe';
              break;
            case 'avc1':
              codecBase = 'dva1';
              break;
            case 'avc3':
              codecBase = 'dvav';
              break;
            case 'av01':
              codecBase = 'dav1';
              break;
          }
          const parsedDVCCBox = shaka.util.Mp4BoxParsers.parseDVCC(
              codecBase, box.reader, box.name);
          videoCodecs.push(parsedDVCCBox.codec);
          hasVideo = true;
        })
        .box('dvvC', (box) => {
          let codecBase = baseBox || '';
          switch (baseBox) {
            case 'hvc1':
              codecBase = 'dvh1';
              break;
            case 'hev1':
              codecBase = 'dvhe';
              break;
            case 'avc1':
              codecBase = 'dva1';
              break;
            case 'avc3':
              codecBase = 'dvav';
              break;
            case 'av01':
              codecBase = 'dav1';
              break;
          }
          const parsedDVCCBox = shaka.util.Mp4BoxParsers.parseDVVC(
              codecBase, box.reader, box.name);
          videoCodecs.push(parsedDVCCBox.codec);
          hasVideo = true;
        })
        .fullBox('vpcC', (box) => {
          const codecBase = baseBox || '';
          const parsedVPCCBox = shaka.util.Mp4BoxParsers.parseVPCC(
              codecBase, box.reader, box.name);
          videoCodecs.push(parsedVPCCBox.codec);
          hasVideo = true;
        })
        .box('av1C', (box) => {
          let codecBase = baseBox || '';
          switch (baseBox) {
            case 'dav1':
              codecBase = 'av01';
              break;
          }
          const parsedAV1CBox = shaka.util.Mp4BoxParsers.parseAV1C(
              codecBase, box.reader, box.name);
          videoCodecs.push(parsedAV1CBox.codec);
          hasVideo = true;
        })
        // This signals an encrypted sample, which we can go inside of to
        // find the codec used.
        // Note: If encrypted, you can only have audio or video, not both.
        .box('enca', Mp4Parser.audioSampleEntry)
        .box('encv', Mp4Parser.visualSampleEntry)
        .box('sinf', Mp4Parser.children)
        .box('frma', (box) => {
          const {codec} = shaka.util.Mp4BoxParsers.parseFRMA(box.reader);
          addCodec(codec);
        })
        .box('colr', (box) => {
          videoCodecs = videoCodecs.map((codec) => {
            if (codec.startsWith('av01.')) {
              return shaka.util.Mp4BoxParsers.updateAV1CodecWithCOLRBox(
                  codec, box.reader);
            }
            return codec;
          });
          const {videoRange, colorGamut} =
              shaka.util.Mp4BoxParsers.parseCOLR(box.reader);
          realVideoRange = videoRange;
          realColorGamut = colorGamut;
        })
        .parse(initData || data, /* partialOkay= */ true);

    if (!audioCodecs.length && !videoCodecs.length) {
      return null;
    }
    const onlyAudio = hasAudio && !hasVideo;
    const closedCaptions = new Map();
    if (hasVideo) {
      const captionParser = new shaka.media.ClosedCaptionParser('video/mp4');
      if (initData) {
        captionParser.init(initData);
      }
      captionParser.parseFrom(data);
      for (const stream of captionParser.getStreams()) {
        closedCaptions.set(stream, stream);
      }
      captionParser.reset();
    }
    const codecs = audioCodecs.concat(videoCodecs);
    return {
      type: onlyAudio ? 'audio' : 'video',
      mimeType: onlyAudio ? 'audio/mp4' : 'video/mp4',
      codecs: SegmentUtils.codecsFiltering(codecs).join(', '),
      language: language,
      height: height,
      width: width,
      channelCount: channelCount,
      sampleRate: sampleRate,
      closedCaptions: closedCaptions,
      videoRange: realVideoRange,
      colorGamut: realColorGamut,
    };
  }
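
  // Illustrative call pattern (not part of the library): for fMP4 content the
  // codec information normally lives in the init segment, so a caller would
  // typically pass both buffers, e.g.
  //
  //   const info = shaka.media.SegmentUtils.getBasicInfoFromMp4(
  //       initSegmentBuffer, mediaSegmentBuffer);  // hypothetical variables
  //   // info.codecs might be 'mp4a.40.2, avc1.64001f', with width/height,
  //   // channelCount/sampleRate, and any closed captions filled in.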

  /**
   * @param {!Array.<string>} codecs
   * @return {!Array.<string>} codecs
   */
  static codecsFiltering(codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const ManifestParserUtils = shaka.util.ManifestParserUtils;
    const SegmentUtils = shaka.media.SegmentUtils;
    const allCodecs = SegmentUtils.filterDuplicateCodecs_(codecs);
    const audioCodecs =
        ManifestParserUtils.guessAllCodecsSafe(ContentType.AUDIO, allCodecs);
    const videoCodecs =
        ManifestParserUtils.guessAllCodecsSafe(ContentType.VIDEO, allCodecs);
    const textCodecs =
        ManifestParserUtils.guessAllCodecsSafe(ContentType.TEXT, allCodecs);
    const validVideoCodecs = SegmentUtils.chooseBetterCodecs_(videoCodecs);
    const finalCodecs =
        audioCodecs.concat(validVideoCodecs).concat(textCodecs);
    if (allCodecs.length && !finalCodecs.length) {
      return allCodecs;
    }
    return finalCodecs;
  }
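
  // Example of the filtering (illustrative; the exact output depends on
  // guessAllCodecsSafe and on platform codec support):
  //
  //   shaka.media.SegmentUtils.codecsFiltering(
  //       ['avc1.42E01E', 'avc1.640028', 'mp4a.40.2']);
  //   // Duplicate 'avc1.*' entries collapse to the first one, and the result
  //   // is reassembled audio-first: ['mp4a.40.2', 'avc1.42E01E'].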

  /**
   * @param {!Array.<string>} codecs
   * @return {!Array.<string>} codecs
   * @private
   */
  static filterDuplicateCodecs_(codecs) {
    // Filter out duplicate codecs.
    const seen = new Set();
    const ret = [];
    for (const codec of codecs) {
      const shortCodec = shaka.util.MimeUtils.getCodecBase(codec);
      if (!seen.has(shortCodec)) {
        ret.push(codec);
        seen.add(shortCodec);
      } else {
        shaka.log.debug('Ignoring duplicate codec');
      }
    }
    return ret;
  }
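
  // For example (deduplication is keyed on the codec base, so two HEVC
  // strings with different profiles count as duplicates):
  //
  //   filterDuplicateCodecs_(['hvc1.2.4.L123.B0', 'hvc1.1.6.L93.90'])
  //   // -> ['hvc1.2.4.L123.B0']  (the first entry for each base wins)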

  /**
   * Prioritizes Dolby Vision if supported. This is necessary because with
   * Dolby Vision we could have hvcC and dvcC boxes at the same time.
   *
   * @param {!Array.<string>} codecs
   * @return {!Array.<string>} codecs
   * @private
   */
  static chooseBetterCodecs_(codecs) {
    if (codecs.length <= 1) {
      return codecs;
    }
    const dolbyVision = codecs.find((codec) => {
      return codec.startsWith('dvav.') ||
          codec.startsWith('dva1.') ||
          codec.startsWith('dvh1.') ||
          codec.startsWith('dvhe.') ||
          codec.startsWith('dav1.') ||
          codec.startsWith('dvc1.') ||
          codec.startsWith('dvi1.');
    });
    if (!dolbyVision) {
      return codecs;
    }
    const type = `video/mp4; codecs="${dolbyVision}"`;
    if (shaka.media.Capabilities.isTypeSupported(type)) {
      return [dolbyVision];
    }
    return codecs.filter((codec) => codec != dolbyVision);
  }
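
  // Illustrative behavior: given a dual-codec list like
  // ['hvc1.2.4.L153.B0', 'dvh1.05.06'], the result is ['dvh1.05.06'] when the
  // platform reports support for the Dolby Vision type, and
  // ['hvc1.2.4.L153.B0'] (the non-Dolby-Vision codec) otherwise.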

  /**
   * @param {!BufferSource} data
   * @return {?string}
   */
  static getDefaultKID(data) {
    const Mp4Parser = shaka.util.Mp4Parser;

    let defaultKID = null;
    new Mp4Parser()
        .box('moov', Mp4Parser.children)
        .box('trak', Mp4Parser.children)
        .box('mdia', Mp4Parser.children)
        .box('minf', Mp4Parser.children)
        .box('stbl', Mp4Parser.children)
        .fullBox('stsd', Mp4Parser.sampleDescription)
        .box('encv', Mp4Parser.visualSampleEntry)
        .box('enca', Mp4Parser.audioSampleEntry)
        .box('sinf', Mp4Parser.children)
        .box('schi', Mp4Parser.children)
        .fullBox('tenc', (box) => {
          const parsedTENCBox = shaka.util.Mp4BoxParsers.parseTENC(box.reader);
          defaultKID = parsedTENCBox.defaultKID;
        })
        .parse(data, /* partialOkay= */ true);
    return defaultKID;
  }
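
  // In other words, this walks moov > trak > mdia > minf > stbl > stsd >
  // encv/enca > sinf > schi > tenc in an init segment and returns the 'tenc'
  // default key ID, or null for clear (unencrypted) content, e.g.
  //
  //   const kid = shaka.media.SegmentUtils.getDefaultKID(initSegmentBuffer);
  //   // kid is the key-ID string from the tenc box, or null.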

  /**
   * @param {!BufferSource} rawResult
   * @param {shaka.extern.aesKey} aesKey
   * @param {number} position
   * @return {!Promise.<!BufferSource>}
   */
  static async aesDecrypt(rawResult, aesKey, position) {
    const key = aesKey;
    if (!key.cryptoKey) {
      goog.asserts.assert(key.fetchKey, 'If AES cryptoKey was not ' +
          'preloaded, fetchKey function should be provided');
      await key.fetchKey();
      goog.asserts.assert(key.cryptoKey, 'AES cryptoKey should now be set');
    }
    let iv = key.iv;
    if (!iv) {
      iv = shaka.util.BufferUtils.toUint8(new ArrayBuffer(16));
      let sequence = key.firstMediaSequenceNumber + position;
      for (let i = iv.byteLength - 1; i >= 0; i--) {
        iv[i] = sequence & 0xff;
        sequence >>= 8;
      }
    }
    let algorithm;
    if (aesKey.blockCipherMode == 'CBC') {
      algorithm = {
        name: 'AES-CBC',
        iv,
      };
    } else {
      algorithm = {
        name: 'AES-CTR',
        counter: iv,
        // NIST SP800-38A standard suggests that the counter should occupy
        // half of the counter block.
        length: 64,
      };
    }
    return window.crypto.subtle.decrypt(algorithm, key.cryptoKey, rawResult);
  }
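
  // Worked example of the derived IV (this matches the HLS convention of
  // using the media sequence number as a 128-bit big-endian IV when none is
  // given explicitly): with firstMediaSequenceNumber == 100 and
  // position == 3, sequence == 103, so the IV is fifteen 0x00 bytes followed
  // by 0x67.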
};

/**
 * @typedef {{
 *   type: string,
 *   mimeType: string,
 *   codecs: string,
 *   language: ?string,
 *   height: ?string,
 *   width: ?string,
 *   channelCount: ?number,
 *   sampleRate: ?number,
 *   closedCaptions: Map.<string, string>,
 *   videoRange: ?string,
 *   colorGamut: ?string
 * }}
 *
 * @property {string} type
 * @property {string} mimeType
 * @property {string} codecs
 * @property {?string} language
 * @property {?string} height
 * @property {?string} width
 * @property {?number} channelCount
 * @property {?number} sampleRate
 * @property {Map.<string, string>} closedCaptions
 * @property {?string} videoRange
 * @property {?string} colorGamut
 */
shaka.media.SegmentUtils.BasicInfo;