/**
 * mux.js
 *
 * Copyright (c) 2016 Brightcove
 * All rights reserved.
 *
 * Parse mpeg2 transport stream packets to extract basic timing information
 */
'use strict';

var StreamTypes = require('../m2ts/stream-types.js');
var handleRollover = require('../m2ts/timestamp-rollover-stream.js').handleRollover;
var probe = {};

probe.ts = require('../m2ts/probe.js');
probe.aac = require('../aac/probe.js');

var
  PES_TIMESCALE = 90000,
  MP2T_PACKET_LENGTH = 188, // bytes
  SYNC_BYTE = 0x47;
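
// For context: PES PTS/DTS values are 33-bit counts on this 90kHz clock, so they wrap
// roughly every 2^33 / 90000 ~ 95443 seconds (about 26.5 hours); handleRollover()
// compensates for that wrap when a reference timestamp is available. Every transport
// stream packet is a fixed 188 bytes and begins with the 0x47 sync byte, which is how
// the packet walks below stay aligned.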
var isLikelyAacData = function(data) {
  if ((data[0] === 'I'.charCodeAt(0)) &&
      (data[1] === 'D'.charCodeAt(0)) &&
      (data[2] === '3'.charCodeAt(0))) {
    return true;
  }
  return false;
};
/**
 * walks through segment data looking for pat and pmt packets to parse out
 * program map table information
 */
var parsePsi_ = function(bytes, pmt) {
  var
    startIndex = 0,
    endIndex = MP2T_PACKET_LENGTH,
    packet, type;

  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pat':
          if (!pmt.pid) {
            pmt.pid = probe.ts.parsePat(packet);
          }
          break;
        case 'pmt':
          if (!pmt.table) {
            pmt.table = probe.ts.parsePmt(packet);
          }
          break;
        default:
          break;
      }

      // Found the pat and pmt, we can stop walking the segment
      if (pmt.pid && pmt.table) {
        return;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex++;
    endIndex++;
  }
};
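
// A rough sketch of the pmt object this fills in (the PID values below are illustrative;
// the stream-type codes are the ones defined in StreamTypes):
//
//   {
//     pid: 4096,       // PID that carries the program map table
//     table: {
//       256: 0x1b,     // H264_STREAM_TYPE  -> video elementary stream
//       257: 0x0f      // ADTS_STREAM_TYPE  -> AAC audio elementary stream
//     }
//   }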
/**
 * walks through the segment data from the start and end to get timing information
 * for the first and last audio pes packets
 */
var parseAudioPes_ = function(bytes, pmt, result) {
  var
    startIndex = 0,
    endIndex = MP2T_PACKET_LENGTH,
    packet, type, pesType, pusi, parsed;

  var endLoop = false;

  // Start walking from start of segment to get first audio packet
  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);
            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }
          break;
        default:
          break;
      }

      if (endLoop) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex++;
    endIndex++;
  }

  // Start walking from end of segment to get last audio packet
  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);
            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }
          break;
        default:
          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex--;
    endIndex--;
  }
};
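
// After this walk, result.audio holds at most two entries: timing parsed from the first
// audio PES packet (with a payload unit start indicator) found walking forward, and from
// the last one found walking backward. Both still carry raw 90kHz dts/pts values until
// adjustTimestamp_ runs.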
/**
 * walks through the segment data from the start and end to get timing information
 * for the first and last video pes packets as well as timing information for the first
 * key frame.
 */
var parseVideoPes_ = function(bytes, pmt, result) {
  var
    startIndex = 0,
    endIndex = MP2T_PACKET_LENGTH,
    packet, type, pesType, pusi, parsed, frame, i, pes;

  var endLoop = false;

  var currentFrame = {
    data: [],
    size: 0
  };

  // Start walking from start of segment to get first video packet
  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
          if (pesType === 'video') {
            if (pusi && !endLoop) {
              parsed = probe.ts.parsePesTime(packet);
              if (parsed) {
                parsed.type = 'video';
                result.video.push(parsed);
                endLoop = true;
              }
            }
            if (!result.firstKeyFrame) {
              if (pusi) {
                if (currentFrame.size !== 0) {
                  frame = new Uint8Array(currentFrame.size);
                  i = 0;
                  while (currentFrame.data.length) {
                    pes = currentFrame.data.shift();
                    frame.set(pes, i);
                    i += pes.byteLength;
                  }
                  if (probe.ts.videoPacketContainsKeyFrame(frame)) {
                    result.firstKeyFrame = probe.ts.parsePesTime(frame);
                    result.firstKeyFrame.type = 'video';
                  }
                  currentFrame.size = 0;
                }
              }
              currentFrame.data.push(packet);
              currentFrame.size += packet.byteLength;
            }
          }
          break;
        default:
          break;
      }

      if (endLoop && result.firstKeyFrame) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex++;
    endIndex++;
  }

  // Start walking from end of segment to get last video packet
  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
          if (pesType === 'video' && pusi) {
            parsed = probe.ts.parsePesTime(packet);
            if (parsed) {
              parsed.type = 'video';
              result.video.push(parsed);
              endLoop = true;
            }
          }
          break;
        default:
          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    }

    // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet
    startIndex--;
    endIndex--;
  }
};
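
// As with the audio walk, result.video ends up with timing for the first and last video
// PES packets. The forward walk additionally buffers the TS packets that make up each
// frame so videoPacketContainsKeyFrame can examine the reassembled payload; the first
// frame found to contain a key frame supplies result.firstKeyFrame.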
/**
 * Adjusts the timestamp information for the segment to account for
 * rollover and convert to seconds based on pes packet timescale (90khz clock)
 */
var adjustTimestamp_ = function(segmentInfo, baseTimestamp) {
  if (segmentInfo.audio && segmentInfo.audio.length) {
    var audioBaseTimestamp = baseTimestamp;
    if (typeof audioBaseTimestamp === 'undefined') {
      audioBaseTimestamp = segmentInfo.audio[0].dts;
    }
    segmentInfo.audio.forEach(function(info) {
      info.dts = handleRollover(info.dts, audioBaseTimestamp);
      info.pts = handleRollover(info.pts, audioBaseTimestamp);
      // time in seconds
      info.dtsTime = info.dts / PES_TIMESCALE;
      info.ptsTime = info.pts / PES_TIMESCALE;
    });
  }

  if (segmentInfo.video && segmentInfo.video.length) {
    var videoBaseTimestamp = baseTimestamp;
    if (typeof videoBaseTimestamp === 'undefined') {
      videoBaseTimestamp = segmentInfo.video[0].dts;
    }
    segmentInfo.video.forEach(function(info) {
      info.dts = handleRollover(info.dts, videoBaseTimestamp);
      info.pts = handleRollover(info.pts, videoBaseTimestamp);
      // time in seconds
      info.dtsTime = info.dts / PES_TIMESCALE;
      info.ptsTime = info.pts / PES_TIMESCALE;
    });
    if (segmentInfo.firstKeyFrame) {
      var frame = segmentInfo.firstKeyFrame;
      frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
      frame.pts = handleRollover(frame.pts, videoBaseTimestamp);
      // time in seconds
      frame.dtsTime = frame.dts / PES_TIMESCALE;
      frame.ptsTime = frame.pts / PES_TIMESCALE;
    }
  }
};
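
// Worked example (illustrative numbers): if baseTimestamp is undefined and the first
// audio entry has dts === 900000, every audio dts/pts is unrolled against 900000 and
// divided by PES_TIMESCALE, so 900000 ticks -> 10.0 seconds. handleRollover only changes
// a value when it sits on the far side of a 2^33 wrap from the reference timestamp.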
/**
 * inspects the aac data stream for start and end time information
 */
var inspectAac_ = function(bytes) {
  var
    endLoop = false,
    audioCount = 0,
    sampleRate = null,
    timestamp = null,
    frameSize = 0,
    byteIndex = 0,
    packet;

  while (bytes.length - byteIndex >= 3) {
    var type = probe.aac.parseType(bytes, byteIndex);
    switch (type) {
      case 'timed-metadata':
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (bytes.length - byteIndex < 10) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseId3TagSize(bytes, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }
        if (timestamp === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          timestamp = probe.aac.parseAacTimestamp(packet);
        }
        byteIndex += frameSize;
        break;
      case 'audio':
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (bytes.length - byteIndex < 7) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseAdtsSize(bytes, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }
        if (sampleRate === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          sampleRate = probe.aac.parseSampleRate(packet);
        }
        audioCount++;
        byteIndex += frameSize;
        break;
      default:
        byteIndex++;
        break;
    }
    if (endLoop) {
      return null;
    }
  }

  if (sampleRate === null || timestamp === null) {
    return null;
  }

  var audioTimescale = PES_TIMESCALE / sampleRate;

  var result = {
    audio: [
      {
        type: 'audio',
        dts: timestamp,
        pts: timestamp
      },
      {
        type: 'audio',
        dts: timestamp + (audioCount * 1024 * audioTimescale),
        pts: timestamp + (audioCount * 1024 * audioTimescale)
      }
    ]
  };

  return result;
};
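
// Duration math, roughly: each ADTS frame decodes to 1024 PCM samples, so one frame spans
// 1024 / sampleRate seconds, i.e. 1024 * (PES_TIMESCALE / sampleRate) ticks of the 90kHz
// clock. At 44100 Hz that is about 2090 ticks (~23.2 ms) per frame, which is what
// audioCount * 1024 * audioTimescale adds to the start timestamp above.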
/**
 * inspects the transport stream segment data for start and end time information
 * of the audio and video tracks (when present) as well as the first key frame's
 * start time.
 */
var inspectTs_ = function(bytes) {
  var pmt = {
    pid: null,
    table: null
  };

  var result = {};

  parsePsi_(bytes, pmt);

  for (var pid in pmt.table) {
    if (pmt.table.hasOwnProperty(pid)) {
      var type = pmt.table[pid];
      switch (type) {
        case StreamTypes.H264_STREAM_TYPE:
          result.video = [];
          parseVideoPes_(bytes, pmt, result);
          if (result.video.length === 0) {
            delete result.video;
          }
          break;
        case StreamTypes.ADTS_STREAM_TYPE:
          result.audio = [];
          parseAudioPes_(bytes, pmt, result);
          if (result.audio.length === 0) {
            delete result.audio;
          }
          break;
        default:
          break;
      }
    }
  }
  return result;
};
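
// Before adjustTimestamp_ runs, the result has roughly this shape when both tracks are
// present (dts/pts values are illustrative and still on the raw 90kHz clock):
//
//   {
//     video: [{ type: 'video', dts: 900000, pts: 903000 }, ...],
//     firstKeyFrame: { type: 'video', dts: 900000, pts: 903000 },
//     audio: [{ type: 'audio', dts: 900090, pts: 900090 }, ...]
//   }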
/**
 * Inspects segment byte data and returns an object with start and end timing information
 *
 * @param {Uint8Array} bytes The segment byte data
 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
 * timestamps for rollover. This value must be expressed on the 90khz clock.
 * @return {Object} Object containing start and end frame timing info of the segment, or
 * null if no timing information could be found.
 */
var inspect = function(bytes, baseTimestamp) {
  var isAacData = isLikelyAacData(bytes);
  var result;

  if (isAacData) {
    result = inspectAac_(bytes);
  } else {
    result = inspectTs_(bytes);
  }

  if (!result || (!result.audio && !result.video)) {
    return null;
  }

  adjustTimestamp_(result, baseTimestamp);

  return result;
};

module.exports = {
  inspect: inspect
};
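
// Example usage (illustrative only; segmentBytes and lastSegmentEndDts are assumed to be
// supplied by the caller):
//
//   var tsInspector = require('./ts-inspector.js');
//   var segmentInfo = tsInspector.inspect(segmentBytes, lastSegmentEndDts);
//
//   if (segmentInfo && segmentInfo.video) {
//     var startTime = segmentInfo.video[0].dtsTime;                          // seconds
//     var endTime = segmentInfo.video[segmentInfo.video.length - 1].dtsTime; // seconds
//   }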