coalesce-stream.js

'use strict';

var Stream = require('../utils/stream.js');

/**
 * The final stage of the transmuxer that emits the flv tags
 * for audio, video, and metadata. Also translates in time and
 * outputs caption data and id3 cues.
 */
var CoalesceStream = function(options) {
  // Number of Tracks per output segment
  // If greater than 1, we combine multiple
  // tracks into a single segment
  this.numberOfTracks = 0;
  this.metadataStream = options.metadataStream;

  this.videoTags = [];
  this.audioTags = [];
  this.videoTrack = null;
  this.audioTrack = null;
  this.pendingCaptions = [];
  this.pendingMetadata = [];
  this.pendingTracks = 0;
  this.processedTracks = 0;

  CoalesceStream.prototype.init.call(this);

  // Take output from multiple streams
  this.push = function(output) {
    // buffer incoming captions until the associated video segment
    // finishes
    if (output.text) {
      return this.pendingCaptions.push(output);
    }
    // buffer incoming id3 tags until the final flush
    if (output.frames) {
      return this.pendingMetadata.push(output);
    }

    if (output.track.type === 'video') {
      this.videoTrack = output.track;
      this.videoTags = output.tags;
      this.pendingTracks++;
    }
    if (output.track.type === 'audio') {
      this.audioTrack = output.track;
      this.audioTags = output.tags;
      this.pendingTracks++;
    }
  };
};

CoalesceStream.prototype = new Stream();
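
/**
 * Coalesce the buffered audio/video tags, captions, and id3 metadata into
 * a single combined 'data' event (followed by 'done') once every expected
 * track for the current segment has flushed, then reset internal state.
 * `flushSource` names the upstream stream that triggered this flush.
 */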
CoalesceStream.prototype.flush = function(flushSource) {
  var
    id3,
    caption,
    i,
    timelineStartPts,
    event = {
      tags: {},
      captions: [],
      captionStreams: {},
      metadata: []
    };

  if (this.pendingTracks < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' &&
        flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received metadata
      // or captions.)
      return;
    } else if (this.pendingTracks === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.processedTracks++;

      if (this.processedTracks < this.numberOfTracks) {
        return;
      }
    }
  }

  this.processedTracks += this.pendingTracks;
  this.pendingTracks = 0;

  if (this.processedTracks < this.numberOfTracks) {
    return;
  }

  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
  }

  event.tags.videoTags = this.videoTags;
  event.tags.audioTags = this.audioTags;

  // Translate caption PTS times into second offsets into the
  // video timeline for the segment, and add track info
  for (i = 0; i < this.pendingCaptions.length; i++) {
    caption = this.pendingCaptions[i];
    caption.startTime = caption.startPts - timelineStartPts;
    caption.startTime /= 90e3;
    caption.endTime = caption.endPts - timelineStartPts;
    caption.endTime /= 90e3;
    event.captionStreams[caption.stream] = true;
    event.captions.push(caption);
  }

  // Translate ID3 frame PTS times into second offsets into the
  // video timeline for the segment
  for (i = 0; i < this.pendingMetadata.length; i++) {
    id3 = this.pendingMetadata[i];
    id3.cueTime = id3.pts - timelineStartPts;
    id3.cueTime /= 90e3;
    event.metadata.push(id3);
  }
  // We add this to every single emitted segment even though we only need
  // it for the first
  event.metadata.dispatchType = this.metadataStream.dispatchType;

  // Reset stream state
  this.videoTrack = null;
  this.audioTrack = null;
  this.videoTags = [];
  this.audioTags = [];
  this.pendingCaptions.length = 0;
  this.pendingMetadata.length = 0;
  this.pendingTracks = 0;
  this.processedTracks = 0;

  // Emit the final segment
  this.trigger('data', event);
  this.trigger('done');
};

module.exports = CoalesceStream;
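
/**
 * Illustrative usage (a minimal sketch, not part of the module): a
 * transmuxer would be expected to wire CoalesceStream up roughly like
 * this. The names `videoSegmentStream`, `audioSegmentStream`, and
 * `metadataStream` are assumed upstream streams that pipe `{track, tags}`
 * objects, captions, and id3 frames into it.
 *
 * @example
 * var coalesceStream = new CoalesceStream({metadataStream: metadataStream});
 * coalesceStream.numberOfTracks = 2; // expect one audio and one video track
 *
 * videoSegmentStream.pipe(coalesceStream);
 * audioSegmentStream.pipe(coalesceStream);
 *
 * coalesceStream.on('data', function(segment) {
 *   // segment.tags.videoTags / segment.tags.audioTags hold the flv tags;
 *   // segment.captions and segment.metadata carry cue times in seconds
 * });
 */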