/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */
'use strict';

var Stream = require('../utils/stream.js');

var FlvTag = require('./flv-tag.js');

var m2ts = require('../m2ts/m2ts.js');

var AdtsStream = require('../codecs/adts.js');

var H264Stream = require('../codecs/h264').H264Stream;

var CoalesceStream = require('./coalesce-stream.js');

var TagList = require('./tag-list.js');

var _Transmuxer, _VideoSegmentStream, _AudioSegmentStream, collectTimelineInfo, metaDataTag, extraDataTag;
/**
 * Store information about the start and end of the track and the
 * duration for each frame/sample we process in order to calculate
 * the baseMediaDecodeTime
 */


collectTimelineInfo = function collectTimelineInfo(track, data) {
  var startInfo = track.timelineStartInfo;

  // keep the smallest pts/dts observed so far; ignore packets that do
  // not carry a numeric timestamp
  if (typeof data.pts === 'number') {
    startInfo.pts = startInfo.pts === undefined ?
      data.pts :
      Math.min(startInfo.pts, data.pts);
  }

  if (typeof data.dts === 'number') {
    startInfo.dts = startInfo.dts === undefined ?
      data.dts :
      Math.min(startInfo.dts, data.dts);
  }
};

/**
 * Build an FLV onMetaData tag announcing the video codec id and the
 * track's dimensions.
 *
 * @param {object} track - video track config providing width/height
 * @param {number} pts - timestamp (ms) applied to both pts and dts
 * @return {FlvTag} the populated metadata tag
 */
metaDataTag = function metaDataTag(track, pts) {
  var tag = new FlvTag(FlvTag.METADATA_TAG);

  tag.pts = pts;
  tag.dts = pts;

  // AVC/H.264 is FLV video codec id 7
  tag.writeMetaDataDouble('videocodecid', 7);
  tag.writeMetaDataDouble('width', track.width);
  tag.writeMetaDataDouble('height', track.height);

  return tag;
};

/**
 * Build an FLV video tag containing the AVCDecoderConfigurationRecord
 * (the H.264 "sequence header") for the track: profile/level bytes
 * followed by the SPS and PPS NAL units.
 *
 * @param {object} track - video track with sps, pps, profileIdc,
 *   profileCompatibility and levelIdc populated
 * @param {number} pts - timestamp (ms) applied to both pts and dts
 * @return {FlvTag} the populated extra-data tag
 */
extraDataTag = function extraDataTag(track, pts) {
  var tag = new FlvTag(FlvTag.VIDEO_TAG, true);
  var i;

  tag.pts = pts;
  tag.dts = pts;

  tag.writeByte(0x01); // configurationVersion
  tag.writeByte(track.profileIdc); // AVCProfileIndication
  tag.writeByte(track.profileCompatibility); // profile_compatibility
  tag.writeByte(track.levelIdc); // AVCLevelIndication
  tag.writeByte(0xFC | 0x03); // reserved (6 bits), NALU length size - 1 (2 bits)

  // sequence parameter set
  tag.writeByte(0xE0 | 0x01); // reserved (3 bits), number of SPS (5 bits)
  tag.writeShort(track.sps[0].length); // SPS length in bytes
  tag.writeBytes(track.sps[0]); // SPS NAL unit

  // picture parameter sets (will there ever be more than 1 PPS?)
  tag.writeByte(track.pps.length); // number of PPS
  for (i = 0; i < track.pps.length; i++) {
    tag.writeShort(track.pps[i].length); // PPS length in bytes
    tag.writeBytes(track.pps[i]); // PPS NAL unit
  }

  return tag;
};
/**
 * Constructs a single-track, media segment from AAC data
 * events. The output of this stream can be fed to flash.
 */


/**
 * A Stream that buffers ADTS (AAC) frames and, on flush(), converts
 * them into FLV audio tags, emitting a 'data' event with the tag list
 * followed by a 'done' event. Audio metadata/sequence-header tags are
 * interleaved at video key-frame timestamps and at least once per
 * second so a decoder can re-initialize quickly after a seek.
 *
 * @param {object} track - audio track metadata; codec configuration
 *   fields are populated from the frames pushed into the stream
 */
_AudioSegmentStream = function AudioSegmentStream(track) {
  var adtsFrames = [],
      videoKeyFrames = [],
      oldExtraData;

  _AudioSegmentStream.prototype.init.call(this);

  /**
   * Buffer one parsed ADTS frame and copy its codec configuration onto
   * the track.
   * @param {object} data - ADTS frame with pts/dts in 90kHz clock units
   */
  this.push = function (data) {
    collectTimelineInfo(track, data);

    if (track) {
      track.audioobjecttype = data.audioobjecttype;
      track.channelcount = data.channelcount;
      track.samplerate = data.samplerate;
      track.samplingfrequencyindex = data.samplingfrequencyindex;
      track.samplesize = data.samplesize;
      // pack an AAC AudioSpecificConfig-style descriptor into 16 bits:
      // object type (5 bits) | sampling frequency index (4 bits) |
      // channel count (4 bits); written out by writeMetaDataTags below
      track.extraData = track.audioobjecttype << 11 | track.samplingfrequencyindex << 7 | track.channelcount << 3;
    }

    // convert from the 90kHz MPEG-2 clock to milliseconds
    data.pts = Math.round(data.pts / 90);
    data.dts = Math.round(data.dts / 90); // buffer audio data until end() is called

    adtsFrames.push(data);
  };

  /**
   * Convert all buffered frames into FLV tags and emit them via a
   * 'data' event, then a 'done' event. Triggers 'done' immediately if
   * no audio was observed. Clears the key-frame list and cached config
   * when finished.
   */
  this.flush = function () {
    var currentFrame,
        adtsFrame,
        lastMetaPts,
        tags = new TagList(); // return early if no audio data has been observed

    if (adtsFrames.length === 0) {
      this.trigger('done', 'AudioSegmentStream');
      return;
    }

    lastMetaPts = -Infinity;

    while (adtsFrames.length) {
      currentFrame = adtsFrames.shift(); // write out a metadata frame at every video key frame

      if (videoKeyFrames.length && currentFrame.pts >= videoKeyFrames[0]) {
        lastMetaPts = videoKeyFrames.shift();
        this.writeMetaDataTags(tags, lastMetaPts);
      } // also write out metadata tags every 1 second so that the decoder
      // is re-initialized quickly after seeking into a different
      // audio configuration.


      if (track.extraData !== oldExtraData || currentFrame.pts - lastMetaPts >= 1000) {
        this.writeMetaDataTags(tags, currentFrame.pts);
        oldExtraData = track.extraData;
        lastMetaPts = currentFrame.pts;
      }

      adtsFrame = new FlvTag(FlvTag.AUDIO_TAG);
      adtsFrame.pts = currentFrame.pts;
      adtsFrame.dts = currentFrame.dts;
      adtsFrame.writeBytes(currentFrame.data);
      tags.push(adtsFrame.finalize());
    }

    videoKeyFrames.length = 0;
    oldExtraData = null;
    this.trigger('data', {
      track: track,
      tags: tags.list
    });
    this.trigger('done', 'AudioSegmentStream');
  };

  /**
   * Append an onMetaData tag plus an AAC sequence-header tag (carrying
   * track.extraData) to the tag list.
   * @param {TagList} tags - list the new tags are pushed onto
   * @param {number} pts - timestamp (ms) applied to both tags
   */
  this.writeMetaDataTags = function (tags, pts) {
    var adtsFrame;
    adtsFrame = new FlvTag(FlvTag.METADATA_TAG); // For audio, DTS is always the same as PTS. We want to set the DTS
    // however so we can compare with video DTS to determine approximate
    // packet order

    adtsFrame.pts = pts;
    adtsFrame.dts = pts; // AAC is always 10

    adtsFrame.writeMetaDataDouble('audiocodecid', 10);
    adtsFrame.writeMetaDataBoolean('stereo', track.channelcount === 2);
    adtsFrame.writeMetaDataDouble('audiosamplerate', track.samplerate); // Is AAC always 16 bit?

    adtsFrame.writeMetaDataDouble('audiosamplesize', 16);
    tags.push(adtsFrame.finalize());
    adtsFrame = new FlvTag(FlvTag.AUDIO_TAG, true); // For audio, DTS is always the same as PTS. We want to set the DTS
    // however so we can compare with video DTS to determine approximate
    // packet order

    adtsFrame.pts = pts;
    adtsFrame.dts = pts;
    // write the packed audio config directly into the tag's buffer
    adtsFrame.view.setUint16(adtsFrame.position, track.extraData);
    adtsFrame.position += 2;
    adtsFrame.length = Math.max(adtsFrame.length, adtsFrame.position);
    tags.push(adtsFrame.finalize());
  };

  /**
   * Record a video key-frame timestamp so flush() can interleave audio
   * metadata tags at key-frame boundaries; wired to the video stream's
   * 'keyframe' event by the Transmuxer.
   * @param {number} pts - key frame timestamp in milliseconds
   */
  this.onVideoKeyFrame = function (pts) {
    videoKeyFrames.push(pts);
  };
};

_AudioSegmentStream.prototype = new Stream();
/**
 * Store FlvTags for the h264 stream
 * @param track {object} track metadata configuration
 */

/**
 * A Stream that buffers H.264 NAL units and, on flush(), groups them
 * into FLV video tags (one tag per access unit), emitting a 'data'
 * event with the tag list followed by a 'done' event. Also emits a
 * 'keyframe' event for each IDR frame that gets metadata written.
 *
 * @param track {object} track metadata configuration; SPS/PPS derived
 *   fields (width, height, profileIdc, ...) are filled in during flush
 */
_VideoSegmentStream = function VideoSegmentStream(track) {
  var nalUnits = [],
      config,
      h264Frame;

  _VideoSegmentStream.prototype.init.call(this);

  /**
   * Finalize the current frame: prepend metadata/extra-data tags when
   * needed (key frames, or the first frame of the segment), close the
   * open NAL unit and push the finished tag.
   * @param {TagList} tags - list the finished tag(s) are pushed onto
   * @param {FlvTag} frame - the in-progress video tag; no-op if falsy
   */
  this.finishFrame = function (tags, frame) {
    if (!frame) {
      return;
    } // Check if keyframe and the length of tags.
    // This makes sure we write metadata on the first frame of a segment.


    if (config && track && track.newMetadata && (frame.keyFrame || tags.length === 0)) {
      // Push extra data on every IDR frame in case we did a stream change + seek
      var metaTag = metaDataTag(config, frame.dts).finalize();
      var extraTag = extraDataTag(track, frame.dts).finalize();
      metaTag.metaDataTag = extraTag.metaDataTag = true;
      tags.push(metaTag);
      tags.push(extraTag);
      track.newMetadata = false;
      this.trigger('keyframe', frame.dts);
    }

    frame.endNalUnit();
    tags.push(frame.finalize());
    h264Frame = null;
  };

  /**
   * Buffer one NAL unit, converting its timestamps from 90kHz clock
   * ticks to milliseconds.
   * @param {object} data - parsed NAL unit with nalUnitType and pts/dts
   */
  this.push = function (data) {
    collectTimelineInfo(track, data);
    data.pts = Math.round(data.pts / 90);
    data.dts = Math.round(data.dts / 90); // buffer video until flush() is called

    nalUnits.push(data);
  };

  /**
   * Assemble all buffered NAL units into FLV video tags, emit them via
   * a 'data' event and finish with a 'done' event. Frames are delimited
   * by access-unit-delimiter NAL units; SPS/PPS units update the track
   * config as a side effect.
   */
  this.flush = function () {
    var currentNal,
        tags = new TagList(); // Throw away nalUnits at the start of the byte stream until we find
    // the first AUD

    while (nalUnits.length) {
      if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
        break;
      }

      nalUnits.shift();
    } // return early if no video data has been observed


    if (nalUnits.length === 0) {
      this.trigger('done', 'VideoSegmentStream');
      return;
    }

    while (nalUnits.length) {
      currentNal = nalUnits.shift(); // record the track config

      if (currentNal.nalUnitType === 'seq_parameter_set_rbsp') {
        track.newMetadata = true;
        config = currentNal.config;
        track.width = config.width;
        track.height = config.height;
        track.sps = [currentNal.data];
        track.profileIdc = config.profileIdc;
        track.levelIdc = config.levelIdc;
        track.profileCompatibility = config.profileCompatibility;
        h264Frame.endNalUnit();
      } else if (currentNal.nalUnitType === 'pic_parameter_set_rbsp') {
        track.newMetadata = true;
        track.pps = [currentNal.data];
        h264Frame.endNalUnit();
      } else if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
        // an AUD starts a new access unit, so close out the previous
        // frame and begin a fresh video tag
        if (h264Frame) {
          this.finishFrame(tags, h264Frame);
        }

        h264Frame = new FlvTag(FlvTag.VIDEO_TAG);
        h264Frame.pts = currentNal.pts;
        h264Frame.dts = currentNal.dts;
      } else {
        if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
          // the current sample is a key frame
          h264Frame.keyFrame = true;
        }

        h264Frame.endNalUnit();
      }

      // every NAL unit (including the AUD itself) is appended to the
      // current frame's tag
      h264Frame.startNalUnit();
      h264Frame.writeBytes(currentNal.data);
    }

    if (h264Frame) {
      this.finishFrame(tags, h264Frame);
    }

    this.trigger('data', {
      track: track,
      tags: tags.list
    }); // Continue with the flush process now

    this.trigger('done', 'VideoSegmentStream');
  };
};

_VideoSegmentStream.prototype = new Stream();
/**
 * An object that incrementally transmuxes MPEG2 Trasport Stream
 * chunks into an FLV.
 */

/**
 * An object that incrementally transmuxes MPEG2 Transport Stream
 * chunks into an FLV. Wires up the full pipeline:
 *   packet -> parse -> elementary -> rollover -> h264/adts ->
 *   segment streams -> coalesce
 * and re-emits the coalesce stream's 'data'/'done' events.
 *
 * @param {object} [options] - passed through to the coalesce and
 *   caption streams; gains a metadataStream property as a side effect
 */
_Transmuxer = function Transmuxer(options) {
  var self = this,
      packetStream,
      parseStream,
      elementaryStream,
      videoTimestampRolloverStream,
      audioTimestampRolloverStream,
      timedMetadataTimestampRolloverStream,
      adtsStream,
      h264Stream,
      videoSegmentStream,
      audioSegmentStream,
      captionStream,
      coalesceStream;

  _Transmuxer.prototype.init.call(this);

  options = options || {}; // expose the metadata stream

  this.metadataStream = new m2ts.MetadataStream();
  options.metadataStream = this.metadataStream; // set up the parsing pipeline

  packetStream = new m2ts.TransportPacketStream();
  parseStream = new m2ts.TransportParseStream();
  elementaryStream = new m2ts.ElementaryStream();
  videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
  audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
  adtsStream = new AdtsStream();
  h264Stream = new H264Stream();
  coalesceStream = new CoalesceStream(options); // disassemble MPEG2-TS packets into elementary streams

  packetStream.pipe(parseStream).pipe(elementaryStream); // !!THIS ORDER IS IMPORTANT!!
  // demux the streams

  elementaryStream.pipe(videoTimestampRolloverStream).pipe(h264Stream);
  elementaryStream.pipe(audioTimestampRolloverStream).pipe(adtsStream);
  elementaryStream.pipe(timedMetadataTimestampRolloverStream).pipe(this.metadataStream).pipe(coalesceStream); // if CEA-708 parsing is available, hook up a caption stream

  captionStream = new m2ts.CaptionStream(options);
  h264Stream.pipe(captionStream).pipe(coalesceStream); // hook up the segment streams once track metadata is delivered

  elementaryStream.on('data', function (data) {
    var i, videoTrack, audioTrack;

    if (data.type === 'metadata') {
      i = data.tracks.length; // scan the tracks listed in the metadata

      while (i--) {
        if (data.tracks[i].type === 'video') {
          videoTrack = data.tracks[i];
        } else if (data.tracks[i].type === 'audio') {
          audioTrack = data.tracks[i];
        }
      } // hook up the video segment stream to the first track with h264 data


      if (videoTrack && !videoSegmentStream) {
        coalesceStream.numberOfTracks++;
        videoSegmentStream = new _VideoSegmentStream(videoTrack); // Set up the final part of the video pipeline

        h264Stream.pipe(videoSegmentStream).pipe(coalesceStream);
      }

      if (audioTrack && !audioSegmentStream) {
        // hook up the audio segment stream to the first track with aac data
        coalesceStream.numberOfTracks++;
        audioSegmentStream = new _AudioSegmentStream(audioTrack); // Set up the final part of the audio pipeline

        adtsStream.pipe(audioSegmentStream).pipe(coalesceStream);

        // let the audio stream interleave metadata tags at video
        // key-frame boundaries
        if (videoSegmentStream) {
          videoSegmentStream.on('keyframe', audioSegmentStream.onVideoKeyFrame);
        }
      }
    }
  }); // feed incoming data to the front of the parsing pipeline

  /**
   * Push a chunk of MPEG2-TS bytes into the front of the pipeline.
   * @param {Uint8Array} data - raw transport stream bytes
   */
  this.push = function (data) {
    packetStream.push(data);
  }; // flush any buffered data


  /**
   * Flush the whole pipeline; a 'data' event (if any tags were built)
   * and a 'done' event are emitted once everything drains.
   */
  this.flush = function () {
    // Start at the top of the pipeline and flush all pending work
    packetStream.flush();
  }; // Caption data has to be reset when seeking outside buffered range


  this.resetCaptions = function () {
    captionStream.reset();
  }; // Re-emit any data coming from the coalesce stream to the outside world


  coalesceStream.on('data', function (event) {
    self.trigger('data', event);
  }); // Let the consumer know we have finished flushing the entire pipeline

  coalesceStream.on('done', function () {
    self.trigger('done');
  });
};

_Transmuxer.prototype = new Stream(); // forward compatibility

module.exports = _Transmuxer;