From 81cdf537e287672326d662e9040aad4e16409409 Mon Sep 17 00:00:00 2001
From: Nicolas Pope <nwpope@utu.fi>
Date: Wed, 20 May 2020 16:37:03 +0300
Subject: [PATCH] FTLStream abstraction
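
Removes a leftover debug log of the encoder bitrate from Sender::_encodeChannel,
and updates the web client: index.js is reworked and the browserify bundle
(bundle.js) is regenerated accordingly (see the diffstat below).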

---
 components/streams/src/sender.cpp |     2 -
 web-service/public/js/bundle.js   | 12546 +++++++++++++++++-----------
 web-service/public/js/index.js    |   104 +-
 3 files changed, 7543 insertions(+), 5109 deletions(-)

diff --git a/components/streams/src/sender.cpp b/components/streams/src/sender.cpp
index 6f4cb3289..8277a73d9 100644
--- a/components/streams/src/sender.cpp
+++ b/components/streams/src/sender.cpp
@@ -349,8 +349,6 @@ void Sender::_encodeChannel(ftl::rgbd::FrameSet &fs, Channel c, bool reset) {
 
 				FTL_Profile("Encoder",0.02);
 
-				LOG(INFO) << "Enocode bitrate: " << (int)pkt.bitrate;
-
 				if (enc->encode(sroi, pkt)) {
 					stream_->post(spkt, pkt);
 
diff --git a/web-service/public/js/bundle.js b/web-service/public/js/bundle.js
index b26b56a29..7557e2bdf 100644
--- a/web-service/public/js/bundle.js
+++ b/web-service/public/js/bundle.js
@@ -1,1948 +1,2960 @@
 (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
-(function (Buffer){
-const Peer = require('../../server/src/peer')
-const VideoConverter = require('./lib/dist/video-converter');
-
-let current_data = {};
-let peer;
-let player;
-
-/**
- * Validates that the user is logged in by sending the token 
- */
-checkIfLoggedIn = async () => {
-    //     const token = window.localStorage.getItem('token')
-    //     console.log(token)
-    //     if(!token){
-    //         console.log("You need to login")
-    //         renderLogin()
-    //     }else{
-
-    //         //Check if the token is valid
-    //         const response = await fetch('http://localhost:8080/auth/validation', {
-    //             method: 'POST',
-    //             headers: {'Authorization': token}
-    //         })
-    //         console.log('RESPONSE', response)
-            
-    //         //Token is valid, show available streams
-    //         if(response.status === 200){
-    //             console.log("SUCCESS")
-                 renderThumbnails()
-
-    //         }
-    //     }
-}
-
-/**
- * Returns a list of available streams
- */
-getAvailableStreams = async () => {
-    try{
-        const streamsInJson = await fetch(`./streams`);
-        const streams = await streamsInJson.json();
-        console.log('AVAILABLE', streams)
-        return streams;
-    }catch(err){
-        console.log(err)
-    }
-}
-
-
-createVideoPlayer = () => {
-    const containerDiv = document.getElementById('container')
-    containerDiv.innerHTML = `<h1>Stream from source ${current_data.uri}</h1><br>
-        <button onclick="renderThumbnails(); closeStream()">Go back</button>
-        <button onclick="connectToStream()">Start Stream</button><br>
-        <button onclick="webSocketTest()">WebSocket Test</button><br>
-        <video id="ftlab-stream-video" width="640" height="360"></video>`;
-    containerDiv.innerHTML += '<br>'
-    containerDiv.innerHTML += ''
-    createPeer();
-    connectToStream();
-}
-
-/**
- * Creates thumbnail (image) for all available streams and adds them to div class='container'
- */
-renderThumbnails = async () => {
-    const thumbnails = await getAvailableStreams();
-    const containerDiv = document.getElementById('container')
-    containerDiv.innerHTML = '';
-    containerDiv.innerHTML = `<button onClick="configs()">change configs</button>`
-    containerDiv.innerHTML += `<div class="ftlab-stream-thumbnails"></div>`
-    if(thumbnails.length === 0){
-        containerDiv.innerHTML = `<h3>No streams running currently</h3>`
-    }else{
-        for(var i=0; i<thumbnails.length; i++){
-            const encodedURI = encodeURIComponent(thumbnails[i])
-            current_data.uri = thumbnails[i]
-            try{
-                const someData = await fetch(`./stream/rgb?uri=${encodedURI}`)
-                if(!someData.ok){
-                    throw new Error('Image not found')
-                }
-                const myBlob = await someData.blob();
-                const objectURL = URL.createObjectURL(myBlob);
-                // containerDiv.innerHTML += createCard()
-                containerDiv.innerHTML += createCard(objectURL, i+4)
-            }catch(err){
-                console.log("Couldn't create thumbnail");
-                console.log(err) 
-            }
-        }
-    }
-}
-
-
-/** 
- * Method to create a single thumbnail
- */
-createCard = (url, viewers) => {
-    return `<div class='ftlab-card-component' >
-                <img src='${url}' class="thumbnail-img" alt="Hups" width="250px"></img>
-                <p>Viewers: ${viewers}</p>
-                <button onclick="createVideoPlayer()">button</button>
-            </div>`
-}
-
-
-createPeer = () => {
-    // FOR PRODUCTION
-    const ws = new WebSocket("ws://" + location.host + ":" + (location.port == "" ? "80" : location.port) + location.pathname);
-    //const ws = new WebSocket("ws://localhost:8080")
-    ws.binaryType = "arraybuffer";
-    peer = new Peer(ws)
-}
-
-webSocketTest = () => {
-    peer.send("update_cfg", "ftl://utu.fi#reconstruction_default/0/renderer/cool_effect", "true")    
-}
-
-
-connectToStream = () => {
-    const element = document.getElementById('ftlab-stream-video');
-    let converter = null;
-
-    let rxcount = 0;
-    let ts = 0;
-    let dts = 0;
-
-    peer.bind(current_data.uri, (latency, streampckg, pckg) => {
-        if(pckg[0] === 2){
-            rxcount++;
-            if (rxcount >= 25) {
-                rxcount = 0;
-                peer.send(current_data.uri, 0, [1,0,255,0],[255,7,35,0,0,Buffer.alloc(0)]);
-                //peer.send(current_data.uri, 0, [255,7,35,0,0,Buffer.alloc(0)], [1,0,255,0]);
-            }
-
-            if (converter) {
-                function decode(value){
-                    converter.appendRawData(value);
-                }
-                decode(pckg[5]);
-                converter.play();
-            } else {
-                if (ts > 0) {
-                    dts = streampckg[0] - ts;
-                    console.log("Framerate = ", 1000/dts);
-                    converter = new VideoConverter.default(element, 30, 1);
-                }
-                ts = streampckg[0];
-            }
-        };
-    })
-
-    // Start the transaction
-    //peer.send("get_stream", (current_data.uri, 30, 0, current_data.uri));
-
-    peer.rpc("find_stream", (res) => {
-        peer.send(current_data.uri, 0, [1,0,255,0],[255,7,35,0,0,Buffer.alloc(0)]);
-        //peer.send(current_data.uri, [255,7,35,0,0,Buffer.alloc(0)], [1,0,255,0]);
-    }, current_data.uri);
-}
-
-closeStream = () => {
-    peer.sock.close()
-}
-
-
-
-/**
- * **************
- * CONFIGURATIONS
- * **************
- */
-
-
-current_data.configURI = "ftl://utu.fi#reconstruction_snap8/net"
-
-configs = () => {
-    const container = document.getElementById("container");
-    container.innerHTML = `<div class="ftlab-configurations"></div>`;
-    renderConfigOptions();
-}
-
-
-renderConfigOptions = () => {
-    const input = `<p>input1</p><br>ftl://utu.fi#<input type="text">`
-    const doc = document.getElementsByClassName('ftlab-configurations')[0];
-    doc.innerHTML = input;
-}
-
-/**
- * 
- */
-loadConfigs = async (str) => {
-    const configURI = encodeURIComponent(`ftl://utu.fi#reconstruction_snap8${str}`);
-    const uri = encodeURIComponent(current_data.uri)
-    const rawResp = await fetch(`./stream/config?settings=${configURI}&uri=${uri}`)
-    const response = await rawResp.json();
-    const content = JSON.parse(response);
-    container.innerHTML += `<p>${response}</p>`;
-}
-
-// current_data.configData = '{"peers": 1}';
-
-/**
- * Method to send configurations to backend 
- */
-saveConfigs = async () => {
-    let {uri, configURI, configData} = current_data
-    const rawResp = await fetch('./stream/config', {
-        method: 'POST',
-        headers: {
-            'Accept': 'application/json',
-            'Content-Type': 'application/json'
-        },
-        body: JSON.stringify({peerURI: uri, configURI, data: configData, saveToCPP: true})
-    });
-    const content = await rawResp.json();
+'use strict'
+var DuplexStream = require('readable-stream').Duplex
+  , util         = require('util')
+  , Buffer       = require('safe-buffer').Buffer
+
+function BufferList (callback) {
+  if (!(this instanceof BufferList))
+    return new BufferList(callback)
+
+  this._bufs  = []
+  this.length = 0
+
+  if (typeof callback == 'function') {
+    this._callback = callback
+
+    var piper = function piper (err) {
+      if (this._callback) {
+        this._callback(err)
+        this._callback = null
+      }
+    }.bind(this)
+
+    this.on('pipe', function onPipe (src) {
+      src.on('error', piper)
+    })
+    this.on('unpipe', function onUnpipe (src) {
+      src.removeListener('error', piper)
+    })
+  } else {
+    this.append(callback)
+  }
+
+  DuplexStream.call(this)
 }
-}).call(this,require("buffer").Buffer)
-},{"../../server/src/peer":36,"./lib/dist/video-converter":9,"buffer":44}],2:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var bit_stream_1 = require("./util/bit-stream");
-var debug = require("./util/debug");
-var NALU_1 = require("./util/NALU");
-var H264Parser = (function () {
-    function H264Parser(remuxer) {
-        this.remuxer = remuxer;
-        this.track = remuxer.mp4track;
-    }
-    H264Parser.prototype.parseSEI = function (sei) {
-        var messages = H264Parser.readSEI(sei);
-        for (var _i = 0, messages_1 = messages; _i < messages_1.length; _i++) {
-            var m = messages_1[_i];
-            switch (m.type) {
-                case 0:
-                    this.track.seiBuffering = true;
-                    break;
-                case 5:
-                    return true;
-                default:
-                    break;
-            }
-        }
-        return false;
-    };
-    H264Parser.prototype.parseSPS = function (sps) {
-        var config = H264Parser.readSPS(sps);
-        this.track.width = config.width;
-        this.track.height = config.height;
-        this.track.sps = [sps];
-        this.track.codec = 'avc1.';
-        var codecArray = new DataView(sps.buffer, sps.byteOffset + 1, 4);
-        for (var i = 0; i < 3; ++i) {
-            var h = codecArray.getUint8(i).toString(16);
-            if (h.length < 2) {
-                h = '0' + h;
-            }
-            this.track.codec += h;
-        }
-    };
-    H264Parser.prototype.parsePPS = function (pps) {
-        this.track.pps = [pps];
-    };
-    H264Parser.prototype.parseNAL = function (unit) {
-        if (!unit) {
-            return false;
-        }
-        var push = false;
-        switch (unit.type()) {
-            case NALU_1.default.NDR:
-            case NALU_1.default.IDR:
-                push = true;
-                break;
-            case NALU_1.default.SEI:
-                push = this.parseSEI(unit.getData().subarray(4));
-                break;
-            case NALU_1.default.SPS:
-                if (this.track.sps.length === 0) {
-                    this.parseSPS(unit.getData().subarray(4));
-                    debug.log(" Found SPS type NALU frame.");
-                    if (!this.remuxer.readyToDecode && this.track.pps.length > 0 && this.track.sps.length > 0) {
-                        this.remuxer.readyToDecode = true;
-                    }
-                }
-                break;
-            case NALU_1.default.PPS:
-                if (this.track.pps.length === 0) {
-                    this.parsePPS(unit.getData().subarray(4));
-                    debug.log(" Found PPS type NALU frame.");
-                    if (!this.remuxer.readyToDecode && this.track.pps.length > 0 && this.track.sps.length > 0) {
-                        this.remuxer.readyToDecode = true;
-                    }
-                }
-                break;
-            default:
-                debug.log(" Found Unknown type NALU frame. type=" + unit.type());
-                break;
-        }
-        return push;
-    };
-    H264Parser.skipScalingList = function (decoder, count) {
-        var lastScale = 8;
-        var nextScale = 8;
-        for (var j = 0; j < count; j++) {
-            if (nextScale !== 0) {
-                var deltaScale = decoder.readEG();
-                nextScale = (lastScale + deltaScale + 256) % 256;
-            }
-            lastScale = (nextScale === 0) ? lastScale : nextScale;
-        }
-    };
-    H264Parser.readSPS = function (data) {
-        var decoder = new bit_stream_1.default(data);
-        var frameCropLeftOffset = 0;
-        var frameCropRightOffset = 0;
-        var frameCropTopOffset = 0;
-        var frameCropBottomOffset = 0;
-        var sarScale = 1;
-        decoder.readUByte();
-        var profileIdc = decoder.readUByte();
-        decoder.skipBits(5);
-        decoder.skipBits(3);
-        decoder.skipBits(8);
-        decoder.skipUEG();
-        if (profileIdc === 100 ||
-            profileIdc === 110 ||
-            profileIdc === 122 ||
-            profileIdc === 244 ||
-            profileIdc === 44 ||
-            profileIdc === 83 ||
-            profileIdc === 86 ||
-            profileIdc === 118 ||
-            profileIdc === 128) {
-            var chromaFormatIdc = decoder.readUEG();
-            if (chromaFormatIdc === 3) {
-                decoder.skipBits(1);
-            }
-            decoder.skipUEG();
-            decoder.skipUEG();
-            decoder.skipBits(1);
-            if (decoder.readBoolean()) {
-                var scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
-                for (var i = 0; i < scalingListCount; ++i) {
-                    if (decoder.readBoolean()) {
-                        if (i < 6) {
-                            H264Parser.skipScalingList(decoder, 16);
-                        }
-                        else {
-                            H264Parser.skipScalingList(decoder, 64);
-                        }
-                    }
-                }
-            }
-        }
-        decoder.skipUEG();
-        var picOrderCntType = decoder.readUEG();
-        if (picOrderCntType === 0) {
-            decoder.readUEG();
-        }
-        else if (picOrderCntType === 1) {
-            decoder.skipBits(1);
-            decoder.skipEG();
-            decoder.skipEG();
-            var numRefFramesInPicOrderCntCycle = decoder.readUEG();
-            for (var i = 0; i < numRefFramesInPicOrderCntCycle; ++i) {
-                decoder.skipEG();
-            }
-        }
-        decoder.skipUEG();
-        decoder.skipBits(1);
-        var picWidthInMbsMinus1 = decoder.readUEG();
-        var picHeightInMapUnitsMinus1 = decoder.readUEG();
-        var frameMbsOnlyFlag = decoder.readBits(1);
-        if (frameMbsOnlyFlag === 0) {
-            decoder.skipBits(1);
-        }
-        decoder.skipBits(1);
-        if (decoder.readBoolean()) {
-            frameCropLeftOffset = decoder.readUEG();
-            frameCropRightOffset = decoder.readUEG();
-            frameCropTopOffset = decoder.readUEG();
-            frameCropBottomOffset = decoder.readUEG();
-        }
-        if (decoder.readBoolean()) {
-            if (decoder.readBoolean()) {
-                var sarRatio = void 0;
-                var aspectRatioIdc = decoder.readUByte();
-                switch (aspectRatioIdc) {
-                    case 1:
-                        sarRatio = [1, 1];
-                        break;
-                    case 2:
-                        sarRatio = [12, 11];
-                        break;
-                    case 3:
-                        sarRatio = [10, 11];
-                        break;
-                    case 4:
-                        sarRatio = [16, 11];
-                        break;
-                    case 5:
-                        sarRatio = [40, 33];
-                        break;
-                    case 6:
-                        sarRatio = [24, 11];
-                        break;
-                    case 7:
-                        sarRatio = [20, 11];
-                        break;
-                    case 8:
-                        sarRatio = [32, 11];
-                        break;
-                    case 9:
-                        sarRatio = [80, 33];
-                        break;
-                    case 10:
-                        sarRatio = [18, 11];
-                        break;
-                    case 11:
-                        sarRatio = [15, 11];
-                        break;
-                    case 12:
-                        sarRatio = [64, 33];
-                        break;
-                    case 13:
-                        sarRatio = [160, 99];
-                        break;
-                    case 14:
-                        sarRatio = [4, 3];
-                        break;
-                    case 15:
-                        sarRatio = [3, 2];
-                        break;
-                    case 16:
-                        sarRatio = [2, 1];
-                        break;
-                    case 255: {
-                        sarRatio = [decoder.readUByte() << 8 | decoder.readUByte(), decoder.readUByte() << 8 | decoder.readUByte()];
-                        break;
-                    }
-                    default: {
-                        debug.error("  H264: Unknown aspectRatioIdc=" + aspectRatioIdc);
-                    }
-                }
-                if (sarRatio) {
-                    sarScale = sarRatio[0] / sarRatio[1];
-                }
-            }
-            if (decoder.readBoolean()) {
-                decoder.skipBits(1);
-            }
-            if (decoder.readBoolean()) {
-                decoder.skipBits(4);
-                if (decoder.readBoolean()) {
-                    decoder.skipBits(24);
-                }
-            }
-            if (decoder.readBoolean()) {
-                decoder.skipUEG();
-                decoder.skipUEG();
-            }
-            if (decoder.readBoolean()) {
-                var unitsInTick = decoder.readUInt();
-                var timeScale = decoder.readUInt();
-                var fixedFrameRate = decoder.readBoolean();
-                var frameDuration = timeScale / (2 * unitsInTick);
-                debug.log("timescale: " + timeScale + "; unitsInTick: " + unitsInTick + "; " +
-                    ("fixedFramerate: " + fixedFrameRate + "; avgFrameDuration: " + frameDuration));
-            }
+
+
+util.inherits(BufferList, DuplexStream)
+
+
+BufferList.prototype._offset = function _offset (offset) {
+  var tot = 0, i = 0, _t
+  if (offset === 0) return [ 0, 0 ]
+  for (; i < this._bufs.length; i++) {
+    _t = tot + this._bufs[i].length
+    if (offset < _t || i == this._bufs.length - 1) {
+      return [ i, offset - tot ]
+    }
+    tot = _t
+  }
+}
+
+BufferList.prototype._reverseOffset = function (blOffset) {
+  var bufferId = blOffset[0]
+  var offset = blOffset[1]
+  for (var i = 0; i < bufferId; i++) {
+    offset += this._bufs[i].length
+  }
+  return offset
+}
+
+BufferList.prototype.append = function append (buf) {
+  var i = 0
+
+  if (Buffer.isBuffer(buf)) {
+    this._appendBuffer(buf)
+  } else if (Array.isArray(buf)) {
+    for (; i < buf.length; i++)
+      this.append(buf[i])
+  } else if (buf instanceof BufferList) {
+    // unwrap argument into individual BufferLists
+    for (; i < buf._bufs.length; i++)
+      this.append(buf._bufs[i])
+  } else if (buf != null) {
+    // coerce number arguments to strings, since Buffer(number) does
+    // uninitialized memory allocation
+    if (typeof buf == 'number')
+      buf = buf.toString()
+
+    this._appendBuffer(Buffer.from(buf))
+  }
+
+  return this
+}
+
+
+BufferList.prototype._appendBuffer = function appendBuffer (buf) {
+  this._bufs.push(buf)
+  this.length += buf.length
+}
+
+
+BufferList.prototype._write = function _write (buf, encoding, callback) {
+  this._appendBuffer(buf)
+
+  if (typeof callback == 'function')
+    callback()
+}
+
+
+BufferList.prototype._read = function _read (size) {
+  if (!this.length)
+    return this.push(null)
+
+  size = Math.min(size, this.length)
+  this.push(this.slice(0, size))
+  this.consume(size)
+}
+
+
+BufferList.prototype.end = function end (chunk) {
+  DuplexStream.prototype.end.call(this, chunk)
+
+  if (this._callback) {
+    this._callback(null, this.slice())
+    this._callback = null
+  }
+}
+
+
+BufferList.prototype.get = function get (index) {
+  if (index > this.length || index < 0) {
+    return undefined
+  }
+  var offset = this._offset(index)
+  return this._bufs[offset[0]][offset[1]]
+}
+
+
+BufferList.prototype.slice = function slice (start, end) {
+  if (typeof start == 'number' && start < 0)
+    start += this.length
+  if (typeof end == 'number' && end < 0)
+    end += this.length
+  return this.copy(null, 0, start, end)
+}
+
+
+BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
+  if (typeof srcStart != 'number' || srcStart < 0)
+    srcStart = 0
+  if (typeof srcEnd != 'number' || srcEnd > this.length)
+    srcEnd = this.length
+  if (srcStart >= this.length)
+    return dst || Buffer.alloc(0)
+  if (srcEnd <= 0)
+    return dst || Buffer.alloc(0)
+
+  var copy   = !!dst
+    , off    = this._offset(srcStart)
+    , len    = srcEnd - srcStart
+    , bytes  = len
+    , bufoff = (copy && dstStart) || 0
+    , start  = off[1]
+    , l
+    , i
+
+  // copy/slice everything
+  if (srcStart === 0 && srcEnd == this.length) {
+    if (!copy) { // slice, but full concat if multiple buffers
+      return this._bufs.length === 1
+        ? this._bufs[0]
+        : Buffer.concat(this._bufs, this.length)
+    }
+
+    // copy, need to copy individual buffers
+    for (i = 0; i < this._bufs.length; i++) {
+      this._bufs[i].copy(dst, bufoff)
+      bufoff += this._bufs[i].length
+    }
+
+    return dst
+  }
+
+  // easy, cheap case where it's a subset of one of the buffers
+  if (bytes <= this._bufs[off[0]].length - start) {
+    return copy
+      ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
+      : this._bufs[off[0]].slice(start, start + bytes)
+  }
+
+  if (!copy) // a slice, we need something to copy in to
+    dst = Buffer.allocUnsafe(len)
+
+  for (i = off[0]; i < this._bufs.length; i++) {
+    l = this._bufs[i].length - start
+
+    if (bytes > l) {
+      this._bufs[i].copy(dst, bufoff, start)
+    } else {
+      this._bufs[i].copy(dst, bufoff, start, start + bytes)
+      break
+    }
+
+    bufoff += l
+    bytes -= l
+
+    if (start)
+      start = 0
+  }
+
+  return dst
+}
+
+BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
+  start = start || 0
+  end = typeof end !== 'number' ? this.length : end
+
+  if (start < 0)
+    start += this.length
+  if (end < 0)
+    end += this.length
+
+  if (start === end) {
+    return new BufferList()
+  }
+  var startOffset = this._offset(start)
+    , endOffset = this._offset(end)
+    , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
+
+  if (endOffset[1] == 0)
+    buffers.pop()
+  else
+    buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
+
+  if (startOffset[1] != 0)
+    buffers[0] = buffers[0].slice(startOffset[1])
+
+  return new BufferList(buffers)
+}
+
+BufferList.prototype.toString = function toString (encoding, start, end) {
+  return this.slice(start, end).toString(encoding)
+}
+
+BufferList.prototype.consume = function consume (bytes) {
+  while (this._bufs.length) {
+    if (bytes >= this._bufs[0].length) {
+      bytes -= this._bufs[0].length
+      this.length -= this._bufs[0].length
+      this._bufs.shift()
+    } else {
+      this._bufs[0] = this._bufs[0].slice(bytes)
+      this.length -= bytes
+      break
+    }
+  }
+  return this
+}
+
+
+BufferList.prototype.duplicate = function duplicate () {
+  var i = 0
+    , copy = new BufferList()
+
+  for (; i < this._bufs.length; i++)
+    copy.append(this._bufs[i])
+
+  return copy
+}
+
+
+BufferList.prototype.destroy = function destroy () {
+  this._bufs.length = 0
+  this.length = 0
+  this.push(null)
+}
+
+
+BufferList.prototype.indexOf = function (search, offset, encoding) {
+  if (encoding === undefined && typeof offset === 'string') {
+    encoding = offset
+    offset = undefined
+  }
+  if (typeof search === 'function' || Array.isArray(search)) {
+    throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
+  } else if (typeof search === 'number') {
+      search = Buffer.from([search])
+  } else if (typeof search === 'string') {
+    search = Buffer.from(search, encoding)
+  } else if (search instanceof BufferList) {
+    search = search.slice()
+  } else if (!Buffer.isBuffer(search)) {
+    search = Buffer.from(search)
+  }
+
+  offset = Number(offset || 0)
+  if (isNaN(offset)) {
+    offset = 0
+  }
+
+  if (offset < 0) {
+    offset = this.length + offset
+  }
+
+  if (offset < 0) {
+    offset = 0
+  }
+
+  if (search.length === 0) {
+    return offset > this.length ? this.length : offset
+  }
+
+  var blOffset = this._offset(offset)
+  var blIndex = blOffset[0] // index of which internal buffer we're working on
+  var buffOffset = blOffset[1] // offset of the internal buffer we're working on
+
+  // scan over each buffer
+  for (blIndex; blIndex < this._bufs.length; blIndex++) {
+    var buff = this._bufs[blIndex]
+    while(buffOffset < buff.length) {
+      var availableWindow = buff.length - buffOffset
+      if (availableWindow >= search.length) {
+        var nativeSearchResult = buff.indexOf(search, buffOffset)
+        if (nativeSearchResult !== -1) {
+          return this._reverseOffset([blIndex, nativeSearchResult])
         }
-        return {
-            width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
-            height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) -
-                ((frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset)),
-        };
-    };
-    H264Parser.readSEI = function (data) {
-        var decoder = new bit_stream_1.default(data);
-        decoder.skipBits(8);
-        var result = [];
-        while (decoder.bitsAvailable > 3 * 8) {
-            result.push(this.readSEIMessage(decoder));
+        buffOffset = buff.length - search.length + 1 // end of native search window
+      } else {
+        var revOffset = this._reverseOffset([blIndex, buffOffset])
+        if (this._match(revOffset, search)) {
+          return revOffset
         }
-        return result;
-    };
-    H264Parser.readSEIMessage = function (decoder) {
-        function get() {
-            var result = 0;
-            while (true) {
-                var value = decoder.readUByte();
-                result += value;
-                if (value !== 0xff) {
-                    break;
-                }
-            }
-            return result;
+        buffOffset++
+      }
+    }
+    buffOffset = 0
+  }
+  return -1
+}
+
+BufferList.prototype._match = function(offset, search) {
+  if (this.length - offset < search.length) {
+    return false
+  }
+  for (var searchOffset = 0; searchOffset < search.length ; searchOffset++) {
+    if(this.get(offset + searchOffset) !== search[searchOffset]){
+      return false
+    }
+  }
+  return true
+}
+
+
+;(function () {
+  var methods = {
+      'readDoubleBE' : 8
+    , 'readDoubleLE' : 8
+    , 'readFloatBE'  : 4
+    , 'readFloatLE'  : 4
+    , 'readInt32BE'  : 4
+    , 'readInt32LE'  : 4
+    , 'readUInt32BE' : 4
+    , 'readUInt32LE' : 4
+    , 'readInt16BE'  : 2
+    , 'readInt16LE'  : 2
+    , 'readUInt16BE' : 2
+    , 'readUInt16LE' : 2
+    , 'readInt8'     : 1
+    , 'readUInt8'    : 1
+    , 'readIntBE'    : null
+    , 'readIntLE'    : null
+    , 'readUIntBE'   : null
+    , 'readUIntLE'   : null
+  }
+
+  for (var m in methods) {
+    (function (m) {
+      if (methods[m] === null) {
+        BufferList.prototype[m] = function (offset, byteLength) {
+          return this.slice(offset, offset + byteLength)[m](0, byteLength)
         }
-        var payloadType = get();
-        var payloadSize = get();
-        return this.readSEIPayload(decoder, payloadType, payloadSize);
-    };
-    H264Parser.readSEIPayload = function (decoder, type, size) {
-        var result;
-        switch (type) {
-            default:
-                result = { type: type };
-                decoder.skipBits(size * 8);
+      }
+      else {
+        BufferList.prototype[m] = function (offset) {
+          return this.slice(offset, offset + methods[m])[m](0)
         }
-        decoder.skipBits(decoder.bitsAvailable % 8);
-        return result;
-    };
-    return H264Parser;
-}());
-exports.default = H264Parser;
+      }
+    }(m))
+  }
+}())
+
+
+module.exports = BufferList
+
+},{"readable-stream":18,"safe-buffer":19,"util":74}],2:[function(require,module,exports){
+(function (Buffer){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// NOTE: These type checking functions intentionally don't use `instanceof`
+// because it is fragile and can be easily faked with `Object.create()`.
+
+function isArray(arg) {
+  if (Array.isArray) {
+    return Array.isArray(arg);
+  }
+  return objectToString(arg) === '[object Array]';
+}
+exports.isArray = isArray;
+
+function isBoolean(arg) {
+  return typeof arg === 'boolean';
+}
+exports.isBoolean = isBoolean;
+
+function isNull(arg) {
+  return arg === null;
+}
+exports.isNull = isNull;
+
+function isNullOrUndefined(arg) {
+  return arg == null;
+}
+exports.isNullOrUndefined = isNullOrUndefined;
+
+function isNumber(arg) {
+  return typeof arg === 'number';
+}
+exports.isNumber = isNumber;
+
+function isString(arg) {
+  return typeof arg === 'string';
+}
+exports.isString = isString;
+
+function isSymbol(arg) {
+  return typeof arg === 'symbol';
+}
+exports.isSymbol = isSymbol;
+
+function isUndefined(arg) {
+  return arg === void 0;
+}
+exports.isUndefined = isUndefined;
+
+function isRegExp(re) {
+  return objectToString(re) === '[object RegExp]';
+}
+exports.isRegExp = isRegExp;
+
+function isObject(arg) {
+  return typeof arg === 'object' && arg !== null;
+}
+exports.isObject = isObject;
+
+function isDate(d) {
+  return objectToString(d) === '[object Date]';
+}
+exports.isDate = isDate;
+
+function isError(e) {
+  return (objectToString(e) === '[object Error]' || e instanceof Error);
+}
+exports.isError = isError;
+
+function isFunction(arg) {
+  return typeof arg === 'function';
+}
+exports.isFunction = isFunction;
+
+function isPrimitive(arg) {
+  return arg === null ||
+         typeof arg === 'boolean' ||
+         typeof arg === 'number' ||
+         typeof arg === 'string' ||
+         typeof arg === 'symbol' ||  // ES6 symbol
+         typeof arg === 'undefined';
+}
+exports.isPrimitive = isPrimitive;
+
+exports.isBuffer = Buffer.isBuffer;
+
+function objectToString(o) {
+  return Object.prototype.toString.call(o);
+}
+
+}).call(this,{"isBuffer":require("../../../../../../../../usr/local/lib/node_modules/browserify/node_modules/is-buffer/index.js")})
+},{"../../../../../../../../usr/local/lib/node_modules/browserify/node_modules/is-buffer/index.js":68}],3:[function(require,module,exports){
+if (typeof Object.create === 'function') {
+  // implementation from standard node.js 'util' module
+  module.exports = function inherits(ctor, superCtor) {
+    ctor.super_ = superCtor
+    ctor.prototype = Object.create(superCtor.prototype, {
+      constructor: {
+        value: ctor,
+        enumerable: false,
+        writable: true,
+        configurable: true
+      }
+    });
+  };
+} else {
+  // old school shim for old browsers
+  module.exports = function inherits(ctor, superCtor) {
+    ctor.super_ = superCtor
+    var TempCtor = function () {}
+    TempCtor.prototype = superCtor.prototype
+    ctor.prototype = new TempCtor()
+    ctor.prototype.constructor = ctor
+  }
+}
 
-},{"./util/NALU":5,"./util/bit-stream":6,"./util/debug":7}],3:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var h264_parser_1 = require("./h264-parser");
-var debug = require("./util/debug");
-var NALU_1 = require("./util/NALU");
-var trackId = 1;
-var H264Remuxer = (function () {
-    function H264Remuxer(fps, framePerFragment, timescale) {
-        this.fps = fps;
-        this.framePerFragment = framePerFragment;
-        this.timescale = timescale;
-        this.readyToDecode = false;
-        this.totalDTS = 0;
-        this.stepDTS = Math.round(this.timescale / this.fps);
-        this.frameCount = 0;
-        this.seq = 1;
-        this.mp4track = {
-            id: H264Remuxer.getTrackID(),
-            type: 'video',
-            len: 0,
-            codec: '',
-            sps: [],
-            pps: [],
-            seiBuffering: false,
-            width: 0,
-            height: 0,
-            timescale: timescale,
-            duration: timescale,
-            samples: [],
-            isKeyFrame: true,
-        };
-        this.unitSamples = [[]];
-        this.parser = new h264_parser_1.default(this);
+},{}],4:[function(require,module,exports){
+var toString = {}.toString;
+
+module.exports = Array.isArray || function (arr) {
+  return toString.call(arr) == '[object Array]';
+};
+
+},{}],5:[function(require,module,exports){
+'use strict'
+
+var Buffer = require('safe-buffer').Buffer
+var assert = require('assert')
+var bl = require('bl')
+var streams = require('./lib/streams')
+var buildDecode = require('./lib/decoder')
+var buildEncode = require('./lib/encoder')
+
+function msgpack (options) {
+  var encodingTypes = []
+  var decodingTypes = []
+
+  options = options || {
+    forceFloat64: false,
+    compatibilityMode: false,
+    disableTimestampEncoding: false // if true, skips encoding Dates using the msgpack timestamp ext format (-1)
+  }
+
+  function registerEncoder (check, encode) {
+    assert(check, 'must have an encode function')
+    assert(encode, 'must have an encode function')
+
+    encodingTypes.push({
+      check: check, encode: encode
+    })
+
+    return this
+  }
+
+  function registerDecoder (type, decode) {
+    assert(type >= 0, 'must have a non-negative type')
+    assert(decode, 'must have a decode function')
+
+    decodingTypes.push({
+      type: type, decode: decode
+    })
+
+    return this
+  }
+
+  function register (type, constructor, encode, decode) {
+    assert(constructor, 'must have a constructor')
+    assert(encode, 'must have an encode function')
+    assert(type >= 0, 'must have a non-negative type')
+    assert(decode, 'must have a decode function')
+
+    function check (obj) {
+      return (obj instanceof constructor)
+    }
+
+    function reEncode (obj) {
+      var buf = bl()
+      var header = Buffer.allocUnsafe(1)
+
+      header.writeInt8(type, 0)
+
+      buf.append(header)
+      buf.append(encode(obj))
+
+      return buf
+    }
+
+    this.registerEncoder(check, reEncode)
+    this.registerDecoder(type, decode)
+
+    return this
+  }
+
+  return {
+    encode: buildEncode(encodingTypes, options.forceFloat64, options.compatibilityMode, options.disableTimestampEncoding),
+    decode: buildDecode(decodingTypes),
+    register: register,
+    registerEncoder: registerEncoder,
+    registerDecoder: registerDecoder,
+    encoder: streams.encoder,
+    decoder: streams.decoder,
+    // needed for levelup support
+    buffer: true,
+    type: 'msgpack5',
+    IncompleteBufferError: buildDecode.IncompleteBufferError
+  }
+}
+
+module.exports = msgpack
+
+},{"./lib/decoder":6,"./lib/encoder":7,"./lib/streams":8,"assert":59,"bl":1,"safe-buffer":19}],6:[function(require,module,exports){
+'use strict'
+
+var bl = require('bl')
+var util = require('util')
+
+function IncompleteBufferError (message) {
+  Error.call(this) // super constructor
+  if (Error.captureStackTrace) {
+    Error.captureStackTrace(this, this.constructor) // super helper method to include stack trace in error object
+  }
+  this.name = this.constructor.name
+  this.message = message || 'unable to decode'
+}
+
+util.inherits(IncompleteBufferError, Error)
+
+module.exports = function buildDecode (decodingTypes) {
+  return decode
+
+  function getSize (first) {
+    switch (first) {
+      case 0xc4:
+        return 2
+      case 0xc5:
+        return 3
+      case 0xc6:
+        return 5
+      case 0xc7:
+        return 3
+      case 0xc8:
+        return 4
+      case 0xc9:
+        return 6
+      case 0xca:
+        return 5
+      case 0xcb:
+        return 9
+      case 0xcc:
+        return 2
+      case 0xcd:
+        return 3
+      case 0xce:
+        return 5
+      case 0xcf:
+        return 9
+      case 0xd0:
+        return 2
+      case 0xd1:
+        return 3
+      case 0xd2:
+        return 5
+      case 0xd3:
+        return 9
+      case 0xd4:
+        return 3
+      case 0xd5:
+        return 4
+      case 0xd6:
+        return 6
+      case 0xd7:
+        return 10
+      case 0xd8:
+        return 18
+      case 0xd9:
+        return 2
+      case 0xda:
+        return 3
+      case 0xdb:
+        return 5
+      case 0xde:
+        return 3
+      default:
+        return -1
+    }
+  }
+
+  function hasMinBufferSize (first, length) {
+    var size = getSize(first)
+
+    if (size !== -1 && length < size) {
+      return false
+    } else {
+      return true
+    }
+  }
+
+  function isValidDataSize (dataLength, bufLength, headerLength) {
+    return bufLength >= headerLength + dataLength
+  }
+
+  function buildDecodeResult (value, bytesConsumed) {
+    return {
+      value: value,
+      bytesConsumed: bytesConsumed
+    }
+  }
+
+  function decode (buf) {
+    if (!(buf instanceof bl)) {
+      buf = bl().append(buf)
+    }
+
+    var result = tryDecode(buf)
+    if (result) {
+      buf.consume(result.bytesConsumed)
+      return result.value
+    } else {
+      throw new IncompleteBufferError()
+    }
+  }
+
+  function tryDecode (buf, offset) {
+    offset = offset === undefined ? 0 : offset
+    var bufLength = buf.length - offset
+    if (bufLength <= 0) {
+      return null
     }
-    H264Remuxer.getTrackID = function () {
-        return trackId++;
-    };
-    Object.defineProperty(H264Remuxer.prototype, "seqNum", {
-        get: function () {
-            return this.seq;
-        },
-        enumerable: true,
-        configurable: true
-    });
-    H264Remuxer.prototype.remux = function (nalu) {
-        if (this.mp4track.seiBuffering && nalu.type() === NALU_1.default.SEI) {
-            return this.createNextFrame();
-        }
-        if (this.parser.parseNAL(nalu)) {
-            this.unitSamples[this.unitSamples.length - 1].push(nalu);
-            this.mp4track.len += nalu.getSize();
-        }
-        if (!this.mp4track.seiBuffering && (nalu.type() === NALU_1.default.IDR || nalu.type() === NALU_1.default.NDR)) {
-            return this.createNextFrame();
-        }
-        return;
-    };
-    H264Remuxer.prototype.createNextFrame = function () {
-        if (this.mp4track.len > 0) {
-            this.frameCount++;
-            if (this.frameCount % this.framePerFragment === 0) {
-                var fragment = this.getFragment();
-                if (fragment) {
-                    var dts = this.totalDTS;
-                    this.totalDTS = this.stepDTS * this.frameCount;
-                    return [dts, fragment];
-                }
-                else {
-                    debug.log("No mp4 sample data.");
-                }
-            }
-            this.unitSamples.push([]);
-        }
-        return;
-    };
-    H264Remuxer.prototype.flush = function () {
-        this.seq++;
-        this.mp4track.len = 0;
-        this.mp4track.samples = [];
-        this.mp4track.isKeyFrame = false;
-        this.unitSamples = [[]];
-    };
-    H264Remuxer.prototype.getFragment = function () {
-        if (!this.checkReadyToDecode()) {
-            return undefined;
-        }
-        var payload = new Uint8Array(this.mp4track.len);
-        this.mp4track.samples = [];
-        var offset = 0;
-        for (var i = 0, len = this.unitSamples.length; i < len; i++) {
-            var units = this.unitSamples[i];
-            if (units.length === 0) {
-                continue;
-            }
-            var mp4Sample = {
-                size: 0,
-                cts: this.stepDTS * i,
-            };
-            for (var _i = 0, units_1 = units; _i < units_1.length; _i++) {
-                var unit = units_1[_i];
-                mp4Sample.size += unit.getSize();
-                payload.set(unit.getData(), offset);
-                offset += unit.getSize();
-            }
-            this.mp4track.samples.push(mp4Sample);
-        }
-        if (offset === 0) {
-            return undefined;
-        }
-        return payload;
-    };
-    H264Remuxer.prototype.checkReadyToDecode = function () {
-        if (!this.readyToDecode || this.unitSamples.filter(function (array) { return array.length > 0; }).length === 0) {
-            debug.log("Not ready to decode! readyToDecode(" + this.readyToDecode + ") is false or units is empty.");
-            return false;
-        }
-        return true;
-    };
-    return H264Remuxer;
-}());
-exports.default = H264Remuxer;
 
-},{"./h264-parser":2,"./util/NALU":5,"./util/debug":7}],4:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var MP4 = (function () {
-    function MP4() {
+    var first = buf.readUInt8(offset)
+    var length
+    var result = 0
+    var type
+    var bytePos
+
+    if (!hasMinBufferSize(first, bufLength)) {
+      return null
     }
-    MP4.init = function () {
-        MP4.initalized = true;
-        MP4.types = {
-            avc1: [],
-            avcC: [],
-            btrt: [],
-            dinf: [],
-            dref: [],
-            esds: [],
-            ftyp: [],
-            hdlr: [],
-            mdat: [],
-            mdhd: [],
-            mdia: [],
-            mfhd: [],
-            minf: [],
-            moof: [],
-            moov: [],
-            mp4a: [],
-            mvex: [],
-            mvhd: [],
-            sdtp: [],
-            stbl: [],
-            stco: [],
-            stsc: [],
-            stsd: [],
-            stsz: [],
-            stts: [],
-            styp: [],
-            tfdt: [],
-            tfhd: [],
-            traf: [],
-            trak: [],
-            trun: [],
-            trep: [],
-            trex: [],
-            tkhd: [],
-            vmhd: [],
-            smhd: [],
-        };
-        for (var type in MP4.types) {
-            if (MP4.types.hasOwnProperty(type)) {
-                MP4.types[type] = [
-                    type.charCodeAt(0),
-                    type.charCodeAt(1),
-                    type.charCodeAt(2),
-                    type.charCodeAt(3),
-                ];
-            }
+
+    switch (first) {
+      case 0xc0:
+        return buildDecodeResult(null, 1)
+      case 0xc2:
+        return buildDecodeResult(false, 1)
+      case 0xc3:
+        return buildDecodeResult(true, 1)
+      case 0xcc:
+        // 1-byte unsigned int
+        result = buf.readUInt8(offset + 1)
+        return buildDecodeResult(result, 2)
+      case 0xcd:
+        // 2-bytes BE unsigned int
+        result = buf.readUInt16BE(offset + 1)
+        return buildDecodeResult(result, 3)
+      case 0xce:
+        // 4-bytes BE unsigned int
+        result = buf.readUInt32BE(offset + 1)
+        return buildDecodeResult(result, 5)
+      case 0xcf:
+        // 8-bytes BE unsigned int
+        // Read long byte by byte, big-endian
+        for (bytePos = 7; bytePos >= 0; bytePos--) {
+          result += (buf.readUInt8(offset + bytePos + 1) * Math.pow(2, (8 * (7 - bytePos))))
         }
-        var hdlr = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x76, 0x69, 0x64, 0x65,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x56, 0x69, 0x64, 0x65,
-            0x6f, 0x48, 0x61, 0x6e,
-            0x64, 0x6c, 0x65, 0x72, 0x00,
-        ]);
-        var dref = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x01,
-            0x00, 0x00, 0x00, 0x0c,
-            0x75, 0x72, 0x6c, 0x20,
-            0x00,
-            0x00, 0x00, 0x01,
-        ]);
-        var stco = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-        ]);
-        MP4.STTS = MP4.STSC = MP4.STCO = stco;
-        MP4.STSZ = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-        ]);
-        MP4.VMHD = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x01,
-            0x00, 0x00,
-            0x00, 0x00,
-            0x00, 0x00,
-            0x00, 0x00,
-        ]);
-        MP4.SMHD = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00,
-            0x00, 0x00,
-        ]);
-        MP4.STSD = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x01
-        ]);
-        MP4.FTYP = MP4.box(MP4.types.ftyp, new Uint8Array([
-            0x69, 0x73, 0x6f, 0x35,
-            0x00, 0x00, 0x00, 0x01,
-            0x61, 0x76, 0x63, 0x31,
-            0x69, 0x73, 0x6f, 0x35,
-            0x64, 0x61, 0x73, 0x68,
-        ]));
-        MP4.STYP = MP4.box(MP4.types.styp, new Uint8Array([
-            0x6d, 0x73, 0x64, 0x68,
-            0x00, 0x00, 0x00, 0x00,
-            0x6d, 0x73, 0x64, 0x68,
-            0x6d, 0x73, 0x69, 0x78,
-        ]));
-        MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
-        MP4.HDLR = MP4.box(MP4.types.hdlr, hdlr);
-    };
-    MP4.box = function (type) {
-        var payload = [];
-        for (var _i = 1; _i < arguments.length; _i++) {
-            payload[_i - 1] = arguments[_i];
+        return buildDecodeResult(result, 9)
+      case 0xd0:
+        // 1-byte signed int
+        result = buf.readInt8(offset + 1)
+        return buildDecodeResult(result, 2)
+      case 0xd1:
+        // 2-bytes signed int
+        result = buf.readInt16BE(offset + 1)
+        return buildDecodeResult(result, 3)
+      case 0xd2:
+        // 4-bytes signed int
+        result = buf.readInt32BE(offset + 1)
+        return buildDecodeResult(result, 5)
+      case 0xd3:
+        result = readInt64BE(buf.slice(offset + 1, offset + 9), 0)
+        return buildDecodeResult(result, 9)
+      case 0xca:
+        // 4-bytes float
+        result = buf.readFloatBE(offset + 1)
+        return buildDecodeResult(result, 5)
+      case 0xcb:
+        // 8-bytes double
+        result = buf.readDoubleBE(offset + 1)
+        return buildDecodeResult(result, 9)
+      case 0xd9:
+        // strings up to 2^8 - 1 bytes
+        length = buf.readUInt8(offset + 1)
+        if (!isValidDataSize(length, bufLength, 2)) {
+          return null
         }
-        var size = 8;
-        for (var _a = 0, payload_1 = payload; _a < payload_1.length; _a++) {
-            var p = payload_1[_a];
-            size += p.byteLength;
+        result = buf.toString('utf8', offset + 2, offset + 2 + length)
+        return buildDecodeResult(result, 2 + length)
+      case 0xda:
+        // strings up to 2^16 - 2 bytes
+        length = buf.readUInt16BE(offset + 1)
+        if (!isValidDataSize(length, bufLength, 3)) {
+          return null
         }
-        var result = new Uint8Array(size);
-        result[0] = (size >> 24) & 0xff;
-        result[1] = (size >> 16) & 0xff;
-        result[2] = (size >> 8) & 0xff;
-        result[3] = size & 0xff;
-        result.set(type, 4);
-        size = 8;
-        for (var _b = 0, payload_2 = payload; _b < payload_2.length; _b++) {
-            var box = payload_2[_b];
-            result.set(box, size);
-            size += box.byteLength;
+        result = buf.toString('utf8', offset + 3, offset + 3 + length)
+        return buildDecodeResult(result, 3 + length)
+      case 0xdb:
+        // strings up to 2^32 - 4 bytes
+        length = buf.readUInt32BE(offset + 1)
+        if (!isValidDataSize(length, bufLength, 5)) {
+          return null
         }
-        return result;
-    };
-    MP4.mdat = function (data) {
-        return MP4.box(MP4.types.mdat, data);
-    };
-    MP4.mdhd = function (timescale) {
-        return MP4.box(MP4.types.mdhd, new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x01,
-            0x00, 0x00, 0x00, 0x02,
-            (timescale >> 24) & 0xFF,
-            (timescale >> 16) & 0xFF,
-            (timescale >> 8) & 0xFF,
-            timescale & 0xFF,
-            0x00, 0x00, 0x00, 0x00,
-            0x55, 0xc4,
-            0x00, 0x00,
-        ]));
-    };
-    MP4.mdia = function (track) {
-        return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale), MP4.HDLR, MP4.minf(track));
-    };
-    MP4.mfhd = function (sequenceNumber) {
-        return MP4.box(MP4.types.mfhd, new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            (sequenceNumber >> 24),
-            (sequenceNumber >> 16) & 0xFF,
-            (sequenceNumber >> 8) & 0xFF,
-            sequenceNumber & 0xFF,
-        ]));
-    };
-    MP4.minf = function (track) {
-        return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
-    };
-    MP4.moof = function (sn, baseMediaDecodeTime, track) {
-        return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track, baseMediaDecodeTime));
-    };
-    MP4.moov = function (tracks, duration, timescale) {
-        var boxes = [];
-        for (var _i = 0, tracks_1 = tracks; _i < tracks_1.length; _i++) {
-            var track = tracks_1[_i];
-            boxes.push(MP4.trak(track));
+        result = buf.toString('utf8', offset + 5, offset + 5 + length)
+        return buildDecodeResult(result, 5 + length)
+      case 0xc4:
+        // buffers up to 2^8 - 1 bytes
+        length = buf.readUInt8(offset + 1)
+        if (!isValidDataSize(length, bufLength, 2)) {
+          return null
         }
-        return MP4.box.apply(MP4, [MP4.types.moov, MP4.mvhd(timescale, duration), MP4.mvex(tracks)].concat(boxes));
-    };
-    MP4.mvhd = function (timescale, duration) {
-        var bytes = new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x01,
-            0x00, 0x00, 0x00, 0x02,
-            (timescale >> 24) & 0xFF,
-            (timescale >> 16) & 0xFF,
-            (timescale >> 8) & 0xFF,
-            timescale & 0xFF,
-            (duration >> 24) & 0xFF,
-            (duration >> 16) & 0xFF,
-            (duration >> 8) & 0xFF,
-            duration & 0xFF,
-            0x00, 0x01, 0x00, 0x00,
-            0x01, 0x00,
-            0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x01, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x01, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x40, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x02,
-        ]);
-        return MP4.box(MP4.types.mvhd, bytes);
-    };
-    MP4.mvex = function (tracks) {
-        var boxes = [];
-        for (var _i = 0, tracks_2 = tracks; _i < tracks_2.length; _i++) {
-            var track = tracks_2[_i];
-            boxes.push(MP4.trex(track));
+        result = buf.slice(offset + 2, offset + 2 + length)
+        return buildDecodeResult(result, 2 + length)
+      case 0xc5:
+        // buffers up to 2^16 - 1 bytes
+        length = buf.readUInt16BE(offset + 1)
+        if (!isValidDataSize(length, bufLength, 3)) {
+          return null
         }
-        return MP4.box.apply(MP4, [MP4.types.mvex].concat(boxes, [MP4.trep()]));
-    };
-    MP4.trep = function () {
-        return MP4.box(MP4.types.trep, new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x01,
-        ]));
-    };
-    MP4.stbl = function (track) {
-        return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
-    };
-    MP4.avc1 = function (track) {
-        var sps = [];
-        var pps = [];
-        for (var _i = 0, _a = track.sps; _i < _a.length; _i++) {
-            var data = _a[_i];
-            var len = data.byteLength;
-            sps.push((len >>> 8) & 0xFF);
-            sps.push((len & 0xFF));
-            sps = sps.concat(Array.prototype.slice.call(data));
+        result = buf.slice(offset + 3, offset + 3 + length)
+        return buildDecodeResult(result, 3 + length)
+      case 0xc6:
+        // buffers up to 2^32 - 1 bytes
+        length = buf.readUInt32BE(offset + 1)
+        if (!isValidDataSize(length, bufLength, 5)) {
+          return null
         }
-        for (var _b = 0, _c = track.pps; _b < _c.length; _b++) {
-            var data = _c[_b];
-            var len = data.byteLength;
-            pps.push((len >>> 8) & 0xFF);
-            pps.push((len & 0xFF));
-            pps = pps.concat(Array.prototype.slice.call(data));
+        result = buf.slice(offset + 5, offset + 5 + length)
+        return buildDecodeResult(result, 5 + length)
+      case 0xdc:
+        // array up to 2^16 elements - 2 bytes
+        if (bufLength < 3) {
+          return null
+        }
+
+        length = buf.readUInt16BE(offset + 1)
+        return decodeArray(buf, offset, length, 3)
+      case 0xdd:
+        // array up to 2^32 elements - 4 bytes
+        if (bufLength < 5) {
+          return null
+        }
+
+        length = buf.readUInt32BE(offset + 1)
+        return decodeArray(buf, offset, length, 5)
+      case 0xde:
+        // maps up to 2^16 elements - 2 bytes
+        length = buf.readUInt16BE(offset + 1)
+        return decodeMap(buf, offset, length, 3)
+      case 0xdf:
+        length = buf.readUInt32BE(offset + 1)
+        return decodeMap(buf, offset, length, 5)
+      case 0xd4:
+        return decodeFixExt(buf, offset, 1)
+      case 0xd5:
+        return decodeFixExt(buf, offset, 2)
+      case 0xd6:
+        return decodeFixExt(buf, offset, 4)
+      case 0xd7:
+        return decodeFixExt(buf, offset, 8)
+      case 0xd8:
+        return decodeFixExt(buf, offset, 16)
+      case 0xc7:
+        // ext up to 2^8 - 1 bytes
+        length = buf.readUInt8(offset + 1)
+        type = buf.readUInt8(offset + 2)
+        if (!isValidDataSize(length, bufLength, 3)) {
+          return null
+        }
+        return decodeExt(buf, offset, type, length, 3)
+      case 0xc8:
+        // ext up to 2^16 - 1 bytes
+        length = buf.readUInt16BE(offset + 1)
+        type = buf.readUInt8(offset + 3)
+        if (!isValidDataSize(length, bufLength, 4)) {
+          return null
+        }
+        return decodeExt(buf, offset, type, length, 4)
+      case 0xc9:
+        // ext up to 2^32 - 1 bytes
+        length = buf.readUInt32BE(offset + 1)
+        type = buf.readUInt8(offset + 5)
+        if (!isValidDataSize(length, bufLength, 6)) {
+          return null
+        }
+        return decodeExt(buf, offset, type, length, 6)
+    }
+
+    if ((first & 0xf0) === 0x90) {
+      // we have an array with up to 15 elements (fixarray)
+      length = first & 0x0f
+      return decodeArray(buf, offset, length, 1)
+    } else if ((first & 0xf0) === 0x80) {
+      // we have a map with up to 15 elements (fixmap)
+      length = first & 0x0f
+      return decodeMap(buf, offset, length, 1)
+    } else if ((first & 0xe0) === 0xa0) {
+      // fixstr up to 31 bytes
+      length = first & 0x1f
+      if (isValidDataSize(length, bufLength, 1)) {
+        result = buf.toString('utf8', offset + 1, offset + length + 1)
+        return buildDecodeResult(result, length + 1)
+      } else {
+        return null
+      }
+    } else if (first >= 0xe0) {
+      // 5 bits negative ints
+      result = first - 0x100
+      return buildDecodeResult(result, 1)
+    } else if (first < 0x80) {
+      // 7-bits positive ints
+      return buildDecodeResult(first, 1)
+    } else {
+      throw new Error('not implemented yet')
+    }
+  }
+
+  function readInt64BE (buf, offset) {
+    var negate = (buf[offset] & 0x80) == 0x80 // eslint-disable-line
+
+    if (negate) {
+      var carry = 1
+      for (var i = offset + 7; i >= offset; i--) {
+        var v = (buf[i] ^ 0xff) + carry
+        buf[i] = v & 0xff
+        carry = v >> 8
+      }
+    }
+
+    var hi = buf.readUInt32BE(offset + 0)
+    var lo = buf.readUInt32BE(offset + 4)
+    return (hi * 4294967296 + lo) * (negate ? -1 : +1)
+  }
+
+  function decodeArray (buf, offset, length, headerLength) {
+    var result = []
+    var i
+    var totalBytesConsumed = 0
+
+    offset += headerLength
+    for (i = 0; i < length; i++) {
+      var decodeResult = tryDecode(buf, offset)
+      if (decodeResult) {
+        result.push(decodeResult.value)
+        offset += decodeResult.bytesConsumed
+        totalBytesConsumed += decodeResult.bytesConsumed
+      } else {
+        return null
+      }
+    }
+    return buildDecodeResult(result, headerLength + totalBytesConsumed)
+  }
+
+  function decodeMap (buf, offset, length, headerLength) {
+    var result = {}
+    var key
+    var i
+    var totalBytesConsumed = 0
+
+    offset += headerLength
+    for (i = 0; i < length; i++) {
+      var keyResult = tryDecode(buf, offset)
+      if (keyResult) {
+        offset += keyResult.bytesConsumed
+        var valueResult = tryDecode(buf, offset)
+        if (valueResult) {
+          key = keyResult.value
+          result[key] = valueResult.value
+          offset += valueResult.bytesConsumed
+          totalBytesConsumed += (keyResult.bytesConsumed + valueResult.bytesConsumed)
+        } else {
+          return null
         }
-        var avcc = MP4.box(MP4.types.avcC, new Uint8Array([
-            0x01,
-            sps[3],
-            sps[4],
-            sps[5],
-            0xfc | 3,
-            0xE0 | track.sps.length,
-        ].concat(sps).concat([
-            track.pps.length,
-        ]).concat(pps)));
-        var width = track.width;
-        var height = track.height;
-        return MP4.box(MP4.types.avc1, new Uint8Array([
-            0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x01,
-            0x00, 0x00,
-            0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            (width >> 8) & 0xFF,
-            width & 0xff,
-            (height >> 8) & 0xFF,
-            height & 0xff,
-            0x00, 0x48, 0x00, 0x00,
-            0x00, 0x48, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x01,
-            0x12,
-            0x62, 0x69, 0x6E, 0x65,
-            0x6C, 0x70, 0x72, 0x6F,
-            0x2E, 0x72, 0x75, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00,
-            0x00, 0x18,
-            0x11, 0x11
-        ]), avcc, MP4.box(MP4.types.btrt, new Uint8Array([
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x2d, 0xc6, 0xc0,
-            0x00, 0x2d, 0xc6, 0xc0,
-        ])));
-    };
-    MP4.stsd = function (track) {
-        return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
-    };
-    MP4.tkhd = function (track) {
-        var id = track.id;
-        var width = track.width;
-        var height = track.height;
-        return MP4.box(MP4.types.tkhd, new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x01,
-            0x00, 0x00, 0x00, 0x01,
-            0x00, 0x00, 0x00, 0x02,
-            (id >> 24) & 0xFF,
-            (id >> 16) & 0xFF,
-            (id >> 8) & 0xFF,
-            id & 0xFF,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00,
-            0x00, 0x00,
-            (track.type === 'audio' ? 0x01 : 0x00), 0x00,
-            0x00, 0x00,
-            0x00, 0x01, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x01, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x00, 0x00, 0x00,
-            0x40, 0x00, 0x00, 0x00,
-            (width >> 8) & 0xFF,
-            width & 0xFF,
-            0x00, 0x00,
-            (height >> 8) & 0xFF,
-            height & 0xFF,
-            0x00, 0x00,
-        ]));
-    };
-    MP4.traf = function (track, baseMediaDecodeTime) {
-        var id = track.id;
-        return MP4.box(MP4.types.traf, MP4.box(MP4.types.tfhd, new Uint8Array([
-            0x00,
-            0x02, 0x00, 0x00,
-            (id >> 24),
-            (id >> 16) & 0XFF,
-            (id >> 8) & 0XFF,
-            (id & 0xFF),
-        ])), MP4.box(MP4.types.tfdt, new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            (baseMediaDecodeTime >> 24),
-            (baseMediaDecodeTime >> 16) & 0XFF,
-            (baseMediaDecodeTime >> 8) & 0XFF,
-            (baseMediaDecodeTime & 0xFF),
-        ])), MP4.trun(track, 16 +
-            16 +
-            8 +
-            16 +
-            8 +
-            8));
-    };
-    MP4.trak = function (track) {
-        track.duration = track.duration || 0xffffffff;
-        return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
-    };
-    MP4.trex = function (track) {
-        var id = track.id;
-        return MP4.box(MP4.types.trex, new Uint8Array([
-            0x00,
-            0x00, 0x00, 0x00,
-            (id >> 24),
-            (id >> 16) & 0XFF,
-            (id >> 8) & 0XFF,
-            (id & 0xFF),
-            0x00, 0x00, 0x00, 0x01,
-            0x00, 0x00, 0x00, 0x3c,
-            0x00, 0x00, 0x00, 0x00,
-            0x00, 0x01, 0x00, 0x00,
-        ]));
-    };
-    MP4.trun = function (track, offset) {
-        var samples = track.samples || [];
-        var len = samples.length;
-        var additionalLen = track.isKeyFrame ? 4 : 0;
-        var arraylen = 12 + additionalLen + (4 * len);
-        var array = new Uint8Array(arraylen);
-        offset += 8 + arraylen;
-        array.set([
-            0x00,
-            0x00, 0x02, (track.isKeyFrame ? 0x05 : 0x01),
-            (len >>> 24) & 0xFF,
-            (len >>> 16) & 0xFF,
-            (len >>> 8) & 0xFF,
-            len & 0xFF,
-            (offset >>> 24) & 0xFF,
-            (offset >>> 16) & 0xFF,
-            (offset >>> 8) & 0xFF,
-            offset & 0xFF,
-        ], 0);
-        if (track.isKeyFrame) {
-            array.set([
-                0x00, 0x00, 0x00, 0x00,
-            ], 12);
+      } else {
+        return null
+      }
+    }
+    return buildDecodeResult(result, headerLength + totalBytesConsumed)
+  }
+
+  function decodeFixExt (buf, offset, size) {
+    var type = buf.readInt8(offset + 1) // Signed
+    return decodeExt(buf, offset, type, size, 2)
+  }
+
+  function decodeTimestamp (buf, size, headerSize) {
+    var seconds
+    var nanoseconds = 0
+
+    switch (size) {
+      case 4:
+        // timestamp 32 stores the number of seconds that have elapsed since 1970-01-01 00:00:00 UTC in a 32-bit unsigned integer
+        seconds = buf.readUInt32BE(0)
+        break
+
+      case 8:
+        // Timestamp 64 stores the number of seconds and nanoseconds that have elapsed
+        // since 1970-01-01 00:00:00 UTC in 32-bit unsigned integers, split 30/34 bits
+        var upper = buf.readUInt32BE(0)
+        var lower = buf.readUInt32BE(4)
+        nanoseconds = upper / 4
+        seconds = ((upper & 0x03) * Math.pow(2, 32)) + lower // If we use bitwise operators, we get truncated to 32bits
+        break
+
+      case 12:
+        throw new Error('timestamp 96 is not yet implemented')
+    }
+
+    var millis = (seconds * 1000) + Math.round(nanoseconds / 1E6)
+    return buildDecodeResult(new Date(millis), size + headerSize)
+  }
+
+  function decodeExt (buf, offset, type, size, headerSize) {
+    var i,
+      toDecode
+
+    offset += headerSize
+
+    // Pre-defined
+    if (type < 0) { // Reserved for future extensions
+      switch (type) {
+        case -1: // Timestamp https://github.com/msgpack/msgpack/blob/master/spec.md#timestamp-extension-type
+          toDecode = buf.slice(offset, offset + size)
+          return decodeTimestamp(toDecode, size, headerSize)
+      }
+    }
+
+    for (i = 0; i < decodingTypes.length; i++) {
+      if (type === decodingTypes[i].type) {
+        toDecode = buf.slice(offset, offset + size)
+        var value = decodingTypes[i].decode(toDecode)
+        return buildDecodeResult(value, headerSize + size)
+      }
+    }
+
+    throw new Error('unable to find ext type ' + type)
+  }
+}
+
+module.exports.IncompleteBufferError = IncompleteBufferError
+
+},{"bl":1,"util":74}],7:[function(require,module,exports){
+'use strict'
+
+var Buffer = require('safe-buffer').Buffer
+var bl = require('bl')
+
+module.exports = function buildEncode (encodingTypes, forceFloat64, compatibilityMode, disableTimestampEncoding) {
+  function encode (obj, avoidSlice) {
+    var buf
+    var len
+
+    if (obj === undefined) {
+      throw new Error('undefined is not encodable in msgpack!')
+    } else if (isNaN(obj)) {
+      throw new Error('NaN is not encodable in msgpack!')
+    } else if (obj === null) {
+      buf = Buffer.allocUnsafe(1)
+      buf[0] = 0xc0
+    } else if (obj === true) {
+      buf = Buffer.allocUnsafe(1)
+      buf[0] = 0xc3
+    } else if (obj === false) {
+      buf = Buffer.allocUnsafe(1)
+      buf[0] = 0xc2
+    } else if (typeof obj === 'string') {
+      len = Buffer.byteLength(obj)
+      if (len < 32) {
+        buf = Buffer.allocUnsafe(1 + len)
+        buf[0] = 0xa0 | len
+        if (len > 0) {
+          buf.write(obj, 1)
         }
-        for (var i = 0; i < len; i++) {
-            var sample = samples[i];
-            var size = sample.size;
-            array.set([
-                (size >>> 24) & 0xFF,
-                (size >>> 16) & 0xFF,
-                (size >>> 8) & 0xFF,
-                size & 0xFF,
-            ], 12 + additionalLen + 4 * i);
+      } else if (len <= 0xff && !compatibilityMode) {
+        // str8, but only when not in compatibility mode
+        buf = Buffer.allocUnsafe(2 + len)
+        buf[0] = 0xd9
+        buf[1] = len
+        buf.write(obj, 2)
+      } else if (len <= 0xffff) {
+        buf = Buffer.allocUnsafe(3 + len)
+        buf[0] = 0xda
+        buf.writeUInt16BE(len, 1)
+        buf.write(obj, 3)
+      } else {
+        buf = Buffer.allocUnsafe(5 + len)
+        buf[0] = 0xdb
+        buf.writeUInt32BE(len, 1)
+        buf.write(obj, 5)
+      }
+    } else if (obj && (obj.readUInt32LE || obj instanceof Uint8Array)) {
+      if (obj instanceof Uint8Array) {
+        obj = Buffer.from(obj)
+      }
+      // weird hack to support Buffer
+      // and Buffer-like objects
+      if (obj.length <= 0xff) {
+        buf = Buffer.allocUnsafe(2)
+        buf[0] = 0xc4
+        buf[1] = obj.length
+      } else if (obj.length <= 0xffff) {
+        buf = Buffer.allocUnsafe(3)
+        buf[0] = 0xc5
+        buf.writeUInt16BE(obj.length, 1)
+      } else {
+        buf = Buffer.allocUnsafe(5)
+        buf[0] = 0xc6
+        buf.writeUInt32BE(obj.length, 1)
+      }
+
+      buf = bl([buf, obj])
+    } else if (Array.isArray(obj)) {
+      if (obj.length < 16) {
+        buf = Buffer.allocUnsafe(1)
+        buf[0] = 0x90 | obj.length
+      } else if (obj.length < 65536) {
+        buf = Buffer.allocUnsafe(3)
+        buf[0] = 0xdc
+        buf.writeUInt16BE(obj.length, 1)
+      } else {
+        buf = Buffer.allocUnsafe(5)
+        buf[0] = 0xdd
+        buf.writeUInt32BE(obj.length, 1)
+      }
+
+      buf = obj.reduce(function (acc, obj) {
+        acc.append(encode(obj, true))
+        return acc
+      }, bl().append(buf))
+    } else if (!disableTimestampEncoding && typeof obj.getDate === 'function') {
+      return encodeDate(obj)
+    } else if (typeof obj === 'object') {
+      buf = encodeExt(obj) || encodeObject(obj)
+    } else if (typeof obj === 'number') {
+      if (isFloat(obj)) {
+        return encodeFloat(obj, forceFloat64)
+      } else if (obj >= 0) {
+        if (obj < 128) {
+          buf = Buffer.allocUnsafe(1)
+          buf[0] = obj
+        } else if (obj < 256) {
+          buf = Buffer.allocUnsafe(2)
+          buf[0] = 0xcc
+          buf[1] = obj
+        } else if (obj < 65536) {
+          buf = Buffer.allocUnsafe(3)
+          buf[0] = 0xcd
+          buf.writeUInt16BE(obj, 1)
+        } else if (obj <= 0xffffffff) {
+          buf = Buffer.allocUnsafe(5)
+          buf[0] = 0xce
+          buf.writeUInt32BE(obj, 1)
+        } else if (obj <= 9007199254740991) {
+          buf = Buffer.allocUnsafe(9)
+          buf[0] = 0xcf
+          write64BitUint(buf, obj)
+        } else {
+          return encodeFloat(obj, true)
         }
-        return MP4.box(MP4.types.trun, array);
-    };
-    MP4.initSegment = function (tracks, duration, timescale) {
-        if (!MP4.initalized) {
-            MP4.init();
+      } else {
+        if (obj >= -32) {
+          buf = Buffer.allocUnsafe(1)
+          buf[0] = 0x100 + obj
+        } else if (obj >= -128) {
+          buf = Buffer.allocUnsafe(2)
+          buf[0] = 0xd0
+          buf.writeInt8(obj, 1)
+        } else if (obj >= -32768) {
+          buf = Buffer.allocUnsafe(3)
+          buf[0] = 0xd1
+          buf.writeInt16BE(obj, 1)
+        } else if (obj > -214748365) {
+          buf = Buffer.allocUnsafe(5)
+          buf[0] = 0xd2
+          buf.writeInt32BE(obj, 1)
+        } else if (obj >= -9007199254740991) {
+          buf = Buffer.allocUnsafe(9)
+          buf[0] = 0xd3
+          write64BitInt(buf, 1, obj)
+        } else {
+          return encodeFloat(obj, true)
         }
-        var movie = MP4.moov(tracks, duration, timescale);
-        var result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength);
-        result.set(MP4.FTYP);
-        result.set(movie, MP4.FTYP.byteLength);
-        return result;
-    };
-    MP4.fragmentSegment = function (sn, baseMediaDecodeTime, track, payload) {
-        var moof = MP4.moof(sn, baseMediaDecodeTime, track);
-        var mdat = MP4.mdat(payload);
-        var result = new Uint8Array(MP4.STYP.byteLength + moof.byteLength + mdat.byteLength);
-        result.set(MP4.STYP);
-        result.set(moof, MP4.STYP.byteLength);
-        result.set(mdat, MP4.STYP.byteLength + moof.byteLength);
-        return result;
-    };
-    return MP4;
-}());
-MP4.types = {};
-MP4.initalized = false;
-exports.default = MP4;
+      }
+    }
+
+    if (!buf) {
+      throw new Error('not implemented yet')
+    }
+
+    if (avoidSlice) {
+      return buf
+    } else {
+      return buf.slice()
+    }
+  }
+
+  function encodeDate (dt) {
+    var encoded
+    var millis = dt * 1
+    var seconds = Math.floor(millis / 1000)
+    var nanos = (millis - (seconds * 1000)) * 1E6
+
+    if (nanos || seconds > 0xFFFFFFFF) {
+      // Timestamp64
+      encoded = Buffer.allocUnsafe(10)
+      encoded[0] = 0xd7
+      encoded[1] = -1
+
+      var upperNanos = ((nanos * 4))
+      var upperSeconds = seconds / Math.pow(2, 32)
+      var upper = (upperNanos + upperSeconds) & 0xFFFFFFFF
+      var lower = seconds & 0xFFFFFFFF
+
+      encoded.writeInt32BE(upper, 2)
+      encoded.writeInt32BE(lower, 6)
+    } else {
+      // Timestamp32
+      encoded = Buffer.allocUnsafe(6)
+      encoded[0] = 0xd6
+      encoded[1] = -1
+      encoded.writeUInt32BE(Math.floor(millis / 1000), 2)
+    }
+    return bl().append(encoded)
+  }
+
+  function encodeExt (obj) {
+    var i
+    var encoded
+    var length = -1
+    var headers = []
+
+    for (i = 0; i < encodingTypes.length; i++) {
+      if (encodingTypes[i].check(obj)) {
+        encoded = encodingTypes[i].encode(obj)
+        break
+      }
+    }
+
+    if (!encoded) {
+      return null
+    }
+
+    // we subtract 1 because the length does not
+    // include the type
+    length = encoded.length - 1
+
+    if (length === 1) {
+      headers.push(0xd4)
+    } else if (length === 2) {
+      headers.push(0xd5)
+    } else if (length === 4) {
+      headers.push(0xd6)
+    } else if (length === 8) {
+      headers.push(0xd7)
+    } else if (length === 16) {
+      headers.push(0xd8)
+    } else if (length < 256) {
+      headers.push(0xc7)
+      headers.push(length)
+    } else if (length < 0x10000) {
+      headers.push(0xc8)
+      headers.push(length >> 8)
+      headers.push(length & 0x00ff)
+    } else {
+      headers.push(0xc9)
+      headers.push(length >> 24)
+      headers.push((length >> 16) & 0x000000ff)
+      headers.push((length >> 8) & 0x000000ff)
+      headers.push(length & 0x000000ff)
+    }
+
+    return bl().append(Buffer.from(headers)).append(encoded)
+  }
+
+  function encodeObject (obj) {
+    var acc = []
+    var length = 0
+    var key
+    var header
+
+    for (key in obj) {
+      if (obj.hasOwnProperty(key) &&
+        obj[key] !== undefined &&
+        typeof obj[key] !== 'function') {
+        ++length
+        acc.push(encode(key, true))
+        acc.push(encode(obj[key], true))
+      }
+    }
+
+    if (length < 16) {
+      header = Buffer.allocUnsafe(1)
+      header[0] = 0x80 | length
+    } else if (length < 0xFFFF) {
+      header = Buffer.allocUnsafe(3)
+      header[0] = 0xde
+      header.writeUInt16BE(length, 1)
+    } else {
+      header = Buffer.allocUnsafe(5)
+      header[0] = 0xdf
+      header.writeUInt32BE(length, 1)
+    }
+
+    acc.unshift(header)
+
+    var result = acc.reduce(function (list, buf) {
+      return list.append(buf)
+    }, bl())
+
+    return result
+  }
+
+  return encode
+}
+
+function write64BitUint (buf, obj) {
+  // Write long byte by byte, in big-endian order
+  for (var currByte = 7; currByte >= 0; currByte--) {
+    buf[currByte + 1] = (obj & 0xff)
+    obj = obj / 256
+  }
+}
+
+function write64BitInt (buf, offset, num) {
+  var negate = num < 0
+
+  if (negate) {
+    num = Math.abs(num)
+  }
+
+  var lo = num % 4294967296
+  var hi = num / 4294967296
+  buf.writeUInt32BE(Math.floor(hi), offset + 0)
+  buf.writeUInt32BE(lo, offset + 4)
+
+  if (negate) {
+    var carry = 1
+    for (var i = offset + 7; i >= offset; i--) {
+      var v = (buf[i] ^ 0xff) + carry
+      buf[i] = v & 0xff
+      carry = v >> 8
+    }
+  }
+}
+
+function isFloat (n) {
+  return n % 1 !== 0
+}
+
+function isNaN (n) {
+  /* eslint-disable no-self-compare */
+  return n !== n && typeof n === 'number'
+  /* eslint-enable no-self-compare */
+}
+
+function encodeFloat (obj, forceFloat64) {
+  var useDoublePrecision = true
+
+  // If `fround` is supported, we can check if a float
+  // is double or single precision by rounding the object
+  // to single precision and comparing the difference.
+  // If it's not supported, it's safer to use a 64 bit
+  // float so we don't lose precision without meaning to.
+  if (Math.fround) {
+    useDoublePrecision = Math.fround(obj) !== obj
+  }
+
+  if (forceFloat64) {
+    useDoublePrecision = true
+  }
+
+  var buf
+
+  if (useDoublePrecision) {
+    buf = Buffer.allocUnsafe(9)
+    buf[0] = 0xcb
+    buf.writeDoubleBE(obj, 1)
+  } else {
+    buf = Buffer.allocUnsafe(5)
+    buf[0] = 0xca
+    buf.writeFloatBE(obj, 1)
+  }
+
+  return buf
+}
+
+},{"bl":1,"safe-buffer":19}],8:[function(require,module,exports){
+'use strict'
+
+var Transform = require('readable-stream').Transform
+var inherits = require('inherits')
+var bl = require('bl')
+
+function Base (opts) {
+  opts = opts || {}
+
+  opts.objectMode = true
+  opts.highWaterMark = 16
+
+  Transform.call(this, opts)
+
+  this._msgpack = opts.msgpack
+}
+
+inherits(Base, Transform)
+
+function Encoder (opts) {
+  if (!(this instanceof Encoder)) {
+    opts = opts || {}
+    opts.msgpack = this
+    return new Encoder(opts)
+  }
+
+  Base.call(this, opts)
+  this._wrap = ('wrap' in opts) && opts.wrap
+}
+
+inherits(Encoder, Base)
+
+Encoder.prototype._transform = function (obj, enc, done) {
+  var buf = null
+
+  try {
+    buf = this._msgpack.encode(this._wrap ? obj.value : obj).slice(0)
+  } catch (err) {
+    this.emit('error', err)
+    return done()
+  }
+
+  this.push(buf)
+  done()
+}
+
+function Decoder (opts) {
+  if (!(this instanceof Decoder)) {
+    opts = opts || {}
+    opts.msgpack = this
+    return new Decoder(opts)
+  }
+
+  Base.call(this, opts)
+
+  this._chunks = bl()
+  this._wrap = ('wrap' in opts) && opts.wrap
+}
+
+inherits(Decoder, Base)
+
+Decoder.prototype._transform = function (buf, enc, done) {
+  if (buf) {
+    this._chunks.append(buf)
+  }
+
+  try {
+    var result = this._msgpack.decode(this._chunks)
+    if (this._wrap) {
+      result = {value: result}
+    }
+    this.push(result)
+  } catch (err) {
+    if (err instanceof this._msgpack.IncompleteBufferError) {
+      done()
+    } else {
+      this.emit('error', err)
+    }
+    return
+  }
+
+  if (this._chunks.length > 0) {
+    this._transform(null, enc, done)
+  } else {
+    done()
+  }
+}
+
+module.exports.decoder = Decoder
+module.exports.encoder = Encoder
+
+},{"bl":1,"inherits":3,"readable-stream":18}],9:[function(require,module,exports){
+(function (process){
+'use strict';
+
+if (!process.version ||
+    process.version.indexOf('v0.') === 0 ||
+    process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {
+  module.exports = { nextTick: nextTick };
+} else {
+  module.exports = process
+}
+
+function nextTick(fn, arg1, arg2, arg3) {
+  if (typeof fn !== 'function') {
+    throw new TypeError('"callback" argument must be a function');
+  }
+  var len = arguments.length;
+  var args, i;
+  switch (len) {
+  case 0:
+  case 1:
+    return process.nextTick(fn);
+  case 2:
+    return process.nextTick(function afterTickOne() {
+      fn.call(null, arg1);
+    });
+  case 3:
+    return process.nextTick(function afterTickTwo() {
+      fn.call(null, arg1, arg2);
+    });
+  case 4:
+    return process.nextTick(function afterTickThree() {
+      fn.call(null, arg1, arg2, arg3);
+    });
+  default:
+    args = new Array(len - 1);
+    i = 0;
+    while (i < args.length) {
+      args[i++] = arguments[i];
+    }
+    return process.nextTick(function afterTick() {
+      fn.apply(null, args);
+    });
+  }
+}
+
+
+}).call(this,require('_process'))
+},{"_process":70}],10:[function(require,module,exports){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototypal inheritance, this class
+// prototypally inherits from Readable, and then parasitically from
+// Writable.
+
+'use strict';
+
+/*<replacement>*/
+
+var pna = require('process-nextick-args');
+/*</replacement>*/
+
+/*<replacement>*/
+var objectKeys = Object.keys || function (obj) {
+  var keys = [];
+  for (var key in obj) {
+    keys.push(key);
+  }return keys;
+};
+/*</replacement>*/
+
+module.exports = Duplex;
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+var Readable = require('./_stream_readable');
+var Writable = require('./_stream_writable');
+
+util.inherits(Duplex, Readable);
+
+{
+  // avoid scope creep, the keys array can then be collected
+  var keys = objectKeys(Writable.prototype);
+  for (var v = 0; v < keys.length; v++) {
+    var method = keys[v];
+    if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
+  }
+}
+
+function Duplex(options) {
+  if (!(this instanceof Duplex)) return new Duplex(options);
+
+  Readable.call(this, options);
+  Writable.call(this, options);
+
+  if (options && options.readable === false) this.readable = false;
+
+  if (options && options.writable === false) this.writable = false;
+
+  this.allowHalfOpen = true;
+  if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
+
+  this.once('end', onend);
+}
+
+Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
+  // making it explicit this property is not enumerable
+  // because otherwise some prototype manipulation in
+  // userland will fail
+  enumerable: false,
+  get: function () {
+    return this._writableState.highWaterMark;
+  }
+});
+
+// the no-half-open enforcer
+function onend() {
+  // if we allow half-open state, or if the writable side ended,
+  // then we're ok.
+  if (this.allowHalfOpen || this._writableState.ended) return;
+
+  // no more data can be written.
+  // But allow more writes to happen in this tick.
+  pna.nextTick(onEndNT, this);
+}
+
+function onEndNT(self) {
+  self.end();
+}
+
+Object.defineProperty(Duplex.prototype, 'destroyed', {
+  get: function () {
+    if (this._readableState === undefined || this._writableState === undefined) {
+      return false;
+    }
+    return this._readableState.destroyed && this._writableState.destroyed;
+  },
+  set: function (value) {
+    // we ignore the value if the stream
+    // has not been initialized yet
+    if (this._readableState === undefined || this._writableState === undefined) {
+      return;
+    }
+
+    // backward compatibility, the user is explicitly
+    // managing destroyed
+    this._readableState.destroyed = value;
+    this._writableState.destroyed = value;
+  }
+});
+
+Duplex.prototype._destroy = function (err, cb) {
+  this.push(null);
+  this.end();
+
+  pna.nextTick(cb, err);
+};
+},{"./_stream_readable":12,"./_stream_writable":14,"core-util-is":2,"inherits":3,"process-nextick-args":9}],11:[function(require,module,exports){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
+
+'use strict';
+
+module.exports = PassThrough;
+
+var Transform = require('./_stream_transform');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+util.inherits(PassThrough, Transform);
+
+function PassThrough(options) {
+  if (!(this instanceof PassThrough)) return new PassThrough(options);
+
+  Transform.call(this, options);
+}
+
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
+  cb(null, chunk);
+};
+},{"./_stream_transform":13,"core-util-is":2,"inherits":3}],12:[function(require,module,exports){
+(function (process,global){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict';
+
+/*<replacement>*/
+
+var pna = require('process-nextick-args');
+/*</replacement>*/
+
+module.exports = Readable;
+
+/*<replacement>*/
+var isArray = require('isarray');
+/*</replacement>*/
+
+/*<replacement>*/
+var Duplex;
+/*</replacement>*/
+
+Readable.ReadableState = ReadableState;
 
-},{}],5:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var NALU = (function () {
-    function NALU(data) {
-        this.data = data;
-        this.nri = (data[0] & 0x60) >> 5;
-        this.ntype = data[0] & 0x1f;
+/*<replacement>*/
+var EE = require('events').EventEmitter;
+
+var EElistenerCount = function (emitter, type) {
+  return emitter.listeners(type).length;
+};
+/*</replacement>*/
+
+/*<replacement>*/
+var Stream = require('./internal/streams/stream');
+/*</replacement>*/
+
+/*<replacement>*/
+
+var Buffer = require('safe-buffer').Buffer;
+var OurUint8Array = global.Uint8Array || function () {};
+function _uint8ArrayToBuffer(chunk) {
+  return Buffer.from(chunk);
+}
+function _isUint8Array(obj) {
+  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
+}
+
+/*</replacement>*/
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+/*<replacement>*/
+var debugUtil = require('util');
+var debug = void 0;
+if (debugUtil && debugUtil.debuglog) {
+  debug = debugUtil.debuglog('stream');
+} else {
+  debug = function () {};
+}
+/*</replacement>*/
+
+var BufferList = require('./internal/streams/BufferList');
+var destroyImpl = require('./internal/streams/destroy');
+var StringDecoder;
+
+util.inherits(Readable, Stream);
+
+var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
+
+function prependListener(emitter, event, fn) {
+  // Sadly this is not cacheable as some libraries bundle their own
+  // event emitter implementation with them.
+  if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
+
+  // This is a hack to make sure that our error handler is attached before any
+  // userland ones.  NEVER DO THIS. This is here only because this code needs
+  // to continue to work with older versions of Node.js that do not include
+  // the prependListener() method. The goal is to eventually remove this hack.
+  if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
+}
+
+function ReadableState(options, stream) {
+  Duplex = Duplex || require('./_stream_duplex');
+
+  options = options || {};
+
+  // Duplex streams are both readable and writable, but share
+  // the same options object.
+  // However, some cases require setting options to different
+  // values for the readable and the writable sides of the duplex stream.
+  // These options can be provided separately as readableXXX and writableXXX.
+  var isDuplex = stream instanceof Duplex;
+
+  // object stream flag. Used to make read(n) ignore n and to
+  // make all the buffer merging and length checks go away
+  this.objectMode = !!options.objectMode;
+
+  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
+
+  // the point at which it stops calling _read() to fill the buffer
+  // Note: 0 is a valid value, means "don't call _read preemptively ever"
+  var hwm = options.highWaterMark;
+  var readableHwm = options.readableHighWaterMark;
+  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+
+  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
+
+  // cast to ints.
+  this.highWaterMark = Math.floor(this.highWaterMark);
+
+  // A linked list is used to store data chunks instead of an array because the
+  // linked list can remove elements from the beginning faster than
+  // array.shift()
+  this.buffer = new BufferList();
+  this.length = 0;
+  this.pipes = null;
+  this.pipesCount = 0;
+  this.flowing = null;
+  this.ended = false;
+  this.endEmitted = false;
+  this.reading = false;
+
+  // a flag to be able to tell if the event 'readable'/'data' is emitted
+  // immediately, or on a later tick.  We set this to true at first, because
+  // any actions that shouldn't happen until "later" should generally also
+  // not happen before the first read call.
+  this.sync = true;
+
+  // whenever we return null, then we set a flag to say
+  // that we're awaiting a 'readable' event emission.
+  this.needReadable = false;
+  this.emittedReadable = false;
+  this.readableListening = false;
+  this.resumeScheduled = false;
+
+  // has it been destroyed
+  this.destroyed = false;
+
+  // Crypto is kind of old and crusty.  Historically, its default string
+  // encoding is 'binary' so we have to make this configurable.
+  // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+  // the number of writers that are awaiting a drain event in .pipe()s
+  this.awaitDrain = 0;
+
+  // if true, a maybeReadMore has been scheduled
+  this.readingMore = false;
+
+  this.decoder = null;
+  this.encoding = null;
+  if (options.encoding) {
+    if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
+    this.decoder = new StringDecoder(options.encoding);
+    this.encoding = options.encoding;
+  }
+}
+
+function Readable(options) {
+  Duplex = Duplex || require('./_stream_duplex');
+
+  if (!(this instanceof Readable)) return new Readable(options);
+
+  this._readableState = new ReadableState(options, this);
+
+  // legacy
+  this.readable = true;
+
+  if (options) {
+    if (typeof options.read === 'function') this._read = options.read;
+
+    if (typeof options.destroy === 'function') this._destroy = options.destroy;
+  }
+
+  Stream.call(this);
+}
+
+Object.defineProperty(Readable.prototype, 'destroyed', {
+  get: function () {
+    if (this._readableState === undefined) {
+      return false;
     }
-    Object.defineProperty(NALU, "NDR", {
-        get: function () { return 1; },
-        enumerable: true,
-        configurable: true
-    });
-    Object.defineProperty(NALU, "IDR", {
-        get: function () { return 5; },
-        enumerable: true,
-        configurable: true
-    });
-    Object.defineProperty(NALU, "SEI", {
-        get: function () { return 6; },
-        enumerable: true,
-        configurable: true
-    });
-    Object.defineProperty(NALU, "SPS", {
-        get: function () { return 7; },
-        enumerable: true,
-        configurable: true
-    });
-    Object.defineProperty(NALU, "PPS", {
-        get: function () { return 8; },
-        enumerable: true,
-        configurable: true
-    });
-    Object.defineProperty(NALU, "TYPES", {
-        get: function () {
-            return _a = {},
-                _a[NALU.IDR] = 'IDR',
-                _a[NALU.SEI] = 'SEI',
-                _a[NALU.SPS] = 'SPS',
-                _a[NALU.PPS] = 'PPS',
-                _a[NALU.NDR] = 'NDR',
-                _a;
-            var _a;
-        },
-        enumerable: true,
-        configurable: true
-    });
-    NALU.type = function (nalu) {
-        if (nalu.ntype in NALU.TYPES) {
-            return NALU.TYPES[nalu.ntype];
-        }
-        else {
-            return 'UNKNOWN';
+    return this._readableState.destroyed;
+  },
+  set: function (value) {
+    // we ignore the value if the stream
+    // has not been initialized yet
+    if (!this._readableState) {
+      return;
+    }
+
+    // backward compatibility, the user is explicitly
+    // managing destroyed
+    this._readableState.destroyed = value;
+  }
+});
+
+Readable.prototype.destroy = destroyImpl.destroy;
+Readable.prototype._undestroy = destroyImpl.undestroy;
+Readable.prototype._destroy = function (err, cb) {
+  this.push(null);
+  cb(err);
+};
+
+// Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+Readable.prototype.push = function (chunk, encoding) {
+  var state = this._readableState;
+  var skipChunkCheck;
+
+  if (!state.objectMode) {
+    if (typeof chunk === 'string') {
+      encoding = encoding || state.defaultEncoding;
+      if (encoding !== state.encoding) {
+        chunk = Buffer.from(chunk, encoding);
+        encoding = '';
+      }
+      skipChunkCheck = true;
+    }
+  } else {
+    skipChunkCheck = true;
+  }
+
+  return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
+};
+
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function (chunk) {
+  return readableAddChunk(this, chunk, null, true, false);
+};
+
+function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
+  var state = stream._readableState;
+  if (chunk === null) {
+    state.reading = false;
+    onEofChunk(stream, state);
+  } else {
+    var er;
+    if (!skipChunkCheck) er = chunkInvalid(state, chunk);
+    if (er) {
+      stream.emit('error', er);
+    } else if (state.objectMode || chunk && chunk.length > 0) {
+      if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
+        chunk = _uint8ArrayToBuffer(chunk);
+      }
+
+      if (addToFront) {
+        if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
+      } else if (state.ended) {
+        stream.emit('error', new Error('stream.push() after EOF'));
+      } else {
+        state.reading = false;
+        if (state.decoder && !encoding) {
+          chunk = state.decoder.write(chunk);
+          if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
+        } else {
+          addChunk(stream, state, chunk, false);
         }
-    };
-    NALU.prototype.type = function () {
-        return this.ntype;
-    };
-    NALU.prototype.isKeyframe = function () {
-        return this.ntype === NALU.IDR;
-    };
-    NALU.prototype.getSize = function () {
-        return 4 + this.data.byteLength;
-    };
-    NALU.prototype.getData = function () {
-        var result = new Uint8Array(this.getSize());
-        var view = new DataView(result.buffer);
-        view.setUint32(0, this.getSize() - 4);
-        result.set(this.data, 4);
-        return result;
-    };
-    return NALU;
-}());
-exports.default = NALU;
+      }
+    } else if (!addToFront) {
+      state.reading = false;
+    }
+  }
 
-},{}],6:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var BitStream = (function () {
-    function BitStream(data) {
-        this.data = data;
-        this.index = 0;
-        this.bitLength = data.byteLength * 8;
+  return needMoreData(state);
+}
+
+function addChunk(stream, state, chunk, addToFront) {
+  if (state.flowing && state.length === 0 && !state.sync) {
+    stream.emit('data', chunk);
+    stream.read(0);
+  } else {
+    // update the buffer info.
+    state.length += state.objectMode ? 1 : chunk.length;
+    if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
+
+    if (state.needReadable) emitReadable(stream);
+  }
+  maybeReadMore(stream, state);
+}
+
+function chunkInvalid(state, chunk) {
+  var er;
+  if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+    er = new TypeError('Invalid non-string/buffer chunk');
+  }
+  return er;
+}
+
+// if it's past the high water mark, we can push in some more.
+// Also, if we have no data yet, we can stand some
+// more bytes.  This is to work around cases where hwm=0,
+// such as the repl.  Also, if the push() triggered a
+// readable event, and the user called read(largeNumber) such that
+// needReadable was set, then we ought to push more, so that another
+// 'readable' event will be triggered.
+function needMoreData(state) {
+  return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
+}
+
+Readable.prototype.isPaused = function () {
+  return this._readableState.flowing === false;
+};
+
+// backwards compatibility.
+Readable.prototype.setEncoding = function (enc) {
+  if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
+  this._readableState.decoder = new StringDecoder(enc);
+  this._readableState.encoding = enc;
+  return this;
+};
+
+// Don't raise the hwm > 8MB
+var MAX_HWM = 0x800000;
+function computeNewHighWaterMark(n) {
+  if (n >= MAX_HWM) {
+    n = MAX_HWM;
+  } else {
+    // Get the next highest power of 2 to prevent increasing hwm excessively in
+    // tiny amounts
+    n--;
+    n |= n >>> 1;
+    n |= n >>> 2;
+    n |= n >>> 4;
+    n |= n >>> 8;
+    n |= n >>> 16;
+    n++;
+  }
+  return n;
+}
+
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function howMuchToRead(n, state) {
+  if (n <= 0 || state.length === 0 && state.ended) return 0;
+  if (state.objectMode) return 1;
+  if (n !== n) {
+    // Only flow one buffer at a time
+    if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
+  }
+  // If we're asking for more than the current hwm, then raise the hwm.
+  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
+  if (n <= state.length) return n;
+  // Don't have enough
+  if (!state.ended) {
+    state.needReadable = true;
+    return 0;
+  }
+  return state.length;
+}
+
+// you can override either this method, or the async _read(n) below.
+Readable.prototype.read = function (n) {
+  debug('read', n);
+  n = parseInt(n, 10);
+  var state = this._readableState;
+  var nOrig = n;
+
+  if (n !== 0) state.emittedReadable = false;
+
+  // if we're doing read(0) to trigger a readable event, but we
+  // already have a bunch of data in the buffer, then just trigger
+  // the 'readable' event and move on.
+  if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
+    debug('read: emitReadable', state.length, state.ended);
+    if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
+    return null;
+  }
+
+  n = howMuchToRead(n, state);
+
+  // if we've ended, and we're now clear, then finish it up.
+  if (n === 0 && state.ended) {
+    if (state.length === 0) endReadable(this);
+    return null;
+  }
+
+  // All the actual chunk generation logic needs to be
+  // *below* the call to _read.  The reason is that in certain
+  // synthetic stream cases, such as passthrough streams, _read
+  // may be a completely synchronous operation which may change
+  // the state of the read buffer, providing enough data when
+  // before there was *not* enough.
+  //
+  // So, the steps are:
+  // 1. Figure out what the state of things will be after we do
+  // a read from the buffer.
+  //
+  // 2. If that resulting state will trigger a _read, then call _read.
+  // Note that this may be asynchronous, or synchronous.  Yes, it is
+  // deeply ugly to write APIs this way, but that still doesn't mean
+  // that the Readable class should behave improperly, as streams are
+  // designed to be sync/async agnostic.
+  // Take note if the _read call is sync or async (ie, if the read call
+  // has returned yet), so that we know whether or not it's safe to emit
+  // 'readable' etc.
+  //
+  // 3. Actually pull the requested chunks out of the buffer and return.
+
+  // if we need a readable event, then we need to do some reading.
+  var doRead = state.needReadable;
+  debug('need readable', doRead);
+
+  // if we currently have less than the highWaterMark, then also read some
+  if (state.length === 0 || state.length - n < state.highWaterMark) {
+    doRead = true;
+    debug('length less than watermark', doRead);
+  }
+
+  // however, if we've ended, then there's no point, and if we're already
+  // reading, then it's unnecessary.
+  if (state.ended || state.reading) {
+    doRead = false;
+    debug('reading or ended', doRead);
+  } else if (doRead) {
+    debug('do read');
+    state.reading = true;
+    state.sync = true;
+    // if the length is currently zero, then we *need* a readable event.
+    if (state.length === 0) state.needReadable = true;
+    // call internal read method
+    this._read(state.highWaterMark);
+    state.sync = false;
+    // If _read pushed data synchronously, then `reading` will be false,
+    // and we need to re-evaluate how much data we can return to the user.
+    if (!state.reading) n = howMuchToRead(nOrig, state);
+  }
+
+  var ret;
+  if (n > 0) ret = fromList(n, state);else ret = null;
+
+  if (ret === null) {
+    state.needReadable = true;
+    n = 0;
+  } else {
+    state.length -= n;
+  }
+
+  if (state.length === 0) {
+    // If we have nothing in the buffer, then we want to know
+    // as soon as we *do* get something into the buffer.
+    if (!state.ended) state.needReadable = true;
+
+    // If we tried to read() past the EOF, then emit end on the next tick.
+    if (nOrig !== n && state.ended) endReadable(this);
+  }
+
+  if (ret !== null) this.emit('data', ret);
+
+  return ret;
+};
+
+function onEofChunk(stream, state) {
+  if (state.ended) return;
+  if (state.decoder) {
+    var chunk = state.decoder.end();
+    if (chunk && chunk.length) {
+      state.buffer.push(chunk);
+      state.length += state.objectMode ? 1 : chunk.length;
     }
-    Object.defineProperty(BitStream.prototype, "bitsAvailable", {
-        get: function () {
-            return this.bitLength - this.index;
-        },
-        enumerable: true,
-        configurable: true
-    });
-    BitStream.prototype.skipBits = function (size) {
-        if (this.bitsAvailable < size) {
-            throw new Error('no bytes available');
-        }
-        this.index += size;
-    };
-    BitStream.prototype.readBits = function (size) {
-        var result = this.getBits(size, this.index);
-        return result;
-    };
-    BitStream.prototype.getBits = function (size, offsetBits, moveIndex) {
-        if (moveIndex === void 0) { moveIndex = true; }
-        if (this.bitsAvailable < size) {
-            throw new Error('no bytes available');
-        }
-        var offset = offsetBits % 8;
-        var byte = this.data[(offsetBits / 8) | 0] & (0xff >>> offset);
-        var bits = 8 - offset;
-        if (bits >= size) {
-            if (moveIndex) {
-                this.index += size;
-            }
-            return byte >> (bits - size);
-        }
-        else {
-            if (moveIndex) {
-                this.index += bits;
-            }
-            var nextSize = size - bits;
-            return (byte << nextSize) | this.getBits(nextSize, offsetBits + bits, moveIndex);
-        }
-    };
-    BitStream.prototype.skipLZ = function () {
-        var leadingZeroCount;
-        for (leadingZeroCount = 0; leadingZeroCount < this.bitLength - this.index; ++leadingZeroCount) {
-            if (0 !== this.getBits(1, this.index + leadingZeroCount, false)) {
-                this.index += leadingZeroCount;
-                return leadingZeroCount;
-            }
-        }
-        return leadingZeroCount;
-    };
-    BitStream.prototype.skipUEG = function () {
-        this.skipBits(1 + this.skipLZ());
-    };
-    BitStream.prototype.skipEG = function () {
-        this.skipBits(1 + this.skipLZ());
-    };
-    BitStream.prototype.readUEG = function () {
-        var prefix = this.skipLZ();
-        return this.readBits(prefix + 1) - 1;
-    };
-    BitStream.prototype.readEG = function () {
-        var value = this.readUEG();
-        if (0x01 & value) {
-            return (1 + value) >>> 1;
-        }
-        else {
-            return -1 * (value >>> 1);
-        }
-    };
-    BitStream.prototype.readBoolean = function () {
-        return 1 === this.readBits(1);
-    };
-    BitStream.prototype.readUByte = function () {
-        return this.readBits(8);
-    };
-    BitStream.prototype.readUShort = function () {
-        return this.readBits(16);
-    };
-    BitStream.prototype.readUInt = function () {
-        return this.readBits(32);
-    };
-    return BitStream;
-}());
-exports.default = BitStream;
+  }
+  state.ended = true;
 
-},{}],7:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var logger;
-var errorLogger;
-function setLogger(log, error) {
-    logger = log;
-    errorLogger = error != null ? error : log;
+  // emit 'readable' now to make sure it gets picked up.
+  emitReadable(stream);
 }
-exports.setLogger = setLogger;
-function isEnable() {
-    return logger != null;
+
+// Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow.  This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+function emitReadable(stream) {
+  var state = stream._readableState;
+  state.needReadable = false;
+  if (!state.emittedReadable) {
+    debug('emitReadable', state.flowing);
+    state.emittedReadable = true;
+    if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
+  }
 }
-exports.isEnable = isEnable;
-function log(message) {
-    var optionalParams = [];
-    for (var _i = 1; _i < arguments.length; _i++) {
-        optionalParams[_i - 1] = arguments[_i];
-    }
-    if (logger) {
-        logger.apply(void 0, [message].concat(optionalParams));
-    }
+
+function emitReadable_(stream) {
+  debug('emit readable');
+  stream.emit('readable');
+  flow(stream);
 }
-exports.log = log;
-function error(message) {
-    var optionalParams = [];
-    for (var _i = 1; _i < arguments.length; _i++) {
-        optionalParams[_i - 1] = arguments[_i];
-    }
-    if (errorLogger) {
-        errorLogger.apply(void 0, [message].concat(optionalParams));
-    }
+
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data.  that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+  if (!state.readingMore) {
+    state.readingMore = true;
+    pna.nextTick(maybeReadMore_, stream, state);
+  }
 }
-exports.error = error;
 
-},{}],8:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var NALU_1 = require("./NALU");
-var VideoStreamBuffer = (function () {
-    function VideoStreamBuffer() {
-    }
-    VideoStreamBuffer.prototype.clear = function () {
-        this.buffer = undefined;
-    };
-    VideoStreamBuffer.prototype.append = function (value) {
-        var nextNalHeader = function (b) {
-            var i = 3;
-            return function () {
-                var count = 0;
-                for (; i < b.length; i++) {
-                    switch (b[i]) {
-                        case 0:
-                            count++;
-                            break;
-                        case 1:
-                            if (count === 3) {
-                                return i - 3;
-                            }
-                        default:
-                            count = 0;
-                    }
-                }
-                return;
-            };
-        };
-        var result = [];
-        var buffer;
-        if (this.buffer) {
-            if (value[3] === 1 && value[2] === 0 && value[1] === 0 && value[0] === 0) {
-                result.push(new NALU_1.default(this.buffer.subarray(4)));
-                buffer = Uint8Array.from(value);
-            }
-        }
-        if (buffer == null) {
-            buffer = this.mergeBuffer(value);
-        }
-        var lastIndex = 0;
-        var f = nextNalHeader(buffer);
-        for (var index = f(); index != null; index = f()) {
-            result.push(new NALU_1.default(buffer.subarray(lastIndex + 4, index)));
-            lastIndex = index;
-        }
-        this.buffer = buffer.subarray(lastIndex);
-        return result;
-    };
-    VideoStreamBuffer.prototype.mergeBuffer = function (value) {
-        if (this.buffer == null) {
-            return Uint8Array.from(value);
-        }
-        else {
-            var newBuffer = new Uint8Array(this.buffer.byteLength + value.length);
-            if (this.buffer.byteLength > 0) {
-                newBuffer.set(this.buffer, 0);
-            }
-            newBuffer.set(value, this.buffer.byteLength);
-            return newBuffer;
-        }
-    };
-    return VideoStreamBuffer;
-}());
-exports.default = VideoStreamBuffer;
+function maybeReadMore_(stream, state) {
+  var len = state.length;
+  while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
+    debug('maybeReadMore read 0');
+    stream.read(0);
+    if (len === state.length)
+      // didn't get any data, stop spinning.
+      break;else len = state.length;
+  }
+  state.readingMore = false;
+}
+
+// abstract method.  to be overridden in specific implementation classes.
+// call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function (n) {
+  this.emit('error', new Error('_read() is not implemented'));
+};
+
+Readable.prototype.pipe = function (dest, pipeOpts) {
+  var src = this;
+  var state = this._readableState;
+
+  switch (state.pipesCount) {
+    case 0:
+      state.pipes = dest;
+      break;
+    case 1:
+      state.pipes = [state.pipes, dest];
+      break;
+    default:
+      state.pipes.push(dest);
+      break;
+  }
+  state.pipesCount += 1;
+  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
+
+  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
+
+  var endFn = doEnd ? onend : unpipe;
+  if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
 
-},{"./NALU":5}],9:[function(require,module,exports){
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-var h264_remuxer_1 = require("./h264-remuxer");
-var mp4_generator_1 = require("./mp4-generator");
-var debug = require("./util/debug");
-var nalu_stream_buffer_1 = require("./util/nalu-stream-buffer");
-exports.mimeType = 'video/mp4; codecs="avc1.42E01E"';
-var VideoConverter = (function () {
-    function VideoConverter(element, fps, fpf) {
-        if (fps === void 0) { fps = 60; }
-        if (fpf === void 0) { fpf = fps; }
-        this.element = element;
-        this.fps = fps;
-        this.fpf = fpf;
-        this.receiveBuffer = new nalu_stream_buffer_1.default();
-        this.queue = [];
-        if (!MediaSource || !MediaSource.isTypeSupported(exports.mimeType)) {
-            throw new Error("Your browser is not supported: " + exports.mimeType);
-        }
-        this.reset();
+  dest.on('unpipe', onunpipe);
+  function onunpipe(readable, unpipeInfo) {
+    debug('onunpipe');
+    if (readable === src) {
+      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
+        unpipeInfo.hasUnpiped = true;
+        cleanup();
+      }
     }
-    Object.defineProperty(VideoConverter, "errorNotes", {
-        get: function () {
-            return _a = {},
-                _a[MediaError.MEDIA_ERR_ABORTED] = 'fetching process aborted by user',
-                _a[MediaError.MEDIA_ERR_NETWORK] = 'error occurred when downloading',
-                _a[MediaError.MEDIA_ERR_DECODE] = 'error occurred when decoding',
-                _a[MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED] = 'audio/video not supported',
-                _a;
-            var _a;
-        },
-        enumerable: true,
-        configurable: true
-    });
-    VideoConverter.prototype.setup = function () {
-        var _this = this;
-        this.mediaReadyPromise = new Promise(function (resolve, _reject) {
-            _this.mediaSource.addEventListener('sourceopen', function () {
-                debug.log("Media Source opened.");
-                _this.sourceBuffer = _this.mediaSource.addSourceBuffer(exports.mimeType);
-                _this.sourceBuffer.addEventListener('updateend', function () {
-                    debug.log("  SourceBuffer updateend");
-                    debug.log("    sourceBuffer.buffered.length=" + _this.sourceBuffer.buffered.length);
-                    for (var i = 0, len = _this.sourceBuffer.buffered.length; i < len; i++) {
-                        debug.log("    sourceBuffer.buffered [" + i + "]: " +
-                            (_this.sourceBuffer.buffered.start(i) + ", " + _this.sourceBuffer.buffered.end(i)));
-                    }
-                    debug.log("  mediasource.duration=" + _this.mediaSource.duration);
-                    debug.log("  mediasource.readyState=" + _this.mediaSource.readyState);
-                    debug.log("  video.duration=" + _this.element.duration);
-                    debug.log("    video.buffered.length=" + _this.element.buffered.length);
-                    if (debug.isEnable()) {
-                        for (var i = 0, len = _this.element.buffered.length; i < len; i++) {
-                            debug.log("    video.buffered [" + i + "]: " + _this.element.buffered.start(i) + ", " + _this.element.buffered.end(i));
-                        }
-                    }
-                    debug.log("  video.currentTime=" + _this.element.currentTime);
-                    debug.log("  video.readyState=" + _this.element.readyState);
-                    var data = _this.queue.shift();
-                    if (data) {
-                        _this.writeBuffer(data);
-                    }
-                });
-                _this.sourceBuffer.addEventListener('error', function () {
-                    debug.error('  SourceBuffer errored!');
-                });
-                _this.mediaReady = true;
-                resolve();
-            }, false);
-            _this.mediaSource.addEventListener('sourceclose', function () {
-                debug.log("Media Source closed.");
-                _this.mediaReady = false;
-            }, false);
-            _this.element.src = URL.createObjectURL(_this.mediaSource);
-        });
-        return this.mediaReadyPromise;
-    };
-    VideoConverter.prototype.play = function () {
-        var _this = this;
-        if (!this.element.paused) {
-            return;
-        }
-        if (this.mediaReady && this.element.readyState >= 2) {
-            this.element.play();
-        }
-        else {
-            var handler_1 = function () {
-                _this.play();
-                _this.element.removeEventListener('canplaythrough', handler_1);
-            };
-            this.element.addEventListener('canplaythrough', handler_1);
-        }
-    };
-    VideoConverter.prototype.pause = function () {
-        if (this.element.paused) {
-            return;
-        }
-        this.element.pause();
-    };
-    VideoConverter.prototype.reset = function () {
-        this.receiveBuffer.clear();
-        if (this.mediaSource && this.mediaSource.readyState === 'open') {
-            this.mediaSource.duration = 0;
-            this.mediaSource.endOfStream();
-        }
-        this.mediaSource = new MediaSource();
-        this.remuxer = new h264_remuxer_1.default(this.fps, this.fpf, this.fps * 60);
-        this.mediaReady = false;
-        this.mediaReadyPromise = undefined;
-        this.queue = [];
-        this.isFirstFrame = true;
-        this.setup();
-    };
-    VideoConverter.prototype.appendRawData = function (data) {
-        var nalus = this.receiveBuffer.append(data);
-        for (var _i = 0, nalus_1 = nalus; _i < nalus_1.length; _i++) {
-            var nalu = nalus_1[_i];
-            var ret = this.remuxer.remux(nalu);
-            if (ret) {
-                this.writeFragment(ret[0], ret[1]);
-            }
-        }
-    };
-    VideoConverter.prototype.writeFragment = function (dts, pay) {
-        var remuxer = this.remuxer;
-        if (remuxer.mp4track.isKeyFrame) {
-            this.writeBuffer(mp4_generator_1.default.initSegment([remuxer.mp4track], Infinity, remuxer.timescale));
-        }
-        if (pay && pay.byteLength) {
-            debug.log(" Put fragment: " + remuxer.seqNum + ", frames=" + remuxer.mp4track.samples.length + ", size=" + pay.byteLength);
-            var fragment = mp4_generator_1.default.fragmentSegment(remuxer.seqNum, dts, remuxer.mp4track, pay);
-            this.writeBuffer(fragment);
-            remuxer.flush();
-        }
-        else {
-            debug.error("Nothing payload!");
-        }
-    };
-    VideoConverter.prototype.writeBuffer = function (data) {
-        var _this = this;
-        if (this.mediaReady) {
-            if (this.sourceBuffer.updating) {
-                this.queue.push(data);
-            }
-            else {
-                this.doAppend(data);
-            }
-        }
-        else {
-            this.queue.push(data);
-            if (this.mediaReadyPromise) {
-                this.mediaReadyPromise.then(function () {
-                    if (!_this.sourceBuffer.updating) {
-                        var d = _this.queue.shift();
-                        if (d) {
-                            _this.writeBuffer(d);
-                        }
-                    }
-                });
-                this.mediaReadyPromise = undefined;
-            }
-        }
-    };
-    VideoConverter.prototype.doAppend = function (data) {
-        var error = this.element.error;
-        if (error) {
-            debug.error("MSE Error Occured: " + VideoConverter.errorNotes[error.code]);
-            this.element.pause();
-            if (this.mediaSource.readyState === 'open') {
-                this.mediaSource.endOfStream();
-            }
-        }
-        else {
-            try {
-                this.sourceBuffer.appendBuffer(data);
-                debug.log("  appended buffer: size=" + data.byteLength);
-            }
-            catch (err) {
-                debug.error("MSE Error occured while appending buffer. " + err.name + ": " + err.message);
-            }
-        }
-    };
-    return VideoConverter;
-}());
-exports.default = VideoConverter;
+  }
 
-},{"./h264-remuxer":3,"./mp4-generator":4,"./util/debug":7,"./util/nalu-stream-buffer":8}],10:[function(require,module,exports){
-'use strict'
-var DuplexStream = require('readable-stream').Duplex
-  , util         = require('util')
-  , Buffer       = require('safe-buffer').Buffer
+  function onend() {
+    debug('onend');
+    dest.end();
+  }
+
+  // when the dest drains, it reduces the awaitDrain counter
+  // on the source.  This would be more elegant with a .once()
+  // handler in flow(), but adding and removing repeatedly is
+  // too slow.
+  var ondrain = pipeOnDrain(src);
+  dest.on('drain', ondrain);
+
+  var cleanedUp = false;
+  function cleanup() {
+    debug('cleanup');
+    // cleanup event handlers once the pipe is broken
+    dest.removeListener('close', onclose);
+    dest.removeListener('finish', onfinish);
+    dest.removeListener('drain', ondrain);
+    dest.removeListener('error', onerror);
+    dest.removeListener('unpipe', onunpipe);
+    src.removeListener('end', onend);
+    src.removeListener('end', unpipe);
+    src.removeListener('data', ondata);
+
+    cleanedUp = true;
+
+    // if the reader is waiting for a drain event from this
+    // specific writer, then it would cause it to never start
+    // flowing again.
+    // So, if this is awaiting a drain, then we just call it now.
+    // If we don't know, then assume that we are waiting for one.
+    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
+  }
+
+  // If the user pushes more data while we're writing to dest then we'll end up
+  // in ondata again. However, we only want to increase awaitDrain once because
+  // dest will only emit one 'drain' event for the multiple writes.
+  // => Introduce a guard on increasing awaitDrain.
+  var increasedAwaitDrain = false;
+  src.on('data', ondata);
+  function ondata(chunk) {
+    debug('ondata');
+    increasedAwaitDrain = false;
+    var ret = dest.write(chunk);
+    if (false === ret && !increasedAwaitDrain) {
+      // If the user unpiped during `dest.write()`, it is possible
+      // to get stuck in a permanently paused state if that write
+      // also returned false.
+      // => Check whether `dest` is still a piping destination.
+      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
+        debug('false write response, pause', src._readableState.awaitDrain);
+        src._readableState.awaitDrain++;
+        increasedAwaitDrain = true;
+      }
+      src.pause();
+    }
+  }
 
-function BufferList (callback) {
-  if (!(this instanceof BufferList))
-    return new BufferList(callback)
+  // if the dest has an error, then stop piping into it.
+  // however, don't suppress the throwing behavior for this.
+  function onerror(er) {
+    debug('onerror', er);
+    unpipe();
+    dest.removeListener('error', onerror);
+    if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
+  }
 
-  this._bufs  = []
-  this.length = 0
+  // Make sure our error handler is attached before userland ones.
+  prependListener(dest, 'error', onerror);
 
-  if (typeof callback == 'function') {
-    this._callback = callback
+  // Both close and finish should trigger unpipe, but only once.
+  function onclose() {
+    dest.removeListener('finish', onfinish);
+    unpipe();
+  }
+  dest.once('close', onclose);
+  function onfinish() {
+    debug('onfinish');
+    dest.removeListener('close', onclose);
+    unpipe();
+  }
+  dest.once('finish', onfinish);
 
-    var piper = function piper (err) {
-      if (this._callback) {
-        this._callback(err)
-        this._callback = null
-      }
-    }.bind(this)
+  function unpipe() {
+    debug('unpipe');
+    src.unpipe(dest);
+  }
 
-    this.on('pipe', function onPipe (src) {
-      src.on('error', piper)
-    })
-    this.on('unpipe', function onUnpipe (src) {
-      src.removeListener('error', piper)
-    })
-  } else {
-    this.append(callback)
+  // tell the dest that it's being piped to
+  dest.emit('pipe', src);
+
+  // start the flow if it hasn't been started already.
+  if (!state.flowing) {
+    debug('pipe resume');
+    src.resume();
   }
 
-  DuplexStream.call(this)
+  return dest;
+};
+
+function pipeOnDrain(src) {
+  return function () {
+    var state = src._readableState;
+    debug('pipeOnDrain', state.awaitDrain);
+    if (state.awaitDrain) state.awaitDrain--;
+    if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
+      state.flowing = true;
+      flow(src);
+    }
+  };
 }
 
+Readable.prototype.unpipe = function (dest) {
+  var state = this._readableState;
+  var unpipeInfo = { hasUnpiped: false };
 
-util.inherits(BufferList, DuplexStream)
+  // if we're not piping anywhere, then do nothing.
+  if (state.pipesCount === 0) return this;
 
+  // just one destination.  most common case.
+  if (state.pipesCount === 1) {
+    // passed in one, but it's not the right one.
+    if (dest && dest !== state.pipes) return this;
 
-BufferList.prototype._offset = function _offset (offset) {
-  var tot = 0, i = 0, _t
-  if (offset === 0) return [ 0, 0 ]
-  for (; i < this._bufs.length; i++) {
-    _t = tot + this._bufs[i].length
-    if (offset < _t || i == this._bufs.length - 1) {
-      return [ i, offset - tot ]
+    if (!dest) dest = state.pipes;
+
+    // got a match.
+    state.pipes = null;
+    state.pipesCount = 0;
+    state.flowing = false;
+    if (dest) dest.emit('unpipe', this, unpipeInfo);
+    return this;
+  }
+
+  // slow case. multiple pipe destinations.
+
+  if (!dest) {
+    // remove all.
+    var dests = state.pipes;
+    var len = state.pipesCount;
+    state.pipes = null;
+    state.pipesCount = 0;
+    state.flowing = false;
+
+    for (var i = 0; i < len; i++) {
+      dests[i].emit('unpipe', this, unpipeInfo);
+    }return this;
+  }
+
+  // try to find the right one.
+  var index = indexOf(state.pipes, dest);
+  if (index === -1) return this;
+
+  state.pipes.splice(index, 1);
+  state.pipesCount -= 1;
+  if (state.pipesCount === 1) state.pipes = state.pipes[0];
+
+  dest.emit('unpipe', this, unpipeInfo);
+
+  return this;
+};
+
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
+Readable.prototype.on = function (ev, fn) {
+  var res = Stream.prototype.on.call(this, ev, fn);
+
+  if (ev === 'data') {
+    // Start flowing on next tick if stream isn't explicitly paused
+    if (this._readableState.flowing !== false) this.resume();
+  } else if (ev === 'readable') {
+    var state = this._readableState;
+    if (!state.endEmitted && !state.readableListening) {
+      state.readableListening = state.needReadable = true;
+      state.emittedReadable = false;
+      if (!state.reading) {
+        pna.nextTick(nReadingNextTick, this);
+      } else if (state.length) {
+        emitReadable(this);
+      }
     }
-    tot = _t
   }
+
+  return res;
+};
+Readable.prototype.addListener = Readable.prototype.on;
+
+function nReadingNextTick(self) {
+  debug('readable nexttick read 0');
+  self.read(0);
 }
 
-BufferList.prototype._reverseOffset = function (blOffset) {
-  var bufferId = blOffset[0]
-  var offset = blOffset[1]
-  for (var i = 0; i < bufferId; i++) {
-    offset += this._bufs[i].length
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function () {
+  var state = this._readableState;
+  if (!state.flowing) {
+    debug('resume');
+    state.flowing = true;
+    resume(this, state);
+  }
+  return this;
+};
+
+function resume(stream, state) {
+  if (!state.resumeScheduled) {
+    state.resumeScheduled = true;
+    pna.nextTick(resume_, stream, state);
   }
-  return offset
 }
 
-BufferList.prototype.append = function append (buf) {
-  var i = 0
+function resume_(stream, state) {
+  if (!state.reading) {
+    debug('resume read 0');
+    stream.read(0);
+  }
 
-  if (Buffer.isBuffer(buf)) {
-    this._appendBuffer(buf)
-  } else if (Array.isArray(buf)) {
-    for (; i < buf.length; i++)
-      this.append(buf[i])
-  } else if (buf instanceof BufferList) {
-    // unwrap argument into individual BufferLists
-    for (; i < buf._bufs.length; i++)
-      this.append(buf._bufs[i])
-  } else if (buf != null) {
-    // coerce number arguments to strings, since Buffer(number) does
-    // uninitialized memory allocation
-    if (typeof buf == 'number')
-      buf = buf.toString()
+  state.resumeScheduled = false;
+  state.awaitDrain = 0;
+  stream.emit('resume');
+  flow(stream);
+  if (state.flowing && !state.reading) stream.read(0);
+}
 
-    this._appendBuffer(Buffer.from(buf))
+Readable.prototype.pause = function () {
+  debug('call pause flowing=%j', this._readableState.flowing);
+  if (false !== this._readableState.flowing) {
+    debug('pause');
+    this._readableState.flowing = false;
+    this.emit('pause');
+  }
+  return this;
+};
+
+function flow(stream) {
+  var state = stream._readableState;
+  debug('flow', state.flowing);
+  while (state.flowing && stream.read() !== null) {}
+}
+
+// wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function (stream) {
+  var _this = this;
+
+  var state = this._readableState;
+  var paused = false;
+
+  stream.on('end', function () {
+    debug('wrapped end');
+    if (state.decoder && !state.ended) {
+      var chunk = state.decoder.end();
+      if (chunk && chunk.length) _this.push(chunk);
+    }
+
+    _this.push(null);
+  });
+
+  stream.on('data', function (chunk) {
+    debug('wrapped data');
+    if (state.decoder) chunk = state.decoder.write(chunk);
+
+    // don't skip over falsy values in objectMode
+    if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
+
+    var ret = _this.push(chunk);
+    if (!ret) {
+      paused = true;
+      stream.pause();
+    }
+  });
+
+  // proxy all the other methods.
+  // important when wrapping filters and duplexes.
+  for (var i in stream) {
+    if (this[i] === undefined && typeof stream[i] === 'function') {
+      this[i] = function (method) {
+        return function () {
+          return stream[method].apply(stream, arguments);
+        };
+      }(i);
+    }
   }
 
-  return this
-}
-
+  // proxy certain important events.
+  for (var n = 0; n < kProxyEvents.length; n++) {
+    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
+  }
 
-BufferList.prototype._appendBuffer = function appendBuffer (buf) {
-  this._bufs.push(buf)
-  this.length += buf.length
-}
+  // when we try to consume some more bytes, simply unpause the
+  // underlying stream.
+  this._read = function (n) {
+    debug('wrapped _read', n);
+    if (paused) {
+      paused = false;
+      stream.resume();
+    }
+  };
 
+  return this;
+};
 
-BufferList.prototype._write = function _write (buf, encoding, callback) {
-  this._appendBuffer(buf)
+Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
+  // making it explicit this property is not enumerable
+  // because otherwise some prototype manipulation in
+  // userland will fail
+  enumerable: false,
+  get: function () {
+    return this._readableState.highWaterMark;
+  }
+});
 
-  if (typeof callback == 'function')
-    callback()
-}
+// exposed for testing purposes only.
+Readable._fromList = fromList;
 
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromList(n, state) {
+  // nothing buffered
+  if (state.length === 0) return null;
 
-BufferList.prototype._read = function _read (size) {
-  if (!this.length)
-    return this.push(null)
+  var ret;
+  if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
+    // read it all, truncate the list
+    if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
+    state.buffer.clear();
+  } else {
+    // read part of list
+    ret = fromListPartial(n, state.buffer, state.decoder);
+  }
 
-  size = Math.min(size, this.length)
-  this.push(this.slice(0, size))
-  this.consume(size)
+  return ret;
 }
 
-
-BufferList.prototype.end = function end (chunk) {
-  DuplexStream.prototype.end.call(this, chunk)
-
-  if (this._callback) {
-    this._callback(null, this.slice())
-    this._callback = null
+// Extracts only enough buffered data to satisfy the amount requested.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromListPartial(n, list, hasStrings) {
+  var ret;
+  if (n < list.head.data.length) {
+    // slice is the same for buffers and strings
+    ret = list.head.data.slice(0, n);
+    list.head.data = list.head.data.slice(n);
+  } else if (n === list.head.data.length) {
+    // first chunk is a perfect match
+    ret = list.shift();
+  } else {
+    // result spans more than one buffer
+    ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
   }
+  return ret;
 }
 
-
-BufferList.prototype.get = function get (index) {
-  if (index > this.length || index < 0) {
-    return undefined
+// Copies a specified amount of characters from the list of buffered data
+// chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBufferString(n, list) {
+  var p = list.head;
+  var c = 1;
+  var ret = p.data;
+  n -= ret.length;
+  while (p = p.next) {
+    var str = p.data;
+    var nb = n > str.length ? str.length : n;
+    if (nb === str.length) ret += str;else ret += str.slice(0, n);
+    n -= nb;
+    if (n === 0) {
+      if (nb === str.length) {
+        ++c;
+        if (p.next) list.head = p.next;else list.head = list.tail = null;
+      } else {
+        list.head = p;
+        p.data = str.slice(nb);
+      }
+      break;
+    }
+    ++c;
   }
-  var offset = this._offset(index)
-  return this._bufs[offset[0]][offset[1]]
+  list.length -= c;
+  return ret;
 }
 
-
-BufferList.prototype.slice = function slice (start, end) {
-  if (typeof start == 'number' && start < 0)
-    start += this.length
-  if (typeof end == 'number' && end < 0)
-    end += this.length
-  return this.copy(null, 0, start, end)
+// Copies a specified amount of bytes from the list of buffered data chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBuffer(n, list) {
+  var ret = Buffer.allocUnsafe(n);
+  var p = list.head;
+  var c = 1;
+  p.data.copy(ret);
+  n -= p.data.length;
+  while (p = p.next) {
+    var buf = p.data;
+    var nb = n > buf.length ? buf.length : n;
+    buf.copy(ret, ret.length - n, 0, nb);
+    n -= nb;
+    if (n === 0) {
+      if (nb === buf.length) {
+        ++c;
+        if (p.next) list.head = p.next;else list.head = list.tail = null;
+      } else {
+        list.head = p;
+        p.data = buf.slice(nb);
+      }
+      break;
+    }
+    ++c;
+  }
+  list.length -= c;
+  return ret;
 }
 
+function endReadable(stream) {
+  var state = stream._readableState;
 
-BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
-  if (typeof srcStart != 'number' || srcStart < 0)
-    srcStart = 0
-  if (typeof srcEnd != 'number' || srcEnd > this.length)
-    srcEnd = this.length
-  if (srcStart >= this.length)
-    return dst || Buffer.alloc(0)
-  if (srcEnd <= 0)
-    return dst || Buffer.alloc(0)
-
-  var copy   = !!dst
-    , off    = this._offset(srcStart)
-    , len    = srcEnd - srcStart
-    , bytes  = len
-    , bufoff = (copy && dstStart) || 0
-    , start  = off[1]
-    , l
-    , i
-
-  // copy/slice everything
-  if (srcStart === 0 && srcEnd == this.length) {
-    if (!copy) { // slice, but full concat if multiple buffers
-      return this._bufs.length === 1
-        ? this._bufs[0]
-        : Buffer.concat(this._bufs, this.length)
-    }
+  // If we get here before consuming all the bytes, then that is a
+  // bug in node.  Should never happen.
+  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
 
-    // copy, need to copy individual buffers
-    for (i = 0; i < this._bufs.length; i++) {
-      this._bufs[i].copy(dst, bufoff)
-      bufoff += this._bufs[i].length
-    }
+  if (!state.endEmitted) {
+    state.ended = true;
+    pna.nextTick(endReadableNT, state, stream);
+  }
+}
 
-    return dst
+function endReadableNT(state, stream) {
+  // Check that we didn't get one last unshift.
+  if (!state.endEmitted && state.length === 0) {
+    state.endEmitted = true;
+    stream.readable = false;
+    stream.emit('end');
   }
+}
 
-  // easy, cheap case where it's a subset of one of the buffers
-  if (bytes <= this._bufs[off[0]].length - start) {
-    return copy
-      ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
-      : this._bufs[off[0]].slice(start, start + bytes)
+function indexOf(xs, x) {
+  for (var i = 0, l = xs.length; i < l; i++) {
+    if (xs[i] === x) return i;
   }
+  return -1;
+}
+}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
+},{"./_stream_duplex":10,"./internal/streams/BufferList":15,"./internal/streams/destroy":16,"./internal/streams/stream":17,"_process":70,"core-util-is":2,"events":66,"inherits":3,"isarray":4,"process-nextick-args":9,"safe-buffer":19,"string_decoder/":20,"util":64}],13:[function(require,module,exports){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-  if (!copy) // a slice, we need something to copy in to
-    dst = Buffer.allocUnsafe(len)
+// a transform stream is a readable/writable stream where you do
+// something with the data.  Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored.  (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation.  For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes.  When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up.  When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer.  When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks.  If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk.  However,
+// a pathological inflate type of transform can cause excessive buffering
+// here.  For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output.  In this case, you could write a very small
+// amount of input, and end up with a very large amount of output.  In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform.  A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
 
-  for (i = off[0]; i < this._bufs.length; i++) {
-    l = this._bufs[i].length - start
+'use strict';
 
-    if (bytes > l) {
-      this._bufs[i].copy(dst, bufoff, start)
-    } else {
-      this._bufs[i].copy(dst, bufoff, start, start + bytes)
-      break
-    }
+module.exports = Transform;
 
-    bufoff += l
-    bytes -= l
+var Duplex = require('./_stream_duplex');
 
-    if (start)
-      start = 0
-  }
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
 
-  return dst
-}
+util.inherits(Transform, Duplex);
 
-BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
-  start = start || 0
-  end = typeof end !== 'number' ? this.length : end
+function afterTransform(er, data) {
+  var ts = this._transformState;
+  ts.transforming = false;
 
-  if (start < 0)
-    start += this.length
-  if (end < 0)
-    end += this.length
+  var cb = ts.writecb;
 
-  if (start === end) {
-    return new BufferList()
+  if (!cb) {
+    return this.emit('error', new Error('write callback called multiple times'));
   }
-  var startOffset = this._offset(start)
-    , endOffset = this._offset(end)
-    , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
-
-  if (endOffset[1] == 0)
-    buffers.pop()
-  else
-    buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
 
-  if (startOffset[1] != 0)
-    buffers[0] = buffers[0].slice(startOffset[1])
+  ts.writechunk = null;
+  ts.writecb = null;
 
-  return new BufferList(buffers)
-}
+  if (data != null) // single equals check for both `null` and `undefined`
+    this.push(data);
 
-BufferList.prototype.toString = function toString (encoding, start, end) {
-  return this.slice(start, end).toString(encoding)
-}
+  cb(er);
 
-BufferList.prototype.consume = function consume (bytes) {
-  while (this._bufs.length) {
-    if (bytes >= this._bufs[0].length) {
-      bytes -= this._bufs[0].length
-      this.length -= this._bufs[0].length
-      this._bufs.shift()
-    } else {
-      this._bufs[0] = this._bufs[0].slice(bytes)
-      this.length -= bytes
-      break
-    }
+  var rs = this._readableState;
+  rs.reading = false;
+  if (rs.needReadable || rs.length < rs.highWaterMark) {
+    this._read(rs.highWaterMark);
   }
-  return this
 }
 
+function Transform(options) {
+  if (!(this instanceof Transform)) return new Transform(options);
 
-BufferList.prototype.duplicate = function duplicate () {
-  var i = 0
-    , copy = new BufferList()
-
-  for (; i < this._bufs.length; i++)
-    copy.append(this._bufs[i])
-
-  return copy
-}
+  Duplex.call(this, options);
 
+  this._transformState = {
+    afterTransform: afterTransform.bind(this),
+    needTransform: false,
+    transforming: false,
+    writecb: null,
+    writechunk: null,
+    writeencoding: null
+  };
 
-BufferList.prototype.destroy = function destroy () {
-  this._bufs.length = 0
-  this.length = 0
-  this.push(null)
-}
+  // start out asking for a readable event once data is transformed.
+  this._readableState.needReadable = true;
 
+  // we have implemented the _read method, and done the other things
+  // that Readable wants before the first _read call, so unset the
+  // sync guard flag.
+  this._readableState.sync = false;
 
-BufferList.prototype.indexOf = function (search, offset, encoding) {
-  if (encoding === undefined && typeof offset === 'string') {
-    encoding = offset
-    offset = undefined
-  }
-  if (typeof search === 'function' || Array.isArray(search)) {
-    throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
-  } else if (typeof search === 'number') {
-      search = Buffer.from([search])
-  } else if (typeof search === 'string') {
-    search = Buffer.from(search, encoding)
-  } else if (search instanceof BufferList) {
-    search = search.slice()
-  } else if (!Buffer.isBuffer(search)) {
-    search = Buffer.from(search)
-  }
+  if (options) {
+    if (typeof options.transform === 'function') this._transform = options.transform;
 
-  offset = Number(offset || 0)
-  if (isNaN(offset)) {
-    offset = 0
+    if (typeof options.flush === 'function') this._flush = options.flush;
   }
 
-  if (offset < 0) {
-    offset = this.length + offset
-  }
+  // When the writable side finishes, then flush out anything remaining.
+  this.on('prefinish', prefinish);
+}
 
-  if (offset < 0) {
-    offset = 0
-  }
+function prefinish() {
+  var _this = this;
 
-  if (search.length === 0) {
-    return offset > this.length ? this.length : offset
+  if (typeof this._flush === 'function') {
+    this._flush(function (er, data) {
+      done(_this, er, data);
+    });
+  } else {
+    done(this, null, null);
   }
+}
 
-  var blOffset = this._offset(offset)
-  var blIndex = blOffset[0] // index of which internal buffer we're working on
-  var buffOffset = blOffset[1] // offset of the internal buffer we're working on
+Transform.prototype.push = function (chunk, encoding) {
+  this._transformState.needTransform = false;
+  return Duplex.prototype.push.call(this, chunk, encoding);
+};
 
-  // scan over each buffer
-  for (blIndex; blIndex < this._bufs.length; blIndex++) {
-    var buff = this._bufs[blIndex]
-    while(buffOffset < buff.length) {
-      var availableWindow = buff.length - buffOffset
-      if (availableWindow >= search.length) {
-        var nativeSearchResult = buff.indexOf(search, buffOffset)
-        if (nativeSearchResult !== -1) {
-          return this._reverseOffset([blIndex, nativeSearchResult])
-        }
-        buffOffset = buff.length - search.length + 1 // end of native search window
-      } else {
-        var revOffset = this._reverseOffset([blIndex, buffOffset])
-        if (this._match(revOffset, search)) {
-          return revOffset
-        }
-        buffOffset++
-      }
-    }
-    buffOffset = 0
-  }
-  return -1
-}
+// This is the part where you do stuff!
+// override this function in implementation classes.
+// 'chunk' is an input chunk.
+//
+// Call `push(newChunk)` to pass along transformed output
+// to the readable side.  You may call 'push' zero or more times.
+//
+// Call `cb(err)` when you are done with this chunk.  If you pass
+// an error, then that'll put the hurt on the whole operation.  If you
+// never call cb(), then you'll never get another chunk.
+Transform.prototype._transform = function (chunk, encoding, cb) {
+  throw new Error('_transform() is not implemented');
+};
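+// Illustrative sketch, not part of the bundled library (hypothetical helper name
+// exampleUpperCaseTransform): the contract described above can also be met by
+// passing a `transform` option instead of overriding _transform; assumes
+// Buffer or string chunks.
+function exampleUpperCaseTransform() {
+  return new Transform({
+    transform: function (chunk, encoding, cb) {
+      // push the transformed chunk and signal that this chunk is finished
+      cb(null, chunk.toString().toUpperCase());
+    }
+  });
+}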
 
-BufferList.prototype._match = function(offset, search) {
-  if (this.length - offset < search.length) {
-    return false
-  }
-  for (var searchOffset = 0; searchOffset < search.length ; searchOffset++) {
-    if(this.get(offset + searchOffset) !== search[searchOffset]){
-      return false
-    }
+Transform.prototype._write = function (chunk, encoding, cb) {
+  var ts = this._transformState;
+  ts.writecb = cb;
+  ts.writechunk = chunk;
+  ts.writeencoding = encoding;
+  if (!ts.transforming) {
+    var rs = this._readableState;
+    if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
   }
-  return true
-}
+};
 
+// Doesn't matter what the args are here.
+// _transform does all the work.
+// That we got here means that the readable side wants more data.
+Transform.prototype._read = function (n) {
+  var ts = this._transformState;
 
-;(function () {
-  var methods = {
-      'readDoubleBE' : 8
-    , 'readDoubleLE' : 8
-    , 'readFloatBE'  : 4
-    , 'readFloatLE'  : 4
-    , 'readInt32BE'  : 4
-    , 'readInt32LE'  : 4
-    , 'readUInt32BE' : 4
-    , 'readUInt32LE' : 4
-    , 'readInt16BE'  : 2
-    , 'readInt16LE'  : 2
-    , 'readUInt16BE' : 2
-    , 'readUInt16LE' : 2
-    , 'readInt8'     : 1
-    , 'readUInt8'    : 1
-    , 'readIntBE'    : null
-    , 'readIntLE'    : null
-    , 'readUIntBE'   : null
-    , 'readUIntLE'   : null
+  if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
+    ts.transforming = true;
+    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
+  } else {
+    // mark that we need a transform, so that any data that comes in
+    // will get processed, now that we've asked for it.
+    ts.needTransform = true;
   }
+};
 
-  for (var m in methods) {
-    (function (m) {
-      if (methods[m] === null) {
-        BufferList.prototype[m] = function (offset, byteLength) {
-          return this.slice(offset, offset + byteLength)[m](0, byteLength)
-        }
-      }
-      else {
-        BufferList.prototype[m] = function (offset) {
-          return this.slice(offset, offset + methods[m])[m](0)
-        }
-      }
-    }(m))
-  }
-}())
+Transform.prototype._destroy = function (err, cb) {
+  var _this2 = this;
 
+  Duplex.prototype._destroy.call(this, err, function (err2) {
+    cb(err2);
+    _this2.emit('close');
+  });
+};
 
-module.exports = BufferList
+function done(stream, er, data) {
+  if (er) return stream.emit('error', er);
 
-},{"readable-stream":27,"safe-buffer":28,"util":53}],11:[function(require,module,exports){
-(function (Buffer){
+  if (data != null) // single equals check for both `null` and `undefined`
+    stream.push(data);
+
+  // if there's nothing in the write buffer, then that means
+  // that nothing more will ever be provided
+  if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
+
+  if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
+
+  return stream.push(null);
+}
+},{"./_stream_duplex":10,"core-util-is":2,"inherits":3}],14:[function(require,module,exports){
+(function (process,global,setImmediate){
 // Copyright Joyent, Inc. and other Node contributors.
 //
 // Permission is hereby granted, free of charge, to any person obtaining a
@@ -1964,3844 +2976,5208 @@ module.exports = BufferList
 // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 // USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-// NOTE: These type checking functions intentionally don't use `instanceof`
-// because it is fragile and can be easily faked with `Object.create()`.
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, encoding, cb), and it'll handle all
+// the drain event emission and buffering.
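+// Illustrative sketch, not part of the bundled library: a minimal writable built on
+// this module's `write` option (equivalent to implementing _write), assuming a
+// caller-supplied `sink` callback:
+//
+//   var w = new Writable({
+//     write: function (chunk, encoding, cb) {
+//       sink(chunk);   // hand the chunk off
+//       cb();          // ack so buffered writes / 'drain' can proceed
+//     }
+//   });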
 
-function isArray(arg) {
-  if (Array.isArray) {
-    return Array.isArray(arg);
-  }
-  return objectToString(arg) === '[object Array]';
-}
-exports.isArray = isArray;
+'use strict';
 
-function isBoolean(arg) {
-  return typeof arg === 'boolean';
-}
-exports.isBoolean = isBoolean;
+/*<replacement>*/
 
-function isNull(arg) {
-  return arg === null;
-}
-exports.isNull = isNull;
+var pna = require('process-nextick-args');
+/*</replacement>*/
 
-function isNullOrUndefined(arg) {
-  return arg == null;
-}
-exports.isNullOrUndefined = isNullOrUndefined;
+module.exports = Writable;
 
-function isNumber(arg) {
-  return typeof arg === 'number';
+/* <replacement> */
+function WriteReq(chunk, encoding, cb) {
+  this.chunk = chunk;
+  this.encoding = encoding;
+  this.callback = cb;
+  this.next = null;
 }
-exports.isNumber = isNumber;
 
-function isString(arg) {
-  return typeof arg === 'string';
-}
-exports.isString = isString;
+// It seems a linked list but it is not
+// there will be only 2 of these for each stream
+function CorkedRequest(state) {
+  var _this = this;
 
-function isSymbol(arg) {
-  return typeof arg === 'symbol';
+  this.next = null;
+  this.entry = null;
+  this.finish = function () {
+    onCorkedFinish(_this, state);
+  };
 }
-exports.isSymbol = isSymbol;
+/* </replacement> */
 
-function isUndefined(arg) {
-  return arg === void 0;
-}
-exports.isUndefined = isUndefined;
+/*<replacement>*/
+var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
+/*</replacement>*/
 
-function isRegExp(re) {
-  return objectToString(re) === '[object RegExp]';
-}
-exports.isRegExp = isRegExp;
+/*<replacement>*/
+var Duplex;
+/*</replacement>*/
 
-function isObject(arg) {
-  return typeof arg === 'object' && arg !== null;
-}
-exports.isObject = isObject;
+Writable.WritableState = WritableState;
 
-function isDate(d) {
-  return objectToString(d) === '[object Date]';
-}
-exports.isDate = isDate;
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
 
-function isError(e) {
-  return (objectToString(e) === '[object Error]' || e instanceof Error);
-}
-exports.isError = isError;
+/*<replacement>*/
+var internalUtil = {
+  deprecate: require('util-deprecate')
+};
+/*</replacement>*/
 
-function isFunction(arg) {
-  return typeof arg === 'function';
-}
-exports.isFunction = isFunction;
+/*<replacement>*/
+var Stream = require('./internal/streams/stream');
+/*</replacement>*/
 
-function isPrimitive(arg) {
-  return arg === null ||
-         typeof arg === 'boolean' ||
-         typeof arg === 'number' ||
-         typeof arg === 'string' ||
-         typeof arg === 'symbol' ||  // ES6 symbol
-         typeof arg === 'undefined';
+/*<replacement>*/
+
+var Buffer = require('safe-buffer').Buffer;
+var OurUint8Array = global.Uint8Array || function () {};
+function _uint8ArrayToBuffer(chunk) {
+  return Buffer.from(chunk);
+}
+function _isUint8Array(obj) {
+  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
 }
-exports.isPrimitive = isPrimitive;
 
-exports.isBuffer = Buffer.isBuffer;
+/*</replacement>*/
 
-function objectToString(o) {
-  return Object.prototype.toString.call(o);
-}
+var destroyImpl = require('./internal/streams/destroy');
 
-}).call(this,{"isBuffer":require("../../../../../../../../../usr/local/lib/node_modules/browserify/node_modules/is-buffer/index.js")})
-},{"../../../../../../../../../usr/local/lib/node_modules/browserify/node_modules/is-buffer/index.js":47}],12:[function(require,module,exports){
-if (typeof Object.create === 'function') {
-  // implementation from standard node.js 'util' module
-  module.exports = function inherits(ctor, superCtor) {
-    ctor.super_ = superCtor
-    ctor.prototype = Object.create(superCtor.prototype, {
-      constructor: {
-        value: ctor,
-        enumerable: false,
-        writable: true,
-        configurable: true
-      }
-    });
-  };
-} else {
-  // old school shim for old browsers
-  module.exports = function inherits(ctor, superCtor) {
-    ctor.super_ = superCtor
-    var TempCtor = function () {}
-    TempCtor.prototype = superCtor.prototype
-    ctor.prototype = new TempCtor()
-    ctor.prototype.constructor = ctor
-  }
-}
+util.inherits(Writable, Stream);
+
+function nop() {}
+
+function WritableState(options, stream) {
+  Duplex = Duplex || require('./_stream_duplex');
+
+  options = options || {};
+
+  // Duplex streams are both readable and writable, but share
+  // the same options object.
+  // However, some cases require setting options to different
+  // values for the readable and the writable sides of the duplex stream.
+  // These options can be provided separately as readableXXX and writableXXX.
+  var isDuplex = stream instanceof Duplex;
+
+  // object stream flag to indicate whether or not this stream
+  // contains buffers or objects.
+  this.objectMode = !!options.objectMode;
+
+  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
+
+  // the point at which write() starts returning false
+  // Note: 0 is a valid value, means that we always return false if
+  // the entire buffer is not flushed immediately on write()
+  var hwm = options.highWaterMark;
+  var writableHwm = options.writableHighWaterMark;
+  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+
+  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
+
+  // cast to ints.
+  this.highWaterMark = Math.floor(this.highWaterMark);
+
+  // if _final has been called
+  this.finalCalled = false;
+
+  // drain event flag.
+  this.needDrain = false;
+  // at the start of calling end()
+  this.ending = false;
+  // when end() has been called, and returned
+  this.ended = false;
+  // when 'finish' is emitted
+  this.finished = false;
+
+  // has it been destroyed
+  this.destroyed = false;
+
+  // should we decode strings into buffers before passing to _write?
+  // this is here so that some node-core streams can optimize string
+  // handling at a lower level.
+  var noDecode = options.decodeStrings === false;
+  this.decodeStrings = !noDecode;
+
+  // Crypto is kind of old and crusty.  Historically, its default string
+  // encoding is 'binary' so we have to make this configurable.
+  // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+  // not an actual buffer we keep track of, but a measurement
+  // of how much we're waiting to get pushed to some underlying
+  // socket or file.
+  this.length = 0;
+
+  // a flag to see when we're in the middle of a write.
+  this.writing = false;
 
-},{}],13:[function(require,module,exports){
-var toString = {}.toString;
+  // when true all writes will be buffered until .uncork() call
+  this.corked = 0;
 
-module.exports = Array.isArray || function (arr) {
-  return toString.call(arr) == '[object Array]';
-};
+  // a flag to be able to tell if the onwrite cb is called immediately,
+  // or on a later tick.  We set this to true at first, because any
+  // actions that shouldn't happen until "later" should generally also
+  // not happen before the first write call.
+  this.sync = true;
 
-},{}],14:[function(require,module,exports){
-'use strict'
+  // a flag to know if we're processing previously buffered items, which
+  // may call the _write() callback in the same tick, so that we don't
+  // end up in an overlapped onwrite situation.
+  this.bufferProcessing = false;
 
-var Buffer = require('safe-buffer').Buffer
-var assert = require('assert')
-var bl = require('bl')
-var streams = require('./lib/streams')
-var buildDecode = require('./lib/decoder')
-var buildEncode = require('./lib/encoder')
+  // the callback that's passed to _write(chunk,cb)
+  this.onwrite = function (er) {
+    onwrite(stream, er);
+  };
 
-function msgpack (options) {
-  var encodingTypes = []
-  var decodingTypes = []
+  // the callback that the user supplies to write(chunk,encoding,cb)
+  this.writecb = null;
 
-  options = options || {
-    forceFloat64: false,
-    compatibilityMode: false,
-    disableTimestampEncoding: false // if true, skips encoding Dates using the msgpack timestamp ext format (-1)
-  }
+  // the amount that is being written when _write is called.
+  this.writelen = 0;
 
-  function registerEncoder (check, encode) {
-    assert(check, 'must have an encode function')
-    assert(encode, 'must have an encode function')
+  this.bufferedRequest = null;
+  this.lastBufferedRequest = null;
 
-    encodingTypes.push({
-      check: check, encode: encode
-    })
+  // number of pending user-supplied write callbacks
+  // this must be 0 before 'finish' can be emitted
+  this.pendingcb = 0;
 
-    return this
-  }
+  // emit prefinish if the only thing we're waiting for is _write cbs
+  // This is relevant for synchronous Transform streams
+  this.prefinished = false;
 
-  function registerDecoder (type, decode) {
-    assert(type >= 0, 'must have a non-negative type')
-    assert(decode, 'must have a decode function')
+  // True if the error was already emitted and should not be thrown again
+  this.errorEmitted = false;
 
-    decodingTypes.push({
-      type: type, decode: decode
-    })
+  // count buffered requests
+  this.bufferedRequestCount = 0;
 
-    return this
+  // allocate the first CorkedRequest, there is always
+  // one allocated and free to use, and we maintain at most two
+  this.corkedRequestsFree = new CorkedRequest(this);
+}
+
+WritableState.prototype.getBuffer = function getBuffer() {
+  var current = this.bufferedRequest;
+  var out = [];
+  while (current) {
+    out.push(current);
+    current = current.next;
   }
+  return out;
+};
 
-  function register (type, constructor, encode, decode) {
-    assert(constructor, 'must have a constructor')
-    assert(encode, 'must have an encode function')
-    assert(type >= 0, 'must have a non-negative type')
-    assert(decode, 'must have a decode function')
+(function () {
+  try {
+    Object.defineProperty(WritableState.prototype, 'buffer', {
+      get: internalUtil.deprecate(function () {
+        return this.getBuffer();
+      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
+    });
+  } catch (_) {}
+})();
 
-    function check (obj) {
-      return (obj instanceof constructor)
+// Test _writableState for inheritance to account for Duplex streams,
+// whose prototype chain only points to Readable.
+var realHasInstance;
+if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
+  realHasInstance = Function.prototype[Symbol.hasInstance];
+  Object.defineProperty(Writable, Symbol.hasInstance, {
+    value: function (object) {
+      if (realHasInstance.call(this, object)) return true;
+      if (this !== Writable) return false;
+
+      return object && object._writableState instanceof WritableState;
     }
+  });
+} else {
+  realHasInstance = function (object) {
+    return object instanceof this;
+  };
+}
 
-    function reEncode (obj) {
-      var buf = bl()
-      var header = Buffer.allocUnsafe(1)
+function Writable(options) {
+  Duplex = Duplex || require('./_stream_duplex');
 
-      header.writeInt8(type, 0)
+  // Writable ctor is applied to Duplexes, too.
+  // `realHasInstance` is necessary because using plain `instanceof`
+  // would return false, as no `_writableState` property is attached.
 
-      buf.append(header)
-      buf.append(encode(obj))
+  // Trying to use the custom `instanceof` for Writable here will also break the
+  // Node.js LazyTransform implementation, which has a non-trivial getter for
+  // `_writableState` that would lead to infinite recursion.
+  if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
+    return new Writable(options);
+  }
 
-      return buf
-    }
+  this._writableState = new WritableState(options, this);
 
-    this.registerEncoder(check, reEncode)
-    this.registerDecoder(type, decode)
+  // legacy.
+  this.writable = true;
 
-    return this
-  }
+  if (options) {
+    if (typeof options.write === 'function') this._write = options.write;
 
-  return {
-    encode: buildEncode(encodingTypes, options.forceFloat64, options.compatibilityMode, options.disableTimestampEncoding),
-    decode: buildDecode(decodingTypes),
-    register: register,
-    registerEncoder: registerEncoder,
-    registerDecoder: registerDecoder,
-    encoder: streams.encoder,
-    decoder: streams.decoder,
-    // needed for levelup support
-    buffer: true,
-    type: 'msgpack5',
-    IncompleteBufferError: buildDecode.IncompleteBufferError
+    if (typeof options.writev === 'function') this._writev = options.writev;
+
+    if (typeof options.destroy === 'function') this._destroy = options.destroy;
+
+    if (typeof options.final === 'function') this._final = options.final;
   }
+
+  Stream.call(this);
 }
 
-module.exports = msgpack
+// Otherwise people can pipe Writable streams, which is just wrong.
+Writable.prototype.pipe = function () {
+  this.emit('error', new Error('Cannot pipe, not readable'));
+};
 
-},{"./lib/decoder":15,"./lib/encoder":16,"./lib/streams":17,"assert":38,"bl":10,"safe-buffer":28}],15:[function(require,module,exports){
-'use strict'
+function writeAfterEnd(stream, cb) {
+  var er = new Error('write after end');
+  // TODO: defer error events consistently everywhere, not just the cb
+  stream.emit('error', er);
+  pna.nextTick(cb, er);
+}
 
-var bl = require('bl')
-var util = require('util')
+// Checks that a user-supplied chunk is valid, especially for the particular
+// mode the stream is in. Currently this means that `null` is never accepted
+// and undefined/non-string values are only allowed in object mode.
+function validChunk(stream, state, chunk, cb) {
+  var valid = true;
+  var er = false;
 
-function IncompleteBufferError (message) {
-  Error.call(this) // super constructor
-  if (Error.captureStackTrace) {
-    Error.captureStackTrace(this, this.constructor) // super helper method to include stack trace in error object
+  if (chunk === null) {
+    er = new TypeError('May not write null values to stream');
+  } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+    er = new TypeError('Invalid non-string/buffer chunk');
   }
-  this.name = this.constructor.name
-  this.message = message || 'unable to decode'
+  if (er) {
+    stream.emit('error', er);
+    pna.nextTick(cb, er);
+    valid = false;
+  }
+  return valid;
 }
 
-util.inherits(IncompleteBufferError, Error)
-
-module.exports = function buildDecode (decodingTypes) {
-  return decode
-
-  function getSize (first) {
-    switch (first) {
-      case 0xc4:
-        return 2
-      case 0xc5:
-        return 3
-      case 0xc6:
-        return 5
-      case 0xc7:
-        return 3
-      case 0xc8:
-        return 4
-      case 0xc9:
-        return 6
-      case 0xca:
-        return 5
-      case 0xcb:
-        return 9
-      case 0xcc:
-        return 2
-      case 0xcd:
-        return 3
-      case 0xce:
-        return 5
-      case 0xcf:
-        return 9
-      case 0xd0:
-        return 2
-      case 0xd1:
-        return 3
-      case 0xd2:
-        return 5
-      case 0xd3:
-        return 9
-      case 0xd4:
-        return 3
-      case 0xd5:
-        return 4
-      case 0xd6:
-        return 6
-      case 0xd7:
-        return 10
-      case 0xd8:
-        return 18
-      case 0xd9:
-        return 2
-      case 0xda:
-        return 3
-      case 0xdb:
-        return 5
-      case 0xde:
-        return 3
-      default:
-        return -1
-    }
+Writable.prototype.write = function (chunk, encoding, cb) {
+  var state = this._writableState;
+  var ret = false;
+  var isBuf = !state.objectMode && _isUint8Array(chunk);
+
+  if (isBuf && !Buffer.isBuffer(chunk)) {
+    chunk = _uint8ArrayToBuffer(chunk);
   }
 
-  function hasMinBufferSize (first, length) {
-    var size = getSize(first)
+  if (typeof encoding === 'function') {
+    cb = encoding;
+    encoding = null;
+  }
 
-    if (size !== -1 && length < size) {
-      return false
-    } else {
-      return true
-    }
+  if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
+
+  if (typeof cb !== 'function') cb = nop;
+
+  if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
+    state.pendingcb++;
+    ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
   }
 
-  function isValidDataSize (dataLength, bufLength, headerLength) {
-    return bufLength >= headerLength + dataLength
+  return ret;
+};
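+
+// Usage sketch (illustrative): write() returns false once the buffered length reaches
+// highWaterMark, so producers should pause and resume when 'drain' fires, e.g.
+//   if (!writable.write(chunk)) writable.once('drain', writeMore);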
+
+Writable.prototype.cork = function () {
+  var state = this._writableState;
+
+  state.corked++;
+};
+
+Writable.prototype.uncork = function () {
+  var state = this._writableState;
+
+  if (state.corked) {
+    state.corked--;
+
+    if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
   }
+};
 
-  function buildDecodeResult (value, bytesConsumed) {
-    return {
-      value: value,
-      bytesConsumed: bytesConsumed
-    }
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+  // node::ParseEncoding() requires lower case.
+  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
+  if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
+  this._writableState.defaultEncoding = encoding;
+  return this;
+};
+
+function decodeChunk(state, chunk, encoding) {
+  if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
+    chunk = Buffer.from(chunk, encoding);
   }
+  return chunk;
+}
 
-  function decode (buf) {
-    if (!(buf instanceof bl)) {
-      buf = bl().append(buf)
+Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
+  // making it explicit this property is not enumerable
+  // because otherwise some prototype manipulation in
+  // userland will fail
+  enumerable: false,
+  get: function () {
+    return this._writableState.highWaterMark;
+  }
+});
+
+// if we're already writing something, then just put this
+// in the queue, and wait our turn.  Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
+  if (!isBuf) {
+    var newChunk = decodeChunk(state, chunk, encoding);
+    if (chunk !== newChunk) {
+      isBuf = true;
+      encoding = 'buffer';
+      chunk = newChunk;
     }
+  }
+  var len = state.objectMode ? 1 : chunk.length;
 
-    var result = tryDecode(buf)
-    if (result) {
-      buf.consume(result.bytesConsumed)
-      return result.value
+  state.length += len;
+
+  var ret = state.length < state.highWaterMark;
+  // we must ensure that previous needDrain will not be reset to false.
+  if (!ret) state.needDrain = true;
+
+  if (state.writing || state.corked) {
+    var last = state.lastBufferedRequest;
+    state.lastBufferedRequest = {
+      chunk: chunk,
+      encoding: encoding,
+      isBuf: isBuf,
+      callback: cb,
+      next: null
+    };
+    if (last) {
+      last.next = state.lastBufferedRequest;
     } else {
-      throw new IncompleteBufferError()
+      state.bufferedRequest = state.lastBufferedRequest;
     }
+    state.bufferedRequestCount += 1;
+  } else {
+    doWrite(stream, state, false, len, chunk, encoding, cb);
   }
 
-  function tryDecode (buf, offset) {
-    offset = offset === undefined ? 0 : offset
-    var bufLength = buf.length - offset
-    if (bufLength <= 0) {
-      return null
-    }
+  return ret;
+}
 
-    var first = buf.readUInt8(offset)
-    var length
-    var result = 0
-    var type
-    var bytePos
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
+  state.writelen = len;
+  state.writecb = cb;
+  state.writing = true;
+  state.sync = true;
+  if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
+  state.sync = false;
+}
 
-    if (!hasMinBufferSize(first, bufLength)) {
-      return null
-    }
+function onwriteError(stream, state, sync, er, cb) {
+  --state.pendingcb;
 
-    switch (first) {
-      case 0xc0:
-        return buildDecodeResult(null, 1)
-      case 0xc2:
-        return buildDecodeResult(false, 1)
-      case 0xc3:
-        return buildDecodeResult(true, 1)
-      case 0xcc:
-        // 1-byte unsigned int
-        result = buf.readUInt8(offset + 1)
-        return buildDecodeResult(result, 2)
-      case 0xcd:
-        // 2-bytes BE unsigned int
-        result = buf.readUInt16BE(offset + 1)
-        return buildDecodeResult(result, 3)
-      case 0xce:
-        // 4-bytes BE unsigned int
-        result = buf.readUInt32BE(offset + 1)
-        return buildDecodeResult(result, 5)
-      case 0xcf:
-        // 8-bytes BE unsigned int
-        // Read long byte by byte, big-endian
-        for (bytePos = 7; bytePos >= 0; bytePos--) {
-          result += (buf.readUInt8(offset + bytePos + 1) * Math.pow(2, (8 * (7 - bytePos))))
-        }
-        return buildDecodeResult(result, 9)
-      case 0xd0:
-        // 1-byte signed int
-        result = buf.readInt8(offset + 1)
-        return buildDecodeResult(result, 2)
-      case 0xd1:
-        // 2-bytes signed int
-        result = buf.readInt16BE(offset + 1)
-        return buildDecodeResult(result, 3)
-      case 0xd2:
-        // 4-bytes signed int
-        result = buf.readInt32BE(offset + 1)
-        return buildDecodeResult(result, 5)
-      case 0xd3:
-        result = readInt64BE(buf.slice(offset + 1, offset + 9), 0)
-        return buildDecodeResult(result, 9)
-      case 0xca:
-        // 4-bytes float
-        result = buf.readFloatBE(offset + 1)
-        return buildDecodeResult(result, 5)
-      case 0xcb:
-        // 8-bytes double
-        result = buf.readDoubleBE(offset + 1)
-        return buildDecodeResult(result, 9)
-      case 0xd9:
-        // strings up to 2^8 - 1 bytes
-        length = buf.readUInt8(offset + 1)
-        if (!isValidDataSize(length, bufLength, 2)) {
-          return null
-        }
-        result = buf.toString('utf8', offset + 2, offset + 2 + length)
-        return buildDecodeResult(result, 2 + length)
-      case 0xda:
-        // strings up to 2^16 - 2 bytes
-        length = buf.readUInt16BE(offset + 1)
-        if (!isValidDataSize(length, bufLength, 3)) {
-          return null
-        }
-        result = buf.toString('utf8', offset + 3, offset + 3 + length)
-        return buildDecodeResult(result, 3 + length)
-      case 0xdb:
-        // strings up to 2^32 - 4 bytes
-        length = buf.readUInt32BE(offset + 1)
-        if (!isValidDataSize(length, bufLength, 5)) {
-          return null
-        }
-        result = buf.toString('utf8', offset + 5, offset + 5 + length)
-        return buildDecodeResult(result, 5 + length)
-      case 0xc4:
-        // buffers up to 2^8 - 1 bytes
-        length = buf.readUInt8(offset + 1)
-        if (!isValidDataSize(length, bufLength, 2)) {
-          return null
-        }
-        result = buf.slice(offset + 2, offset + 2 + length)
-        return buildDecodeResult(result, 2 + length)
-      case 0xc5:
-        // buffers up to 2^16 - 1 bytes
-        length = buf.readUInt16BE(offset + 1)
-        if (!isValidDataSize(length, bufLength, 3)) {
-          return null
-        }
-        result = buf.slice(offset + 3, offset + 3 + length)
-        return buildDecodeResult(result, 3 + length)
-      case 0xc6:
-        // buffers up to 2^32 - 1 bytes
-        length = buf.readUInt32BE(offset + 1)
-        if (!isValidDataSize(length, bufLength, 5)) {
-          return null
-        }
-        result = buf.slice(offset + 5, offset + 5 + length)
-        return buildDecodeResult(result, 5 + length)
-      case 0xdc:
-        // array up to 2^16 elements - 2 bytes
-        if (bufLength < 3) {
-          return null
-        }
+  if (sync) {
+    // defer the callback if we are being called synchronously
+    // to avoid piling up things on the stack
+    pna.nextTick(cb, er);
+    // this can emit finish, and it will always happen
+    // after error
+    pna.nextTick(finishMaybe, stream, state);
+    stream._writableState.errorEmitted = true;
+    stream.emit('error', er);
+  } else {
+    // the caller expect this to happen before if
+    // it is async
+    cb(er);
+    stream._writableState.errorEmitted = true;
+    stream.emit('error', er);
+    // this can emit finish, but finish must
+    // always follow error
+    finishMaybe(stream, state);
+  }
+}
 
-        length = buf.readUInt16BE(offset + 1)
-        return decodeArray(buf, offset, length, 3)
-      case 0xdd:
-        // array up to 2^32 elements - 4 bytes
-        if (bufLength < 5) {
-          return null
-        }
+function onwriteStateUpdate(state) {
+  state.writing = false;
+  state.writecb = null;
+  state.length -= state.writelen;
+  state.writelen = 0;
+}
 
-        length = buf.readUInt32BE(offset + 1)
-        return decodeArray(buf, offset, length, 5)
-      case 0xde:
-        // maps up to 2^16 elements - 2 bytes
-        length = buf.readUInt16BE(offset + 1)
-        return decodeMap(buf, offset, length, 3)
-      case 0xdf:
-        length = buf.readUInt32BE(offset + 1)
-        return decodeMap(buf, offset, length, 5)
-      case 0xd4:
-        return decodeFixExt(buf, offset, 1)
-      case 0xd5:
-        return decodeFixExt(buf, offset, 2)
-      case 0xd6:
-        return decodeFixExt(buf, offset, 4)
-      case 0xd7:
-        return decodeFixExt(buf, offset, 8)
-      case 0xd8:
-        return decodeFixExt(buf, offset, 16)
-      case 0xc7:
-        // ext up to 2^8 - 1 bytes
-        length = buf.readUInt8(offset + 1)
-        type = buf.readUInt8(offset + 2)
-        if (!isValidDataSize(length, bufLength, 3)) {
-          return null
-        }
-        return decodeExt(buf, offset, type, length, 3)
-      case 0xc8:
-        // ext up to 2^16 - 1 bytes
-        length = buf.readUInt16BE(offset + 1)
-        type = buf.readUInt8(offset + 3)
-        if (!isValidDataSize(length, bufLength, 4)) {
-          return null
-        }
-        return decodeExt(buf, offset, type, length, 4)
-      case 0xc9:
-        // ext up to 2^32 - 1 bytes
-        length = buf.readUInt32BE(offset + 1)
-        type = buf.readUInt8(offset + 5)
-        if (!isValidDataSize(length, bufLength, 6)) {
-          return null
-        }
-        return decodeExt(buf, offset, type, length, 6)
+function onwrite(stream, er) {
+  var state = stream._writableState;
+  var sync = state.sync;
+  var cb = state.writecb;
+
+  onwriteStateUpdate(state);
+
+  if (er) onwriteError(stream, state, sync, er, cb);else {
+    // Check if we're actually ready to finish, but don't emit yet
+    var finished = needFinish(state);
+
+    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
+      clearBuffer(stream, state);
     }
 
-    if ((first & 0xf0) === 0x90) {
-      // we have an array with less than 15 elements
-      length = first & 0x0f
-      return decodeArray(buf, offset, length, 1)
-    } else if ((first & 0xf0) === 0x80) {
-      // we have a map with less than 15 elements
-      length = first & 0x0f
-      return decodeMap(buf, offset, length, 1)
-    } else if ((first & 0xe0) === 0xa0) {
-      // fixstr up to 31 bytes
-      length = first & 0x1f
-      if (isValidDataSize(length, bufLength, 1)) {
-        result = buf.toString('utf8', offset + 1, offset + length + 1)
-        return buildDecodeResult(result, length + 1)
-      } else {
-        return null
-      }
-    } else if (first >= 0xe0) {
-      // 5 bits negative ints
-      result = first - 0x100
-      return buildDecodeResult(result, 1)
-    } else if (first < 0x80) {
-      // 7-bits positive ints
-      return buildDecodeResult(first, 1)
+    if (sync) {
+      /*<replacement>*/
+      asyncWrite(afterWrite, stream, state, finished, cb);
+      /*</replacement>*/
     } else {
-      throw new Error('not implemented yet')
+      afterWrite(stream, state, finished, cb);
     }
   }
+}
 
-  function readInt64BE (buf, offset) {
-    var negate = (buf[offset] & 0x80) == 0x80 // eslint-disable-line
-
-    if (negate) {
-      var carry = 1
-      for (var i = offset + 7; i >= offset; i--) {
-        var v = (buf[i] ^ 0xff) + carry
-        buf[i] = v & 0xff
-        carry = v >> 8
-      }
-    }
+function afterWrite(stream, state, finished, cb) {
+  if (!finished) onwriteDrain(stream, state);
+  state.pendingcb--;
+  cb();
+  finishMaybe(stream, state);
+}
 
-    var hi = buf.readUInt32BE(offset + 0)
-    var lo = buf.readUInt32BE(offset + 4)
-    return (hi * 4294967296 + lo) * (negate ? -1 : +1)
+// Must force callback to be called on nextTick, so that we don't
+// emit 'drain' before the write() consumer gets the 'false' return
+// value, and has a chance to attach a 'drain' listener.
+function onwriteDrain(stream, state) {
+  if (state.length === 0 && state.needDrain) {
+    state.needDrain = false;
+    stream.emit('drain');
   }
+}
 
-  function decodeArray (buf, offset, length, headerLength) {
-    var result = []
-    var i
-    var totalBytesConsumed = 0
+// if there's something in the buffer waiting, then process it
+function clearBuffer(stream, state) {
+  state.bufferProcessing = true;
+  var entry = state.bufferedRequest;
 
-    offset += headerLength
-    for (i = 0; i < length; i++) {
-      var decodeResult = tryDecode(buf, offset)
-      if (decodeResult) {
-        result.push(decodeResult.value)
-        offset += decodeResult.bytesConsumed
-        totalBytesConsumed += decodeResult.bytesConsumed
-      } else {
-        return null
-      }
+  if (stream._writev && entry && entry.next) {
+    // Fast case, write everything using _writev()
+    var l = state.bufferedRequestCount;
+    var buffer = new Array(l);
+    var holder = state.corkedRequestsFree;
+    holder.entry = entry;
+
+    var count = 0;
+    var allBuffers = true;
+    while (entry) {
+      buffer[count] = entry;
+      if (!entry.isBuf) allBuffers = false;
+      entry = entry.next;
+      count += 1;
     }
-    return buildDecodeResult(result, headerLength + totalBytesConsumed)
-  }
+    buffer.allBuffers = allBuffers;
 
-  function decodeMap (buf, offset, length, headerLength) {
-    var result = {}
-    var key
-    var i
-    var totalBytesConsumed = 0
+    doWrite(stream, state, true, state.length, buffer, '', holder.finish);
 
-    offset += headerLength
-    for (i = 0; i < length; i++) {
-      var keyResult = tryDecode(buf, offset)
-      if (keyResult) {
-        offset += keyResult.bytesConsumed
-        var valueResult = tryDecode(buf, offset)
-        if (valueResult) {
-          key = keyResult.value
-          result[key] = valueResult.value
-          offset += valueResult.bytesConsumed
-          totalBytesConsumed += (keyResult.bytesConsumed + valueResult.bytesConsumed)
-        } else {
-          return null
-        }
-      } else {
-        return null
+    // doWrite is almost always async, defer these to save a bit of time
+    // as the hot path ends with doWrite
+    state.pendingcb++;
+    state.lastBufferedRequest = null;
+    if (holder.next) {
+      state.corkedRequestsFree = holder.next;
+      holder.next = null;
+    } else {
+      state.corkedRequestsFree = new CorkedRequest(state);
+    }
+    state.bufferedRequestCount = 0;
+  } else {
+    // Slow case, write chunks one-by-one
+    while (entry) {
+      var chunk = entry.chunk;
+      var encoding = entry.encoding;
+      var cb = entry.callback;
+      var len = state.objectMode ? 1 : chunk.length;
+
+      doWrite(stream, state, false, len, chunk, encoding, cb);
+      entry = entry.next;
+      state.bufferedRequestCount--;
+      // if we didn't call the onwrite immediately, then
+      // it means that we need to wait until it does.
+      // also, that means that the chunk and cb are currently
+      // being processed, so move the buffer counter past them.
+      if (state.writing) {
+        break;
       }
     }
-    return buildDecodeResult(result, headerLength + totalBytesConsumed)
-  }
 
-  function decodeFixExt (buf, offset, size) {
-    var type = buf.readInt8(offset + 1) // Signed
-    return decodeExt(buf, offset, type, size, 2)
+    if (entry === null) state.lastBufferedRequest = null;
   }
 
-  function decodeTimestamp (buf, size, headerSize) {
-    var seconds
-    var nanoseconds = 0
+  state.bufferedRequest = entry;
+  state.bufferProcessing = false;
+}
 
-    switch (size) {
-      case 4:
-        // timestamp 32 stores the number of seconds that have elapsed since 1970-01-01 00:00:00 UTC in an 32-bit unsigned integer
-        seconds = buf.readUInt32BE(0)
-        break
+Writable.prototype._write = function (chunk, encoding, cb) {
+  cb(new Error('_write() is not implemented'));
+};
 
-      case 8:
-        // Timestamp 64 stores the number of seconds and nanoseconds that have elapsed
-        // since 1970-01-01 00:00:00 UTC in 32-bit unsigned integers, split 30/34 bits
-        var upper = buf.readUInt32BE(0)
-        var lower = buf.readUInt32BE(4)
-        nanoseconds = upper / 4
-        seconds = ((upper & 0x03) * Math.pow(2, 32)) + lower // If we use bitwise operators, we get truncated to 32bits
-        break
+Writable.prototype._writev = null;
 
-      case 12:
-        throw new Error('timestamp 96 is not yet implemented')
-    }
+Writable.prototype.end = function (chunk, encoding, cb) {
+  var state = this._writableState;
 
-    var millis = (seconds * 1000) + Math.round(nanoseconds / 1E6)
-    return buildDecodeResult(new Date(millis), size + headerSize)
+  if (typeof chunk === 'function') {
+    cb = chunk;
+    chunk = null;
+    encoding = null;
+  } else if (typeof encoding === 'function') {
+    cb = encoding;
+    encoding = null;
   }
 
-  function decodeExt (buf, offset, type, size, headerSize) {
-    var i,
-      toDecode
+  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
 
-    offset += headerSize
+  // .end() fully uncorks
+  if (state.corked) {
+    state.corked = 1;
+    this.uncork();
+  }
 
-    // Pre-defined
-    if (type < 0) { // Reserved for future extensions
-      switch (type) {
-        case -1: // Tiemstamp https://github.com/msgpack/msgpack/blob/master/spec.md#timestamp-extension-type
-          toDecode = buf.slice(offset, offset + size)
-          return decodeTimestamp(toDecode, size, headerSize)
-      }
+  // ignore unnecessary end() calls.
+  if (!state.ending && !state.finished) endWritable(this, state, cb);
+};
+
+function needFinish(state) {
+  return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
+}
+function callFinal(stream, state) {
+  stream._final(function (err) {
+    state.pendingcb--;
+    if (err) {
+      stream.emit('error', err);
     }
+    state.prefinished = true;
+    stream.emit('prefinish');
+    finishMaybe(stream, state);
+  });
+}
+function prefinish(stream, state) {
+  if (!state.prefinished && !state.finalCalled) {
+    if (typeof stream._final === 'function') {
+      state.pendingcb++;
+      state.finalCalled = true;
+      pna.nextTick(callFinal, stream, state);
+    } else {
+      state.prefinished = true;
+      stream.emit('prefinish');
+    }
+  }
+}
 
-    for (i = 0; i < decodingTypes.length; i++) {
-      if (type === decodingTypes[i].type) {
-        toDecode = buf.slice(offset, offset + size)
-        var value = decodingTypes[i].decode(toDecode)
-        return buildDecodeResult(value, headerSize + size)
-      }
+function finishMaybe(stream, state) {
+  var need = needFinish(state);
+  if (need) {
+    prefinish(stream, state);
+    if (state.pendingcb === 0) {
+      state.finished = true;
+      stream.emit('finish');
     }
+  }
+  return need;
+}
 
-    throw new Error('unable to find ext type ' + type)
+function endWritable(stream, state, cb) {
+  state.ending = true;
+  finishMaybe(stream, state);
+  if (cb) {
+    if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
   }
+  state.ended = true;
+  stream.writable = false;
 }
 
-module.exports.IncompleteBufferError = IncompleteBufferError
+function onCorkedFinish(corkReq, state, err) {
+  var entry = corkReq.entry;
+  corkReq.entry = null;
+  while (entry) {
+    var cb = entry.callback;
+    state.pendingcb--;
+    cb(err);
+    entry = entry.next;
+  }
+  if (state.corkedRequestsFree) {
+    state.corkedRequestsFree.next = corkReq;
+  } else {
+    state.corkedRequestsFree = corkReq;
+  }
+}
 
-},{"bl":10,"util":53}],16:[function(require,module,exports){
-'use strict'
+Object.defineProperty(Writable.prototype, 'destroyed', {
+  get: function () {
+    if (this._writableState === undefined) {
+      return false;
+    }
+    return this._writableState.destroyed;
+  },
+  set: function (value) {
+    // we ignore the value if the stream
+    // has not been initialized yet
+    if (!this._writableState) {
+      return;
+    }
 
-var Buffer = require('safe-buffer').Buffer
-var bl = require('bl')
+    // backward compatibility, the user is explicitly
+    // managing destroyed
+    this._writableState.destroyed = value;
+  }
+});
 
-module.exports = function buildEncode (encodingTypes, forceFloat64, compatibilityMode, disableTimestampEncoding) {
-  function encode (obj, avoidSlice) {
-    var buf
-    var len
+Writable.prototype.destroy = destroyImpl.destroy;
+Writable.prototype._undestroy = destroyImpl.undestroy;
+Writable.prototype._destroy = function (err, cb) {
+  this.end();
+  cb(err);
+};
+}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {},require("timers").setImmediate)
+},{"./_stream_duplex":10,"./internal/streams/destroy":16,"./internal/streams/stream":17,"_process":70,"core-util-is":2,"inherits":3,"process-nextick-args":9,"safe-buffer":19,"timers":71,"util-deprecate":21}],15:[function(require,module,exports){
+'use strict';
 
-    if (obj === undefined) {
-      throw new Error('undefined is not encodable in msgpack!')
-    } else if (isNaN(obj)) {
-      throw new Error('NaN is not encodable in msgpack!')
-    } else if (obj === null) {
-      buf = Buffer.allocUnsafe(1)
-      buf[0] = 0xc0
-    } else if (obj === true) {
-      buf = Buffer.allocUnsafe(1)
-      buf[0] = 0xc3
-    } else if (obj === false) {
-      buf = Buffer.allocUnsafe(1)
-      buf[0] = 0xc2
-    } else if (typeof obj === 'string') {
-      len = Buffer.byteLength(obj)
-      if (len < 32) {
-        buf = Buffer.allocUnsafe(1 + len)
-        buf[0] = 0xa0 | len
-        if (len > 0) {
-          buf.write(obj, 1)
-        }
-      } else if (len <= 0xff && !compatibilityMode) {
-        // str8, but only when not in compatibility mode
-        buf = Buffer.allocUnsafe(2 + len)
-        buf[0] = 0xd9
-        buf[1] = len
-        buf.write(obj, 2)
-      } else if (len <= 0xffff) {
-        buf = Buffer.allocUnsafe(3 + len)
-        buf[0] = 0xda
-        buf.writeUInt16BE(len, 1)
-        buf.write(obj, 3)
-      } else {
-        buf = Buffer.allocUnsafe(5 + len)
-        buf[0] = 0xdb
-        buf.writeUInt32BE(len, 1)
-        buf.write(obj, 5)
-      }
-    } else if (obj && (obj.readUInt32LE || obj instanceof Uint8Array)) {
-      if (obj instanceof Uint8Array) {
-        obj = Buffer.from(obj)
-      }
-      // weird hack to support Buffer
-      // and Buffer-like objects
-      if (obj.length <= 0xff) {
-        buf = Buffer.allocUnsafe(2)
-        buf[0] = 0xc4
-        buf[1] = obj.length
-      } else if (obj.length <= 0xffff) {
-        buf = Buffer.allocUnsafe(3)
-        buf[0] = 0xc5
-        buf.writeUInt16BE(obj.length, 1)
-      } else {
-        buf = Buffer.allocUnsafe(5)
-        buf[0] = 0xc6
-        buf.writeUInt32BE(obj.length, 1)
-      }
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
 
-      buf = bl([buf, obj])
-    } else if (Array.isArray(obj)) {
-      if (obj.length < 16) {
-        buf = Buffer.allocUnsafe(1)
-        buf[0] = 0x90 | obj.length
-      } else if (obj.length < 65536) {
-        buf = Buffer.allocUnsafe(3)
-        buf[0] = 0xdc
-        buf.writeUInt16BE(obj.length, 1)
-      } else {
-        buf = Buffer.allocUnsafe(5)
-        buf[0] = 0xdd
-        buf.writeUInt32BE(obj.length, 1)
-      }
+var Buffer = require('safe-buffer').Buffer;
+var util = require('util');
 
-      buf = obj.reduce(function (acc, obj) {
-        acc.append(encode(obj, true))
-        return acc
-      }, bl().append(buf))
-    } else if (!disableTimestampEncoding && typeof obj.getDate === 'function') {
-      return encodeDate(obj)
-    } else if (typeof obj === 'object') {
-      buf = encodeExt(obj) || encodeObject(obj)
-    } else if (typeof obj === 'number') {
-      if (isFloat(obj)) {
-        return encodeFloat(obj, forceFloat64)
-      } else if (obj >= 0) {
-        if (obj < 128) {
-          buf = Buffer.allocUnsafe(1)
-          buf[0] = obj
-        } else if (obj < 256) {
-          buf = Buffer.allocUnsafe(2)
-          buf[0] = 0xcc
-          buf[1] = obj
-        } else if (obj < 65536) {
-          buf = Buffer.allocUnsafe(3)
-          buf[0] = 0xcd
-          buf.writeUInt16BE(obj, 1)
-        } else if (obj <= 0xffffffff) {
-          buf = Buffer.allocUnsafe(5)
-          buf[0] = 0xce
-          buf.writeUInt32BE(obj, 1)
-        } else if (obj <= 9007199254740991) {
-          buf = Buffer.allocUnsafe(9)
-          buf[0] = 0xcf
-          write64BitUint(buf, obj)
-        } else {
-          return encodeFloat(obj, true)
-        }
-      } else {
-        if (obj >= -32) {
-          buf = Buffer.allocUnsafe(1)
-          buf[0] = 0x100 + obj
-        } else if (obj >= -128) {
-          buf = Buffer.allocUnsafe(2)
-          buf[0] = 0xd0
-          buf.writeInt8(obj, 1)
-        } else if (obj >= -32768) {
-          buf = Buffer.allocUnsafe(3)
-          buf[0] = 0xd1
-          buf.writeInt16BE(obj, 1)
-        } else if (obj > -214748365) {
-          buf = Buffer.allocUnsafe(5)
-          buf[0] = 0xd2
-          buf.writeInt32BE(obj, 1)
-        } else if (obj >= -9007199254740991) {
-          buf = Buffer.allocUnsafe(9)
-          buf[0] = 0xd3
-          write64BitInt(buf, 1, obj)
-        } else {
-          return encodeFloat(obj, true)
-        }
-      }
-    }
+function copyBuffer(src, target, offset) {
+  src.copy(target, offset);
+}
 
-    if (!buf) {
-      throw new Error('not implemented yet')
-    }
+module.exports = function () {
+  function BufferList() {
+    _classCallCheck(this, BufferList);
 
-    if (avoidSlice) {
-      return buf
-    } else {
-      return buf.slice()
-    }
+    this.head = null;
+    this.tail = null;
+    this.length = 0;
   }
 
-  function encodeDate (dt) {
-    var encoded
-    var millis = dt * 1
-    var seconds = Math.floor(millis / 1000)
-    var nanos = (millis - (seconds * 1000)) * 1E6
+  BufferList.prototype.push = function push(v) {
+    var entry = { data: v, next: null };
+    if (this.length > 0) this.tail.next = entry;else this.head = entry;
+    this.tail = entry;
+    ++this.length;
+  };
 
-    if (nanos || seconds > 0xFFFFFFFF) {
-      // Timestamp64
-      encoded = Buffer.allocUnsafe(10)
-      encoded[0] = 0xd7
-      encoded[1] = -1
+  BufferList.prototype.unshift = function unshift(v) {
+    var entry = { data: v, next: this.head };
+    if (this.length === 0) this.tail = entry;
+    this.head = entry;
+    ++this.length;
+  };
 
-      var upperNanos = ((nanos * 4))
-      var upperSeconds = seconds / Math.pow(2, 32)
-      var upper = (upperNanos + upperSeconds) & 0xFFFFFFFF
-      var lower = seconds & 0xFFFFFFFF
+  BufferList.prototype.shift = function shift() {
+    if (this.length === 0) return;
+    var ret = this.head.data;
+    if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
+    --this.length;
+    return ret;
+  };
 
-      encoded.writeInt32BE(upper, 2)
-      encoded.writeInt32BE(lower, 6)
-    } else {
-      // Timestamp32
-      encoded = Buffer.allocUnsafe(6)
-      encoded[0] = 0xd6
-      encoded[1] = -1
-      encoded.writeUInt32BE(Math.floor(millis / 1000), 2)
-    }
-    return bl().append(encoded)
-  }
+  BufferList.prototype.clear = function clear() {
+    this.head = this.tail = null;
+    this.length = 0;
+  };
 
-  function encodeExt (obj) {
-    var i
-    var encoded
-    var length = -1
-    var headers = []
+  BufferList.prototype.join = function join(s) {
+    if (this.length === 0) return '';
+    var p = this.head;
+    var ret = '' + p.data;
+    while (p = p.next) {
+      ret += s + p.data;
+    }return ret;
+  };
 
-    for (i = 0; i < encodingTypes.length; i++) {
-      if (encodingTypes[i].check(obj)) {
-        encoded = encodingTypes[i].encode(obj)
-        break
-      }
+  BufferList.prototype.concat = function concat(n) {
+    if (this.length === 0) return Buffer.alloc(0);
+    if (this.length === 1) return this.head.data;
+    var ret = Buffer.allocUnsafe(n >>> 0);
+    var p = this.head;
+    var i = 0;
+    while (p) {
+      copyBuffer(p.data, ret, i);
+      i += p.data.length;
+      p = p.next;
     }
+    return ret;
+  };
 
-    if (!encoded) {
-      return null
-    }
+  return BufferList;
+}();
 
-    // we subtract 1 because the length does not
-    // include the type
-    length = encoded.length - 1
+if (util && util.inspect && util.inspect.custom) {
+  module.exports.prototype[util.inspect.custom] = function () {
+    var obj = util.inspect({ length: this.length });
+    return this.constructor.name + ' ' + obj;
+  };
+}
+},{"safe-buffer":19,"util":64}],16:[function(require,module,exports){
+'use strict';
 
-    if (length === 1) {
-      headers.push(0xd4)
-    } else if (length === 2) {
-      headers.push(0xd5)
-    } else if (length === 4) {
-      headers.push(0xd6)
-    } else if (length === 8) {
-      headers.push(0xd7)
-    } else if (length === 16) {
-      headers.push(0xd8)
-    } else if (length < 256) {
-      headers.push(0xc7)
-      headers.push(length)
-    } else if (length < 0x10000) {
-      headers.push(0xc8)
-      headers.push(length >> 8)
-      headers.push(length & 0x00ff)
-    } else {
-      headers.push(0xc9)
-      headers.push(length >> 24)
-      headers.push((length >> 16) & 0x000000ff)
-      headers.push((length >> 8) & 0x000000ff)
-      headers.push(length & 0x000000ff)
-    }
+/*<replacement>*/
 
-    return bl().append(Buffer.from(headers)).append(encoded)
-  }
+var pna = require('process-nextick-args');
+/*</replacement>*/
 
-  function encodeObject (obj) {
-    var acc = []
-    var length = 0
-    var key
-    var header
+// undocumented cb() API, needed for core, not for public API
+function destroy(err, cb) {
+  var _this = this;
 
-    for (key in obj) {
-      if (obj.hasOwnProperty(key) &&
-        obj[key] !== undefined &&
-        typeof obj[key] !== 'function') {
-        ++length
-        acc.push(encode(key, true))
-        acc.push(encode(obj[key], true))
-      }
-    }
+  var readableDestroyed = this._readableState && this._readableState.destroyed;
+  var writableDestroyed = this._writableState && this._writableState.destroyed;
 
-    if (length < 16) {
-      header = Buffer.allocUnsafe(1)
-      header[0] = 0x80 | length
-    } else if (length < 0xFFFF) {
-      header = Buffer.allocUnsafe(3)
-      header[0] = 0xde
-      header.writeUInt16BE(length, 1)
-    } else {
-      header = Buffer.allocUnsafe(5)
-      header[0] = 0xdf
-      header.writeUInt32BE(length, 1)
+  if (readableDestroyed || writableDestroyed) {
+    if (cb) {
+      cb(err);
+    } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
+      pna.nextTick(emitErrorNT, this, err);
     }
+    return this;
+  }
 
-    acc.unshift(header)
+  // we set destroyed to true before firing error callbacks in order
+  // to make it re-entrance safe in case destroy() is called within callbacks
 
-    var result = acc.reduce(function (list, buf) {
-      return list.append(buf)
-    }, bl())
+  if (this._readableState) {
+    this._readableState.destroyed = true;
+  }
 
-    return result
+  // if this is a duplex stream mark the writable part as destroyed as well
+  if (this._writableState) {
+    this._writableState.destroyed = true;
   }
 
-  return encode
+  this._destroy(err || null, function (err) {
+    if (!cb && err) {
+      pna.nextTick(emitErrorNT, _this, err);
+      if (_this._writableState) {
+        _this._writableState.errorEmitted = true;
+      }
+    } else if (cb) {
+      cb(err);
+    }
+  });
+
+  return this;
 }
 
-function write64BitUint (buf, obj) {
-  // Write long byte by byte, in big-endian order
-  for (var currByte = 7; currByte >= 0; currByte--) {
-    buf[currByte + 1] = (obj & 0xff)
-    obj = obj / 256
+function undestroy() {
+  if (this._readableState) {
+    this._readableState.destroyed = false;
+    this._readableState.reading = false;
+    this._readableState.ended = false;
+    this._readableState.endEmitted = false;
+  }
+
+  if (this._writableState) {
+    this._writableState.destroyed = false;
+    this._writableState.ended = false;
+    this._writableState.ending = false;
+    this._writableState.finished = false;
+    this._writableState.errorEmitted = false;
   }
 }
 
-function write64BitInt (buf, offset, num) {
-  var negate = num < 0
+function emitErrorNT(self, err) {
+  self.emit('error', err);
+}
 
-  if (negate) {
-    num = Math.abs(num)
-  }
+module.exports = {
+  destroy: destroy,
+  undestroy: undestroy
+};
+},{"process-nextick-args":9}],17:[function(require,module,exports){
+module.exports = require('events').EventEmitter;
 
-  var lo = num % 4294967296
-  var hi = num / 4294967296
-  buf.writeUInt32BE(Math.floor(hi), offset + 0)
-  buf.writeUInt32BE(lo, offset + 4)
+},{"events":66}],18:[function(require,module,exports){
+exports = module.exports = require('./lib/_stream_readable.js');
+exports.Stream = exports;
+exports.Readable = exports;
+exports.Writable = require('./lib/_stream_writable.js');
+exports.Duplex = require('./lib/_stream_duplex.js');
+exports.Transform = require('./lib/_stream_transform.js');
+exports.PassThrough = require('./lib/_stream_passthrough.js');
 
-  if (negate) {
-    var carry = 1
-    for (var i = offset + 7; i >= offset; i--) {
-      var v = (buf[i] ^ 0xff) + carry
-      buf[i] = v & 0xff
-      carry = v >> 8
-    }
+},{"./lib/_stream_duplex.js":10,"./lib/_stream_passthrough.js":11,"./lib/_stream_readable.js":12,"./lib/_stream_transform.js":13,"./lib/_stream_writable.js":14}],19:[function(require,module,exports){
+/* eslint-disable node/no-deprecated-api */
+var buffer = require('buffer')
+var Buffer = buffer.Buffer
+
+// alternative to using Object.keys for old browsers
+function copyProps (src, dst) {
+  for (var key in src) {
+    dst[key] = src[key]
   }
 }
-
-function isFloat (n) {
-  return n % 1 !== 0
+if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
+  module.exports = buffer
+} else {
+  // Copy properties from require('buffer')
+  copyProps(buffer, exports)
+  exports.Buffer = SafeBuffer
 }
 
-function isNaN (n) {
-  /* eslint-disable no-self-compare */
-  return n !== n && typeof n === 'number'
-  /* eslint-enable no-self-compare */
+function SafeBuffer (arg, encodingOrOffset, length) {
+  return Buffer(arg, encodingOrOffset, length)
 }
 
-function encodeFloat (obj, forceFloat64) {
-  var useDoublePrecision = true
+// Copy static methods from Buffer
+copyProps(Buffer, SafeBuffer)
 
-  // If `fround` is supported, we can check if a float
-  // is double or single precision by rounding the object
-  // to single precision and comparing the difference.
-  // If it's not supported, it's safer to use a 64 bit
-  // float so we don't lose precision without meaning to.
-  if (Math.fround) {
-    useDoublePrecision = Math.fround(obj) !== obj
+SafeBuffer.from = function (arg, encodingOrOffset, length) {
+  if (typeof arg === 'number') {
+    throw new TypeError('Argument must not be a number')
   }
+  return Buffer(arg, encodingOrOffset, length)
+}
 
-  if (forceFloat64) {
-    useDoublePrecision = true
+SafeBuffer.alloc = function (size, fill, encoding) {
+  if (typeof size !== 'number') {
+    throw new TypeError('Argument must be a number')
   }
-
-  var buf
-
-  if (useDoublePrecision) {
-    buf = Buffer.allocUnsafe(9)
-    buf[0] = 0xcb
-    buf.writeDoubleBE(obj, 1)
+  var buf = Buffer(size)
+  if (fill !== undefined) {
+    if (typeof encoding === 'string') {
+      buf.fill(fill, encoding)
+    } else {
+      buf.fill(fill)
+    }
   } else {
-    buf = Buffer.allocUnsafe(5)
-    buf[0] = 0xca
-    buf.writeFloatBE(obj, 1)
+    buf.fill(0)
   }
-
   return buf
 }
 
-},{"bl":10,"safe-buffer":28}],17:[function(require,module,exports){
-'use strict'
+SafeBuffer.allocUnsafe = function (size) {
+  if (typeof size !== 'number') {
+    throw new TypeError('Argument must be a number')
+  }
+  return Buffer(size)
+}
 
-var Transform = require('readable-stream').Transform
-var inherits = require('inherits')
-var bl = require('bl')
+SafeBuffer.allocUnsafeSlow = function (size) {
+  if (typeof size !== 'number') {
+    throw new TypeError('Argument must be a number')
+  }
+  return buffer.SlowBuffer(size)
+}
 
-function Base (opts) {
-  opts = opts || {}
+},{"buffer":65}],20:[function(require,module,exports){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-  opts.objectMode = true
-  opts.highWaterMark = 16
+'use strict';
 
-  Transform.call(this, opts)
+/*<replacement>*/
 
-  this._msgpack = opts.msgpack
-}
+var Buffer = require('safe-buffer').Buffer;
+/*</replacement>*/
 
-inherits(Base, Transform)
+var isEncoding = Buffer.isEncoding || function (encoding) {
+  encoding = '' + encoding;
+  switch (encoding && encoding.toLowerCase()) {
+    case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
+      return true;
+    default:
+      return false;
+  }
+};
 
-function Encoder (opts) {
-  if (!(this instanceof Encoder)) {
-    opts = opts || {}
-    opts.msgpack = this
-    return new Encoder(opts)
+function _normalizeEncoding(enc) {
+  if (!enc) return 'utf8';
+  var retried;
+  while (true) {
+    switch (enc) {
+      case 'utf8':
+      case 'utf-8':
+        return 'utf8';
+      case 'ucs2':
+      case 'ucs-2':
+      case 'utf16le':
+      case 'utf-16le':
+        return 'utf16le';
+      case 'latin1':
+      case 'binary':
+        return 'latin1';
+      case 'base64':
+      case 'ascii':
+      case 'hex':
+        return enc;
+      default:
+        if (retried) return; // undefined
+        enc = ('' + enc).toLowerCase();
+        retried = true;
+    }
   }
+};
 
-  Base.call(this, opts)
-  this._wrap = ('wrap' in opts) && opts.wrap
+// Do not cache `Buffer.isEncoding` when checking encoding names as some
+// modules monkey-patch it to support additional encodings
+function normalizeEncoding(enc) {
+  var nenc = _normalizeEncoding(enc);
+  if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
+  return nenc || enc;
 }
 
-inherits(Encoder, Base)
+// StringDecoder provides an interface for efficiently splitting a series of
+// buffers into a series of JS strings without breaking apart multi-byte
+// characters.
+exports.StringDecoder = StringDecoder;
+function StringDecoder(encoding) {
+  this.encoding = normalizeEncoding(encoding);
+  var nb;
+  switch (this.encoding) {
+    case 'utf16le':
+      this.text = utf16Text;
+      this.end = utf16End;
+      nb = 4;
+      break;
+    case 'utf8':
+      this.fillLast = utf8FillLast;
+      nb = 4;
+      break;
+    case 'base64':
+      this.text = base64Text;
+      this.end = base64End;
+      nb = 3;
+      break;
+    default:
+      this.write = simpleWrite;
+      this.end = simpleEnd;
+      return;
+  }
+  this.lastNeed = 0;
+  this.lastTotal = 0;
+  this.lastChar = Buffer.allocUnsafe(nb);
+}
 
-Encoder.prototype._transform = function (obj, enc, done) {
-  var buf = null
+StringDecoder.prototype.write = function (buf) {
+  if (buf.length === 0) return '';
+  var r;
+  var i;
+  if (this.lastNeed) {
+    r = this.fillLast(buf);
+    if (r === undefined) return '';
+    i = this.lastNeed;
+    this.lastNeed = 0;
+  } else {
+    i = 0;
+  }
+  if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
+  return r || '';
+};
 
-  try {
-    buf = this._msgpack.encode(this._wrap ? obj.value : obj).slice(0)
-  } catch (err) {
-    this.emit('error', err)
-    return done()
+StringDecoder.prototype.end = utf8End;
+
+// Returns only complete characters in a Buffer
+StringDecoder.prototype.text = utf8Text;
+
+// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
+StringDecoder.prototype.fillLast = function (buf) {
+  if (this.lastNeed <= buf.length) {
+    buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
+    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
   }
+  buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
+  this.lastNeed -= buf.length;
+};
 
-  this.push(buf)
-  done()
+// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
+// continuation byte. If an invalid byte is detected, -2 is returned.
+function utf8CheckByte(byte) {
+  if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
+  return byte >> 6 === 0x02 ? -1 : -2;
 }
 
-function Decoder (opts) {
-  if (!(this instanceof Decoder)) {
-    opts = opts || {}
-    opts.msgpack = this
-    return new Decoder(opts)
+// Checks at most 3 bytes at the end of a Buffer in order to detect an
+// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
+// needed to complete the UTF-8 character (if applicable) are returned.
+function utf8CheckIncomplete(self, buf, i) {
+  var j = buf.length - 1;
+  if (j < i) return 0;
+  var nb = utf8CheckByte(buf[j]);
+  if (nb >= 0) {
+    if (nb > 0) self.lastNeed = nb - 1;
+    return nb;
+  }
+  if (--j < i || nb === -2) return 0;
+  nb = utf8CheckByte(buf[j]);
+  if (nb >= 0) {
+    if (nb > 0) self.lastNeed = nb - 2;
+    return nb;
+  }
+  if (--j < i || nb === -2) return 0;
+  nb = utf8CheckByte(buf[j]);
+  if (nb >= 0) {
+    if (nb > 0) {
+      if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
+    }
+    return nb;
   }
-
-  Base.call(this, opts)
-
-  this._chunks = bl()
-  this._wrap = ('wrap' in opts) && opts.wrap
+  return 0;
 }
 
-inherits(Decoder, Base)
-
-Decoder.prototype._transform = function (buf, enc, done) {
-  if (buf) {
-    this._chunks.append(buf)
+// Validates as many continuation bytes for a multi-byte UTF-8 character as
+// needed or are available. If we see a non-continuation byte where we expect
+// one, we "replace" the validated continuation bytes we've seen so far with
+// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
+// behavior. The continuation byte check is included three times in the case
+// where all of the continuation bytes for a character exist in the same buffer.
+// It is also done this way as a slight performance increase instead of using a
+// loop.
+function utf8CheckExtraBytes(self, buf, p) {
+  if ((buf[0] & 0xC0) !== 0x80) {
+    self.lastNeed = 0;
+    return '\ufffd';
   }
-
-  try {
-    var result = this._msgpack.decode(this._chunks)
-    if (this._wrap) {
-      result = {value: result}
+  if (self.lastNeed > 1 && buf.length > 1) {
+    if ((buf[1] & 0xC0) !== 0x80) {
+      self.lastNeed = 1;
+      return '\ufffd';
     }
-    this.push(result)
-  } catch (err) {
-    if (err instanceof this._msgpack.IncompleteBufferError) {
-      done()
-    } else {
-      this.emit('error', err)
+    if (self.lastNeed > 2 && buf.length > 2) {
+      if ((buf[2] & 0xC0) !== 0x80) {
+        self.lastNeed = 2;
+        return '\ufffd';
+      }
     }
-    return
   }
+}
 
-  if (this._chunks.length > 0) {
-    this._transform(null, enc, done)
-  } else {
-    done()
+// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
+function utf8FillLast(buf) {
+  var p = this.lastTotal - this.lastNeed;
+  var r = utf8CheckExtraBytes(this, buf, p);
+  if (r !== undefined) return r;
+  if (this.lastNeed <= buf.length) {
+    buf.copy(this.lastChar, p, 0, this.lastNeed);
+    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
   }
+  buf.copy(this.lastChar, p, 0, buf.length);
+  this.lastNeed -= buf.length;
 }
 
-module.exports.decoder = Decoder
-module.exports.encoder = Encoder
-
-},{"bl":10,"inherits":12,"readable-stream":27}],18:[function(require,module,exports){
-(function (process){
-'use strict';
+// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
+// partial character, the character's bytes are buffered until the required
+// number of bytes are available.
+function utf8Text(buf, i) {
+  var total = utf8CheckIncomplete(this, buf, i);
+  if (!this.lastNeed) return buf.toString('utf8', i);
+  this.lastTotal = total;
+  var end = buf.length - (total - this.lastNeed);
+  buf.copy(this.lastChar, 0, end);
+  return buf.toString('utf8', i, end);
+}
 
-if (typeof process === 'undefined' ||
-    !process.version ||
-    process.version.indexOf('v0.') === 0 ||
-    process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {
-  module.exports = { nextTick: nextTick };
-} else {
-  module.exports = process
+// For UTF-8, a replacement character is added when ending on a partial
+// character.
+function utf8End(buf) {
+  var r = buf && buf.length ? this.write(buf) : '';
+  if (this.lastNeed) return r + '\ufffd';
+  return r;
 }
 
-function nextTick(fn, arg1, arg2, arg3) {
-  if (typeof fn !== 'function') {
-    throw new TypeError('"callback" argument must be a function');
-  }
-  var len = arguments.length;
-  var args, i;
-  switch (len) {
-  case 0:
-  case 1:
-    return process.nextTick(fn);
-  case 2:
-    return process.nextTick(function afterTickOne() {
-      fn.call(null, arg1);
-    });
-  case 3:
-    return process.nextTick(function afterTickTwo() {
-      fn.call(null, arg1, arg2);
-    });
-  case 4:
-    return process.nextTick(function afterTickThree() {
-      fn.call(null, arg1, arg2, arg3);
-    });
-  default:
-    args = new Array(len - 1);
-    i = 0;
-    while (i < args.length) {
-      args[i++] = arguments[i];
+// UTF-16LE typically needs two bytes per character, but even if we have an even
+// number of bytes available, we need to check if we end on a leading/high
+// surrogate. In that case, we need to wait for the next two bytes in order to
+// decode the last character properly.
+function utf16Text(buf, i) {
+  if ((buf.length - i) % 2 === 0) {
+    var r = buf.toString('utf16le', i);
+    if (r) {
+      var c = r.charCodeAt(r.length - 1);
+      if (c >= 0xD800 && c <= 0xDBFF) {
+        this.lastNeed = 2;
+        this.lastTotal = 4;
+        this.lastChar[0] = buf[buf.length - 2];
+        this.lastChar[1] = buf[buf.length - 1];
+        return r.slice(0, -1);
+      }
     }
-    return process.nextTick(function afterTick() {
-      fn.apply(null, args);
-    });
+    return r;
   }
+  this.lastNeed = 1;
+  this.lastTotal = 2;
+  this.lastChar[0] = buf[buf.length - 1];
+  return buf.toString('utf16le', i, buf.length - 1);
 }
 
+// For UTF-16LE we do not explicitly append special replacement characters if we
+// end on a partial character, we simply let v8 handle that.
+function utf16End(buf) {
+  var r = buf && buf.length ? this.write(buf) : '';
+  if (this.lastNeed) {
+    var end = this.lastTotal - this.lastNeed;
+    return r + this.lastChar.toString('utf16le', 0, end);
+  }
+  return r;
+}
 
-}).call(this,require('_process'))
-},{"_process":49}],19:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// a duplex stream is just a stream that is both readable and writable.
-// Since JS doesn't have multiple prototypal inheritance, this class
-// prototypally inherits from Readable, and then parasitically from
-// Writable.
-
-'use strict';
-
-/*<replacement>*/
-
-var pna = require('process-nextick-args');
-/*</replacement>*/
-
-/*<replacement>*/
-var objectKeys = Object.keys || function (obj) {
-  var keys = [];
-  for (var key in obj) {
-    keys.push(key);
-  }return keys;
-};
-/*</replacement>*/
-
-module.exports = Duplex;
-
-/*<replacement>*/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/*</replacement>*/
+function base64Text(buf, i) {
+  var n = (buf.length - i) % 3;
+  if (n === 0) return buf.toString('base64', i);
+  this.lastNeed = 3 - n;
+  this.lastTotal = 3;
+  if (n === 1) {
+    this.lastChar[0] = buf[buf.length - 1];
+  } else {
+    this.lastChar[0] = buf[buf.length - 2];
+    this.lastChar[1] = buf[buf.length - 1];
+  }
+  return buf.toString('base64', i, buf.length - n);
+}
 
-var Readable = require('./_stream_readable');
-var Writable = require('./_stream_writable');
+function base64End(buf) {
+  var r = buf && buf.length ? this.write(buf) : '';
+  if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
+  return r;
+}
 
-util.inherits(Duplex, Readable);
+// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
+function simpleWrite(buf) {
+  return buf.toString(this.encoding);
+}
 
-{
-  // avoid scope creep, the keys array can then be collected
-  var keys = objectKeys(Writable.prototype);
-  for (var v = 0; v < keys.length; v++) {
-    var method = keys[v];
-    if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
-  }
+function simpleEnd(buf) {
+  return buf && buf.length ? this.write(buf) : '';
 }
+},{"safe-buffer":19}],21:[function(require,module,exports){
+(function (global){
 
-function Duplex(options) {
-  if (!(this instanceof Duplex)) return new Duplex(options);
+/**
+ * Module exports.
+ */
 
-  Readable.call(this, options);
-  Writable.call(this, options);
+module.exports = deprecate;
 
-  if (options && options.readable === false) this.readable = false;
+/**
+ * Mark that a method should not be used.
+ * Returns a modified function which warns once by default.
+ *
+ * If `localStorage.noDeprecation = true` is set, then it is a no-op.
+ *
+ * If `localStorage.throwDeprecation = true` is set, then deprecated functions
+ * will throw an Error when invoked.
+ *
+ * If `localStorage.traceDeprecation = true` is set, then deprecated functions
+ * will invoke `console.trace()` instead of `console.error()`.
+ *
+ * @param {Function} fn - the function to deprecate
+ * @param {String} msg - the string to print to the console when `fn` is invoked
+ * @returns {Function} a new "deprecated" version of `fn`
+ * @api public
+ */
 
-  if (options && options.writable === false) this.writable = false;
+function deprecate (fn, msg) {
+  if (config('noDeprecation')) {
+    return fn;
+  }
 
-  this.allowHalfOpen = true;
-  if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
+  var warned = false;
+  function deprecated() {
+    if (!warned) {
+      if (config('throwDeprecation')) {
+        throw new Error(msg);
+      } else if (config('traceDeprecation')) {
+        console.trace(msg);
+      } else {
+        console.warn(msg);
+      }
+      warned = true;
+    }
+    return fn.apply(this, arguments);
+  }
 
-  this.once('end', onend);
+  return deprecated;
 }
 
-Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
-  // making it explicit this property is not enumerable
-  // because otherwise some prototype manipulation in
-  // userland will fail
-  enumerable: false,
-  get: function () {
-    return this._writableState.highWaterMark;
-  }
-});
-
-// the no-half-open enforcer
-function onend() {
-  // if we allow half-open state, or if the writable side ended,
-  // then we're ok.
-  if (this.allowHalfOpen || this._writableState.ended) return;
+/**
+ * Checks `localStorage` for boolean values for the given `name`.
+ *
+ * @param {String} name
+ * @returns {Boolean}
+ * @api private
+ */
 
-  // no more data can be written.
-  // But allow more writes to happen in this tick.
-  pna.nextTick(onEndNT, this);
+function config (name) {
+  // accessing global.localStorage can trigger a DOMException in sandboxed iframes
+  try {
+    if (!global.localStorage) return false;
+  } catch (_) {
+    return false;
+  }
+  var val = global.localStorage[name];
+  if (null == val) return false;
+  return String(val).toLowerCase() === 'true';
 }
 
-function onEndNT(self) {
-  self.end();
+}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
+},{}],22:[function(require,module,exports){
+(function (Buffer){
+const Peer = require('../../server/src/peer')
+const VideoConverter = require('./lib/dist/video-converter');
+const msgpack = require('msgpack5')();
+
+let current_data = {};
+let peer;
+let player;
+
+/**
+ * Validates that the user is logged in by sending the token.
+ * (The token check below is currently commented out, so this only renders the thumbnails.)
+ */
+checkIfLoggedIn = async () => {
+    //     const token = window.localStorage.getItem('token')
+    //     console.log(token)
+    //     if(!token){
+    //         console.log("You need to login")
+    //         renderLogin()
+    //     }else{
+
+    //         //Check if the token is valid
+    //         const response = await fetch('http://localhost:8080/auth/validation', {
+    //             method: 'POST',
+    //             headers: {'Authorization': token}
+    //         })
+    //         console.log('RESPONSE', response)
+            
+    //         //Token is valid, show available streams
+    //         if(response.status === 200){
+    //             console.log("SUCCESS")
+                 renderThumbnails()
+
+    //         }
+    //     }
+}
+
+/**
+ * Returns a list of available streams
+ */
+getAvailableStreams = async () => {
+    try{
+        const streamsInJson = await fetch(`./streams`);
+        const streams = await streamsInJson.json();
+        console.log('AVAILABLE', streams)
+        return streams;
+    }catch(err){
+        console.log(err)
+    }
+}
+
+
+createVideoPlayer = () => {
+    const containerDiv = document.getElementById('container');
+    containerDiv.innerHTML = '';
+    /*containerDiv.innerHTML = `<h1>Stream from source ${current_data.uri}</h1><br>
+        <button onclick="renderThumbnails(); closeStream()">Go back</button>
+        <button onclick="connectToStream('${current_data.uri}')">Start Stream</button><br>
+        <button onclick="webSocketTest()">WebSocket Test</button><br>
+        <video id="ftlab-stream-video" width="640" height="360"></video>`;
+    containerDiv.innerHTML += '<br>'
+    containerDiv.innerHTML += ''*/
+    createPeer();
+    //connectToStream();
+    new FTLStream(peer, current_data.uri, containerDiv);
+}
+
+/**
+ * Creates thumbnail (image) for all available streams and adds them to div class='container'
+ */
+renderThumbnails = async () => {
+    const thumbnails = await getAvailableStreams();
+    const containerDiv = document.getElementById('container')
+    containerDiv.innerHTML = '';
+    containerDiv.innerHTML = `<button onClick="configs()">change configs</button>`
+    containerDiv.innerHTML += `<div class="ftlab-stream-thumbnails"></div>`
+    if(thumbnails.length === 0){
+        containerDiv.innerHTML = `<h3>No streams running currently</h3>`
+    }else{
+        for(var i=0; i<thumbnails.length; i++){
+            const encodedURI = encodeURIComponent(thumbnails[i])
+            current_data.uri = thumbnails[i]
+            try{
+                const someData = await fetch(`./stream/rgb?uri=${encodedURI}`)
+                if(!someData.ok){
+                    throw new Error('Image not found')
+                }
+                const myBlob = await someData.blob();
+                const objectURL = URL.createObjectURL(myBlob);
+                // containerDiv.innerHTML += createCard()
+                containerDiv.innerHTML += createCard(objectURL, i+4)
+            }catch(err){
+                console.log("Couldn't create thumbnail");
+                console.log(err) 
+            }
+        }
+    }
+}
+
+
+/** 
+ * Method to create a single thumbnail
+ */
+createCard = (url, viewers) => {
+    return `<div class='ftlab-card-component' >
+                <img src='${url}' class="thumbnail-img" alt="Stream thumbnail" width="250">
+                <p>Viewers: ${viewers}</p>
+                <button onclick="createVideoPlayer()">button</button>
+            </div>`
+}
+
+
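+/**
+ * Opens a binary WebSocket back to the host serving this page and wraps it in
+ * a Peer object, which is used for RPC calls and stream packets.
+ */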
+createPeer = () => {
+    // FOR PRODUCTION
+    console.log("HOST", location.host);
+    const ws = new WebSocket("ws://" + location.host + location.pathname);
+    //const ws = new WebSocket("ws://localhost:8080")
+    ws.binaryType = "arraybuffer";
+    peer = new Peer(ws)
+}
+
+webSocketTest = () => {
+    peer.send("update_cfg", "ftl://utu.fi#reconstruction_default/0/renderer/cool_effect", "true")    
+}
+
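+/**
+ * Client-side wrapper for a single FTL stream. Creates a <video> element and
+ * a Play button inside the given container element, binds the stream URI on
+ * the peer, and feeds incoming H264 packets into a VideoConverter attached to
+ * the video element.
+ */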
+function FTLStream(peer, uri, element) {
+	this.uri = uri;
+	this.peer = peer;
+
+	//this.elements_ = {};
+	//this.converters_ = {};
+
+	//const element = document.getElementById('ftlab-stream-video');
+	this.outer = element;
+	this.element = document.createElement("VIDEO");
+	this.element.setAttribute("width", 640);
+	this.element.setAttribute("height", 360);
+	this.element.setAttribute("controls", true);
+	this.outer.appendChild(this.element);
+	this.play_button = document.createElement("BUTTON");
+	this.play_button.innerHTML = "Play";
+	this.play_button.onclick = () => {
+		this.start(0,0,0);
+	}
+	this.outer.appendChild(this.play_button);
+    this.converter = null;
+
+    let rxcount = 0;
+    let ts = 0;
+	let dts = 0;
+
+    this.peer.bind(uri, (latency, streampckg, pckg) => {
+        if(pckg[0] === 2){  // H264 packet.
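+			// Reconstruct the (frameset, source, channel) id of this packet and
+			// only decode it if it matches the currently selected stream (see start()).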
+			let id = "id-"+streampckg[1]+"-"+streampckg[2]+"-"+streampckg[3];
+
+			if (this.current == id) {
+				rxcount++;
+				if (rxcount >= 25) {
+					rxcount = 0;
+					peer.send(uri, 0, [1,0,255,0],[255,7,35,0,0,Buffer.alloc(0)]);
+					//peer.send(current_data.uri, 0, [255,7,35,0,0,Buffer.alloc(0)], [1,0,255,0]);
+				}
+			
+				if (this.converter) {
+					/*function decode(value){
+						this.converter.appendRawData(value);
+					}
+					decode(pckg[5]);*/
+					this.converter.appendRawData(pckg[5]);
+					this.converter.play();
+				} else {
+					if (ts > 0) {
+						dts = streampckg[0] - ts;
+						console.log("Framerate = ", 1000/dts);
+						this.converter = new VideoConverter.default(this.element, 30, 1);
+					}
+					ts = streampckg[0];
+				}
+			}
+        } else if (pckg[0] === 103) {
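+			// Type 103 packets carry a msgpack-encoded payload; it is only
+			// decoded and logged for inspection here.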
+			console.log(msgpack.decode(pckg[5]));
+		}
+	});
+	
+	//this.start();
+}
+
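+/**
+ * Selects which frameset, source and channel to display and requests it from
+ * the remote end. On the first call the stream is located via the
+ * "find_stream" RPC; afterwards the request packet is sent directly.
+ */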
+FTLStream.prototype.start = function(fs, source, channel) {
+	let id = "id-"+fs+"-"+source+"-"+channel;
+	this.current = id;
+
+	if (this.found) {
+		this.peer.send(this.uri, 0, [1,fs,255,channel],[255,7,35,0,0,Buffer.alloc(0)]);
+	} else {
+		this.peer.rpc("find_stream", (res) => {
+			this.found = true;
+			this.peer.send(this.uri, 0, [1,fs,255,channel],[255,7,35,0,0,Buffer.alloc(0)]);
+		}, this.uri);
+	}
+}
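+// Typical usage (see createVideoPlayer above): create one FTLStream per source
+// and start the default view, e.g.
+//   let stream = new FTLStream(peer, current_data.uri, containerDiv);
+//   stream.start(0, 0, 0);  // frameset 0, source 0, channel 0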
+
+
+/*connectToStream = () => {
+    const element = document.getElementById('ftlab-stream-video');
+    let converter = null;
+
+    let rxcount = 0;
+    let ts = 0;
+    let dts = 0;
+
+    peer.bind(current_data.uri, (latency, streampckg, pckg) => {
+        if(pckg[0] === 2){
+            rxcount++;
+            if (rxcount >= 25) {
+                rxcount = 0;
+                peer.send(current_data.uri, 0, [1,0,255,0],[255,7,35,0,0,Buffer.alloc(0)]);
+                //peer.send(current_data.uri, 0, [255,7,35,0,0,Buffer.alloc(0)], [1,0,255,0]);
+            }
+
+            if (converter) {
+                function decode(value){
+                    converter.appendRawData(value);
+                }
+                decode(pckg[5]);
+                converter.play();
+            } else {
+                if (ts > 0) {
+                    dts = streampckg[0] - ts;
+                    console.log("Framerate = ", 1000/dts);
+                    converter = new VideoConverter.default(element, 30, 1);
+                }
+                ts = streampckg[0];
+            }
+        } else if (pckg[0] === 103) {
+			console.log(msgpack.decode(pckg[5]));
+		}
+    })
+
+    // Start the transaction
+    //peer.send("get_stream", (current_data.uri, 30, 0, current_data.uri));
+
+    peer.rpc("find_stream", (res) => {
+        peer.send(current_data.uri, 0, [1,0,255,0],[255,7,35,0,0,Buffer.alloc(0)]);
+        //peer.send(current_data.uri, [255,7,35,0,0,Buffer.alloc(0)], [1,0,255,0]);
+    }, current_data.uri);
+}*/
+
+closeStream = () => {
+    peer.sock.close()
+}
+
+
+
+/**
+ * **************
+ * CONFIGURATIONS
+ * **************
+ */
+
+
+current_data.configURI = "ftl://utu.fi#reconstruction_snap8/net"
+
+configs = () => {
+    const container = document.getElementById("container");
+    container.innerHTML = `<div class="ftlab-configurations"></div>`;
+    renderConfigOptions();
+}
+
+
+renderConfigOptions = () => {
+    const input = `<p>input1</p><br>ftl://utu.fi#<input type="text">`
+    const doc = document.getElementsByClassName('ftlab-configurations')[0];
+    doc.innerHTML = input;
+}
+
+/**
+ * Fetches the configuration for the given settings path of the current stream
+ * and appends the response to the container element.
+ */
+loadConfigs = async (str) => {
+    const configURI = encodeURIComponent(`ftl://utu.fi#reconstruction_snap8${str}`);
+    const uri = encodeURIComponent(current_data.uri)
+    const rawResp = await fetch(`./stream/config?settings=${configURI}&uri=${uri}`)
+    const response = await rawResp.json();
+    const content = JSON.parse(response);
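+    // Note: 'container' is not declared in this scope; in the browser it
+    // resolves to the element with id="container" via named global lookup.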
+    container.innerHTML += `<p>${response}</p>`;
+}
+
+// current_data.configData = '{"peers": 1}';
+
+/**
+ * Method to send configurations to backend 
+ */
+saveConfigs = async () => {
+    let {uri, configURI, configData} = current_data
+    const rawResp = await fetch('./stream/config', {
+        method: 'POST',
+        headers: {
+            'Accept': 'application/json',
+            'Content-Type': 'application/json'
+        },
+        body: JSON.stringify({peerURI: uri, configURI, data: configData, saveToCPP: true})
+    });
+    const content = await rawResp.json();
 }
-
-Object.defineProperty(Duplex.prototype, 'destroyed', {
-  get: function () {
-    if (this._readableState === undefined || this._writableState === undefined) {
-      return false;
-    }
-    return this._readableState.destroyed && this._writableState.destroyed;
-  },
-  set: function (value) {
-    // we ignore the value if the stream
-    // has not been initialized yet
-    if (this._readableState === undefined || this._writableState === undefined) {
-      return;
+}).call(this,require("buffer").Buffer)
+},{"../../server/src/peer":57,"./lib/dist/video-converter":30,"buffer":65,"msgpack5":5}],23:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var bit_stream_1 = require("./util/bit-stream");
+var debug = require("./util/debug");
+var NALU_1 = require("./util/NALU");
+var H264Parser = (function () {
+    function H264Parser(remuxer) {
+        this.remuxer = remuxer;
+        this.track = remuxer.mp4track;
     }
+    H264Parser.prototype.parseSEI = function (sei) {
+        var messages = H264Parser.readSEI(sei);
+        for (var _i = 0, messages_1 = messages; _i < messages_1.length; _i++) {
+            var m = messages_1[_i];
+            switch (m.type) {
+                case 0:
+                    this.track.seiBuffering = true;
+                    break;
+                case 5:
+                    return true;
+                default:
+                    break;
+            }
+        }
+        return false;
+    };
+    H264Parser.prototype.parseSPS = function (sps) {
+        var config = H264Parser.readSPS(sps);
+        this.track.width = config.width;
+        this.track.height = config.height;
+        this.track.sps = [sps];
+        this.track.codec = 'avc1.';
+        var codecArray = new DataView(sps.buffer, sps.byteOffset + 1, 4);
+        for (var i = 0; i < 3; ++i) {
+            var h = codecArray.getUint8(i).toString(16);
+            if (h.length < 2) {
+                h = '0' + h;
+            }
+            this.track.codec += h;
+        }
+    };
+    H264Parser.prototype.parsePPS = function (pps) {
+        this.track.pps = [pps];
+    };
+    H264Parser.prototype.parseNAL = function (unit) {
+        if (!unit) {
+            return false;
+        }
+        var push = false;
+        switch (unit.type()) {
+            case NALU_1.default.NDR:
+            case NALU_1.default.IDR:
+                push = true;
+                break;
+            case NALU_1.default.SEI:
+                push = this.parseSEI(unit.getData().subarray(4));
+                break;
+            case NALU_1.default.SPS:
+                if (this.track.sps.length === 0) {
+                    this.parseSPS(unit.getData().subarray(4));
+                    debug.log(" Found SPS type NALU frame.");
+                    if (!this.remuxer.readyToDecode && this.track.pps.length > 0 && this.track.sps.length > 0) {
+                        this.remuxer.readyToDecode = true;
+                    }
+                }
+                break;
+            case NALU_1.default.PPS:
+                if (this.track.pps.length === 0) {
+                    this.parsePPS(unit.getData().subarray(4));
+                    debug.log(" Found PPS type NALU frame.");
+                    if (!this.remuxer.readyToDecode && this.track.pps.length > 0 && this.track.sps.length > 0) {
+                        this.remuxer.readyToDecode = true;
+                    }
+                }
+                break;
+            default:
+                debug.log(" Found Unknown type NALU frame. type=" + unit.type());
+                break;
+        }
+        return push;
+    };
+    H264Parser.skipScalingList = function (decoder, count) {
+        var lastScale = 8;
+        var nextScale = 8;
+        for (var j = 0; j < count; j++) {
+            if (nextScale !== 0) {
+                var deltaScale = decoder.readEG();
+                nextScale = (lastScale + deltaScale + 256) % 256;
+            }
+            lastScale = (nextScale === 0) ? lastScale : nextScale;
+        }
+    };
+    H264Parser.readSPS = function (data) {
+        var decoder = new bit_stream_1.default(data);
+        var frameCropLeftOffset = 0;
+        var frameCropRightOffset = 0;
+        var frameCropTopOffset = 0;
+        var frameCropBottomOffset = 0;
+        var sarScale = 1;
+        decoder.readUByte();
+        var profileIdc = decoder.readUByte();
+        decoder.skipBits(5);
+        decoder.skipBits(3);
+        decoder.skipBits(8);
+        decoder.skipUEG();
+        if (profileIdc === 100 ||
+            profileIdc === 110 ||
+            profileIdc === 122 ||
+            profileIdc === 244 ||
+            profileIdc === 44 ||
+            profileIdc === 83 ||
+            profileIdc === 86 ||
+            profileIdc === 118 ||
+            profileIdc === 128) {
+            var chromaFormatIdc = decoder.readUEG();
+            if (chromaFormatIdc === 3) {
+                decoder.skipBits(1);
+            }
+            decoder.skipUEG();
+            decoder.skipUEG();
+            decoder.skipBits(1);
+            if (decoder.readBoolean()) {
+                var scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
+                for (var i = 0; i < scalingListCount; ++i) {
+                    if (decoder.readBoolean()) {
+                        if (i < 6) {
+                            H264Parser.skipScalingList(decoder, 16);
+                        }
+                        else {
+                            H264Parser.skipScalingList(decoder, 64);
+                        }
+                    }
+                }
+            }
+        }
+        decoder.skipUEG();
+        var picOrderCntType = decoder.readUEG();
+        if (picOrderCntType === 0) {
+            decoder.readUEG();
+        }
+        else if (picOrderCntType === 1) {
+            decoder.skipBits(1);
+            decoder.skipEG();
+            decoder.skipEG();
+            var numRefFramesInPicOrderCntCycle = decoder.readUEG();
+            for (var i = 0; i < numRefFramesInPicOrderCntCycle; ++i) {
+                decoder.skipEG();
+            }
+        }
+        decoder.skipUEG();
+        decoder.skipBits(1);
+        var picWidthInMbsMinus1 = decoder.readUEG();
+        var picHeightInMapUnitsMinus1 = decoder.readUEG();
+        var frameMbsOnlyFlag = decoder.readBits(1);
+        if (frameMbsOnlyFlag === 0) {
+            decoder.skipBits(1);
+        }
+        decoder.skipBits(1);
+        if (decoder.readBoolean()) {
+            frameCropLeftOffset = decoder.readUEG();
+            frameCropRightOffset = decoder.readUEG();
+            frameCropTopOffset = decoder.readUEG();
+            frameCropBottomOffset = decoder.readUEG();
+        }
+        if (decoder.readBoolean()) {
+            if (decoder.readBoolean()) {
+                var sarRatio = void 0;
+                var aspectRatioIdc = decoder.readUByte();
+                switch (aspectRatioIdc) {
+                    case 1:
+                        sarRatio = [1, 1];
+                        break;
+                    case 2:
+                        sarRatio = [12, 11];
+                        break;
+                    case 3:
+                        sarRatio = [10, 11];
+                        break;
+                    case 4:
+                        sarRatio = [16, 11];
+                        break;
+                    case 5:
+                        sarRatio = [40, 33];
+                        break;
+                    case 6:
+                        sarRatio = [24, 11];
+                        break;
+                    case 7:
+                        sarRatio = [20, 11];
+                        break;
+                    case 8:
+                        sarRatio = [32, 11];
+                        break;
+                    case 9:
+                        sarRatio = [80, 33];
+                        break;
+                    case 10:
+                        sarRatio = [18, 11];
+                        break;
+                    case 11:
+                        sarRatio = [15, 11];
+                        break;
+                    case 12:
+                        sarRatio = [64, 33];
+                        break;
+                    case 13:
+                        sarRatio = [160, 99];
+                        break;
+                    case 14:
+                        sarRatio = [4, 3];
+                        break;
+                    case 15:
+                        sarRatio = [3, 2];
+                        break;
+                    case 16:
+                        sarRatio = [2, 1];
+                        break;
+                    case 255: {
+                        sarRatio = [decoder.readUByte() << 8 | decoder.readUByte(), decoder.readUByte() << 8 | decoder.readUByte()];
+                        break;
+                    }
+                    default: {
+                        debug.error("  H264: Unknown aspectRatioIdc=" + aspectRatioIdc);
+                    }
+                }
+                if (sarRatio) {
+                    sarScale = sarRatio[0] / sarRatio[1];
+                }
+            }
+            if (decoder.readBoolean()) {
+                decoder.skipBits(1);
+            }
+            if (decoder.readBoolean()) {
+                decoder.skipBits(4);
+                if (decoder.readBoolean()) {
+                    decoder.skipBits(24);
+                }
+            }
+            if (decoder.readBoolean()) {
+                decoder.skipUEG();
+                decoder.skipUEG();
+            }
+            if (decoder.readBoolean()) {
+                var unitsInTick = decoder.readUInt();
+                var timeScale = decoder.readUInt();
+                var fixedFrameRate = decoder.readBoolean();
+                var frameDuration = timeScale / (2 * unitsInTick);
+                debug.log("timescale: " + timeScale + "; unitsInTick: " + unitsInTick + "; " +
+                    ("fixedFramerate: " + fixedFrameRate + "; avgFrameDuration: " + frameDuration));
+            }
+        }
+        return {
+            width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
+            height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) -
+                ((frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset)),
+        };
+    };
+    H264Parser.readSEI = function (data) {
+        var decoder = new bit_stream_1.default(data);
+        decoder.skipBits(8);
+        var result = [];
+        while (decoder.bitsAvailable > 3 * 8) {
+            result.push(this.readSEIMessage(decoder));
+        }
+        return result;
+    };
+    H264Parser.readSEIMessage = function (decoder) {
+        function get() {
+            var result = 0;
+            while (true) {
+                var value = decoder.readUByte();
+                result += value;
+                if (value !== 0xff) {
+                    break;
+                }
+            }
+            return result;
+        }
+        var payloadType = get();
+        var payloadSize = get();
+        return this.readSEIPayload(decoder, payloadType, payloadSize);
+    };
+    H264Parser.readSEIPayload = function (decoder, type, size) {
+        var result;
+        switch (type) {
+            default:
+                result = { type: type };
+                decoder.skipBits(size * 8);
+        }
+        decoder.skipBits(decoder.bitsAvailable % 8);
+        return result;
+    };
+    return H264Parser;
+}());
+exports.default = H264Parser;
 
-    // backward compatibility, the user is explicitly
-    // managing destroyed
-    this._readableState.destroyed = value;
-    this._writableState.destroyed = value;
-  }
-});
-
-Duplex.prototype._destroy = function (err, cb) {
-  this.push(null);
-  this.end();
-
-  pna.nextTick(cb, err);
-};
-},{"./_stream_readable":21,"./_stream_writable":23,"core-util-is":11,"inherits":12,"process-nextick-args":18}],20:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// a passthrough stream.
-// basically just the most minimal sort of Transform stream.
-// Every written chunk gets output as-is.
-
-'use strict';
-
-module.exports = PassThrough;
-
-var Transform = require('./_stream_transform');
-
-/*<replacement>*/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/*</replacement>*/
-
-util.inherits(PassThrough, Transform);
-
-function PassThrough(options) {
-  if (!(this instanceof PassThrough)) return new PassThrough(options);
-
-  Transform.call(this, options);
-}
-
-PassThrough.prototype._transform = function (chunk, encoding, cb) {
-  cb(null, chunk);
-};
-},{"./_stream_transform":22,"core-util-is":11,"inherits":12}],21:[function(require,module,exports){
-(function (process,global){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-'use strict';
-
-/*<replacement>*/
-
-var pna = require('process-nextick-args');
-/*</replacement>*/
-
-module.exports = Readable;
-
-/*<replacement>*/
-var isArray = require('isarray');
-/*</replacement>*/
-
-/*<replacement>*/
-var Duplex;
-/*</replacement>*/
-
-Readable.ReadableState = ReadableState;
-
-/*<replacement>*/
-var EE = require('events').EventEmitter;
-
-var EElistenerCount = function (emitter, type) {
-  return emitter.listeners(type).length;
-};
-/*</replacement>*/
-
-/*<replacement>*/
-var Stream = require('./internal/streams/stream');
-/*</replacement>*/
-
-/*<replacement>*/
-
-var Buffer = require('safe-buffer').Buffer;
-var OurUint8Array = global.Uint8Array || function () {};
-function _uint8ArrayToBuffer(chunk) {
-  return Buffer.from(chunk);
-}
-function _isUint8Array(obj) {
-  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
-
-/*</replacement>*/
-
-/*<replacement>*/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/*</replacement>*/
-
-/*<replacement>*/
-var debugUtil = require('util');
-var debug = void 0;
-if (debugUtil && debugUtil.debuglog) {
-  debug = debugUtil.debuglog('stream');
-} else {
-  debug = function () {};
-}
-/*</replacement>*/
-
-var BufferList = require('./internal/streams/BufferList');
-var destroyImpl = require('./internal/streams/destroy');
-var StringDecoder;
-
-util.inherits(Readable, Stream);
-
-var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
-
-function prependListener(emitter, event, fn) {
-  // Sadly this is not cacheable as some libraries bundle their own
-  // event emitter implementation with them.
-  if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
-
-  // This is a hack to make sure that our error handler is attached before any
-  // userland ones.  NEVER DO THIS. This is here only because this code needs
-  // to continue to work with older versions of Node.js that do not include
-  // the prependListener() method. The goal is to eventually remove this hack.
-  if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
-}
-
-function ReadableState(options, stream) {
-  Duplex = Duplex || require('./_stream_duplex');
-
-  options = options || {};
-
-  // Duplex streams are both readable and writable, but share
-  // the same options object.
-  // However, some cases require setting options to different
-  // values for the readable and the writable sides of the duplex stream.
-  // These options can be provided separately as readableXXX and writableXXX.
-  var isDuplex = stream instanceof Duplex;
-
-  // object stream flag. Used to make read(n) ignore n and to
-  // make all the buffer merging and length checks go away
-  this.objectMode = !!options.objectMode;
-
-  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
-
-  // the point at which it stops calling _read() to fill the buffer
-  // Note: 0 is a valid value, means "don't call _read preemptively ever"
-  var hwm = options.highWaterMark;
-  var readableHwm = options.readableHighWaterMark;
-  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
-
-  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
+},{"./util/NALU":26,"./util/bit-stream":27,"./util/debug":28}],24:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var h264_parser_1 = require("./h264-parser");
+var debug = require("./util/debug");
+var NALU_1 = require("./util/NALU");
+var trackId = 1;
+var H264Remuxer = (function () {
+    function H264Remuxer(fps, framePerFragment, timescale) {
+        this.fps = fps;
+        this.framePerFragment = framePerFragment;
+        this.timescale = timescale;
+        this.readyToDecode = false;
+        this.totalDTS = 0;
+        this.stepDTS = Math.round(this.timescale / this.fps);
+        this.frameCount = 0;
+        this.seq = 1;
+        this.mp4track = {
+            id: H264Remuxer.getTrackID(),
+            type: 'video',
+            len: 0,
+            codec: '',
+            sps: [],
+            pps: [],
+            seiBuffering: false,
+            width: 0,
+            height: 0,
+            timescale: timescale,
+            duration: timescale,
+            samples: [],
+            isKeyFrame: true,
+        };
+        this.unitSamples = [[]];
+        this.parser = new h264_parser_1.default(this);
+    }
+    H264Remuxer.getTrackID = function () {
+        return trackId++;
+    };
+    Object.defineProperty(H264Remuxer.prototype, "seqNum", {
+        get: function () {
+            return this.seq;
+        },
+        enumerable: true,
+        configurable: true
+    });
+    H264Remuxer.prototype.remux = function (nalu) {
+        if (this.mp4track.seiBuffering && nalu.type() === NALU_1.default.SEI) {
+            return this.createNextFrame();
+        }
+        if (this.parser.parseNAL(nalu)) {
+            this.unitSamples[this.unitSamples.length - 1].push(nalu);
+            this.mp4track.len += nalu.getSize();
+        }
+        if (!this.mp4track.seiBuffering && (nalu.type() === NALU_1.default.IDR || nalu.type() === NALU_1.default.NDR)) {
+            return this.createNextFrame();
+        }
+        return;
+    };
+    H264Remuxer.prototype.createNextFrame = function () {
+        if (this.mp4track.len > 0) {
+            this.frameCount++;
+            if (this.frameCount % this.framePerFragment === 0) {
+                var fragment = this.getFragment();
+                if (fragment) {
+                    var dts = this.totalDTS;
+                    this.totalDTS = this.stepDTS * this.frameCount;
+                    return [dts, fragment];
+                }
+                else {
+                    debug.log("No mp4 sample data.");
+                }
+            }
+            this.unitSamples.push([]);
+        }
+        return;
+    };
+    H264Remuxer.prototype.flush = function () {
+        this.seq++;
+        this.mp4track.len = 0;
+        this.mp4track.samples = [];
+        this.mp4track.isKeyFrame = false;
+        this.unitSamples = [[]];
+    };
+    H264Remuxer.prototype.getFragment = function () {
+        if (!this.checkReadyToDecode()) {
+            return undefined;
+        }
+        var payload = new Uint8Array(this.mp4track.len);
+        this.mp4track.samples = [];
+        var offset = 0;
+        for (var i = 0, len = this.unitSamples.length; i < len; i++) {
+            var units = this.unitSamples[i];
+            if (units.length === 0) {
+                continue;
+            }
+            var mp4Sample = {
+                size: 0,
+                cts: this.stepDTS * i,
+            };
+            for (var _i = 0, units_1 = units; _i < units_1.length; _i++) {
+                var unit = units_1[_i];
+                mp4Sample.size += unit.getSize();
+                payload.set(unit.getData(), offset);
+                offset += unit.getSize();
+            }
+            this.mp4track.samples.push(mp4Sample);
+        }
+        if (offset === 0) {
+            return undefined;
+        }
+        return payload;
+    };
+    H264Remuxer.prototype.checkReadyToDecode = function () {
+        if (!this.readyToDecode || this.unitSamples.filter(function (array) { return array.length > 0; }).length === 0) {
+            debug.log("Not ready to decode! readyToDecode(" + this.readyToDecode + ") is false or units is empty.");
+            return false;
+        }
+        return true;
+    };
+    return H264Remuxer;
+}());
+exports.default = H264Remuxer;
 
-  // cast to ints.
-  this.highWaterMark = Math.floor(this.highWaterMark);
+},{"./h264-parser":23,"./util/NALU":26,"./util/debug":28}],25:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var MP4 = (function () {
+    function MP4() {
+    }
+    MP4.init = function () {
+        MP4.initalized = true;
+        MP4.types = {
+            avc1: [],
+            avcC: [],
+            btrt: [],
+            dinf: [],
+            dref: [],
+            esds: [],
+            ftyp: [],
+            hdlr: [],
+            mdat: [],
+            mdhd: [],
+            mdia: [],
+            mfhd: [],
+            minf: [],
+            moof: [],
+            moov: [],
+            mp4a: [],
+            mvex: [],
+            mvhd: [],
+            sdtp: [],
+            stbl: [],
+            stco: [],
+            stsc: [],
+            stsd: [],
+            stsz: [],
+            stts: [],
+            styp: [],
+            tfdt: [],
+            tfhd: [],
+            traf: [],
+            trak: [],
+            trun: [],
+            trep: [],
+            trex: [],
+            tkhd: [],
+            vmhd: [],
+            smhd: [],
+        };
+        for (var type in MP4.types) {
+            if (MP4.types.hasOwnProperty(type)) {
+                MP4.types[type] = [
+                    type.charCodeAt(0),
+                    type.charCodeAt(1),
+                    type.charCodeAt(2),
+                    type.charCodeAt(3),
+                ];
+            }
+        }
+        var hdlr = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x76, 0x69, 0x64, 0x65,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x56, 0x69, 0x64, 0x65,
+            0x6f, 0x48, 0x61, 0x6e,
+            0x64, 0x6c, 0x65, 0x72, 0x00,
+        ]);
+        var dref = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x01,
+            0x00, 0x00, 0x00, 0x0c,
+            0x75, 0x72, 0x6c, 0x20,
+            0x00,
+            0x00, 0x00, 0x01,
+        ]);
+        var stco = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+        ]);
+        MP4.STTS = MP4.STSC = MP4.STCO = stco;
+        MP4.STSZ = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+        ]);
+        MP4.VMHD = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x01,
+            0x00, 0x00,
+            0x00, 0x00,
+            0x00, 0x00,
+            0x00, 0x00,
+        ]);
+        MP4.SMHD = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00,
+            0x00, 0x00,
+        ]);
+        MP4.STSD = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x01
+        ]);
+        MP4.FTYP = MP4.box(MP4.types.ftyp, new Uint8Array([
+            0x69, 0x73, 0x6f, 0x35,
+            0x00, 0x00, 0x00, 0x01,
+            0x61, 0x76, 0x63, 0x31,
+            0x69, 0x73, 0x6f, 0x35,
+            0x64, 0x61, 0x73, 0x68,
+        ]));
+        MP4.STYP = MP4.box(MP4.types.styp, new Uint8Array([
+            0x6d, 0x73, 0x64, 0x68,
+            0x00, 0x00, 0x00, 0x00,
+            0x6d, 0x73, 0x64, 0x68,
+            0x6d, 0x73, 0x69, 0x78,
+        ]));
+        MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
+        MP4.HDLR = MP4.box(MP4.types.hdlr, hdlr);
+    };
+    MP4.box = function (type) {
+        var payload = [];
+        for (var _i = 1; _i < arguments.length; _i++) {
+            payload[_i - 1] = arguments[_i];
+        }
+        var size = 8;
+        for (var _a = 0, payload_1 = payload; _a < payload_1.length; _a++) {
+            var p = payload_1[_a];
+            size += p.byteLength;
+        }
+        var result = new Uint8Array(size);
+        result[0] = (size >> 24) & 0xff;
+        result[1] = (size >> 16) & 0xff;
+        result[2] = (size >> 8) & 0xff;
+        result[3] = size & 0xff;
+        result.set(type, 4);
+        size = 8;
+        for (var _b = 0, payload_2 = payload; _b < payload_2.length; _b++) {
+            var box = payload_2[_b];
+            result.set(box, size);
+            size += box.byteLength;
+        }
+        return result;
+    };
+    MP4.mdat = function (data) {
+        return MP4.box(MP4.types.mdat, data);
+    };
+    MP4.mdhd = function (timescale) {
+        return MP4.box(MP4.types.mdhd, new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x01,
+            0x00, 0x00, 0x00, 0x02,
+            (timescale >> 24) & 0xFF,
+            (timescale >> 16) & 0xFF,
+            (timescale >> 8) & 0xFF,
+            timescale & 0xFF,
+            0x00, 0x00, 0x00, 0x00,
+            0x55, 0xc4,
+            0x00, 0x00,
+        ]));
+    };
+    MP4.mdia = function (track) {
+        return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale), MP4.HDLR, MP4.minf(track));
+    };
+    MP4.mfhd = function (sequenceNumber) {
+        return MP4.box(MP4.types.mfhd, new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            (sequenceNumber >> 24),
+            (sequenceNumber >> 16) & 0xFF,
+            (sequenceNumber >> 8) & 0xFF,
+            sequenceNumber & 0xFF,
+        ]));
+    };
+    MP4.minf = function (track) {
+        return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
+    };
+    MP4.moof = function (sn, baseMediaDecodeTime, track) {
+        return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track, baseMediaDecodeTime));
+    };
+    MP4.moov = function (tracks, duration, timescale) {
+        var boxes = [];
+        for (var _i = 0, tracks_1 = tracks; _i < tracks_1.length; _i++) {
+            var track = tracks_1[_i];
+            boxes.push(MP4.trak(track));
+        }
+        return MP4.box.apply(MP4, [MP4.types.moov, MP4.mvhd(timescale, duration), MP4.mvex(tracks)].concat(boxes));
+    };
+    MP4.mvhd = function (timescale, duration) {
+        var bytes = new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x01,
+            0x00, 0x00, 0x00, 0x02,
+            (timescale >> 24) & 0xFF,
+            (timescale >> 16) & 0xFF,
+            (timescale >> 8) & 0xFF,
+            timescale & 0xFF,
+            (duration >> 24) & 0xFF,
+            (duration >> 16) & 0xFF,
+            (duration >> 8) & 0xFF,
+            duration & 0xFF,
+            0x00, 0x01, 0x00, 0x00,
+            0x01, 0x00,
+            0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x01, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x01, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x40, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x02,
+        ]);
+        return MP4.box(MP4.types.mvhd, bytes);
+    };
+    MP4.mvex = function (tracks) {
+        var boxes = [];
+        for (var _i = 0, tracks_2 = tracks; _i < tracks_2.length; _i++) {
+            var track = tracks_2[_i];
+            boxes.push(MP4.trex(track));
+        }
+        return MP4.box.apply(MP4, [MP4.types.mvex].concat(boxes, [MP4.trep()]));
+    };
+    MP4.trep = function () {
+        return MP4.box(MP4.types.trep, new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x01,
+        ]));
+    };
+    MP4.stbl = function (track) {
+        return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
+    };
+    MP4.avc1 = function (track) {
+        var sps = [];
+        var pps = [];
+        for (var _i = 0, _a = track.sps; _i < _a.length; _i++) {
+            var data = _a[_i];
+            var len = data.byteLength;
+            sps.push((len >>> 8) & 0xFF);
+            sps.push((len & 0xFF));
+            sps = sps.concat(Array.prototype.slice.call(data));
+        }
+        for (var _b = 0, _c = track.pps; _b < _c.length; _b++) {
+            var data = _c[_b];
+            var len = data.byteLength;
+            pps.push((len >>> 8) & 0xFF);
+            pps.push((len & 0xFF));
+            pps = pps.concat(Array.prototype.slice.call(data));
+        }
+        var avcc = MP4.box(MP4.types.avcC, new Uint8Array([
+            0x01,
+            sps[3],
+            sps[4],
+            sps[5],
+            0xfc | 3,
+            0xE0 | track.sps.length,
+        ].concat(sps).concat([
+            track.pps.length,
+        ]).concat(pps)));
+        var width = track.width;
+        var height = track.height;
+        return MP4.box(MP4.types.avc1, new Uint8Array([
+            0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x01,
+            0x00, 0x00,
+            0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            (width >> 8) & 0xFF,
+            width & 0xff,
+            (height >> 8) & 0xFF,
+            height & 0xff,
+            0x00, 0x48, 0x00, 0x00,
+            0x00, 0x48, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x01,
+            0x12,
+            0x62, 0x69, 0x6E, 0x65,
+            0x6C, 0x70, 0x72, 0x6F,
+            0x2E, 0x72, 0x75, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00,
+            0x00, 0x18,
+            0x11, 0x11
+        ]), avcc, MP4.box(MP4.types.btrt, new Uint8Array([
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x2d, 0xc6, 0xc0,
+            0x00, 0x2d, 0xc6, 0xc0,
+        ])));
+    };
+    MP4.stsd = function (track) {
+        return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
+    };
+    MP4.tkhd = function (track) {
+        var id = track.id;
+        var width = track.width;
+        var height = track.height;
+        return MP4.box(MP4.types.tkhd, new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x01,
+            0x00, 0x00, 0x00, 0x01,
+            0x00, 0x00, 0x00, 0x02,
+            (id >> 24) & 0xFF,
+            (id >> 16) & 0xFF,
+            (id >> 8) & 0xFF,
+            id & 0xFF,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00,
+            0x00, 0x00,
+            (track.type === 'audio' ? 0x01 : 0x00), 0x00,
+            0x00, 0x00,
+            0x00, 0x01, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x01, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x00, 0x00, 0x00,
+            0x40, 0x00, 0x00, 0x00,
+            (width >> 8) & 0xFF,
+            width & 0xFF,
+            0x00, 0x00,
+            (height >> 8) & 0xFF,
+            height & 0xFF,
+            0x00, 0x00,
+        ]));
+    };
+    MP4.traf = function (track, baseMediaDecodeTime) {
+        var id = track.id;
+        return MP4.box(MP4.types.traf, MP4.box(MP4.types.tfhd, new Uint8Array([
+            0x00,
+            0x02, 0x00, 0x00,
+            (id >> 24),
+            (id >> 16) & 0XFF,
+            (id >> 8) & 0XFF,
+            (id & 0xFF),
+        ])), MP4.box(MP4.types.tfdt, new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            (baseMediaDecodeTime >> 24),
+            (baseMediaDecodeTime >> 16) & 0XFF,
+            (baseMediaDecodeTime >> 8) & 0XFF,
+            (baseMediaDecodeTime & 0xFF),
+        ])), MP4.trun(track, 16 +
+            16 +
+            8 +
+            16 +
+            8 +
+            8));
+    };
+    MP4.trak = function (track) {
+        track.duration = track.duration || 0xffffffff;
+        return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
+    };
+    MP4.trex = function (track) {
+        var id = track.id;
+        return MP4.box(MP4.types.trex, new Uint8Array([
+            0x00,
+            0x00, 0x00, 0x00,
+            (id >> 24),
+            (id >> 16) & 0XFF,
+            (id >> 8) & 0XFF,
+            (id & 0xFF),
+            0x00, 0x00, 0x00, 0x01,
+            0x00, 0x00, 0x00, 0x3c,
+            0x00, 0x00, 0x00, 0x00,
+            0x00, 0x01, 0x00, 0x00,
+        ]));
+    };
+    MP4.trun = function (track, offset) {
+        var samples = track.samples || [];
+        var len = samples.length;
+        var additionalLen = track.isKeyFrame ? 4 : 0;
+        var arraylen = 12 + additionalLen + (4 * len);
+        var array = new Uint8Array(arraylen);
+        offset += 8 + arraylen;
+        array.set([
+            0x00,
+            0x00, 0x02, (track.isKeyFrame ? 0x05 : 0x01),
+            (len >>> 24) & 0xFF,
+            (len >>> 16) & 0xFF,
+            (len >>> 8) & 0xFF,
+            len & 0xFF,
+            (offset >>> 24) & 0xFF,
+            (offset >>> 16) & 0xFF,
+            (offset >>> 8) & 0xFF,
+            offset & 0xFF,
+        ], 0);
+        if (track.isKeyFrame) {
+            array.set([
+                0x00, 0x00, 0x00, 0x00,
+            ], 12);
+        }
+        for (var i = 0; i < len; i++) {
+            var sample = samples[i];
+            var size = sample.size;
+            array.set([
+                (size >>> 24) & 0xFF,
+                (size >>> 16) & 0xFF,
+                (size >>> 8) & 0xFF,
+                size & 0xFF,
+            ], 12 + additionalLen + 4 * i);
+        }
+        return MP4.box(MP4.types.trun, array);
+    };
+    MP4.initSegment = function (tracks, duration, timescale) {
+        if (!MP4.initalized) {
+            MP4.init();
+        }
+        var movie = MP4.moov(tracks, duration, timescale);
+        var result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength);
+        result.set(MP4.FTYP);
+        result.set(movie, MP4.FTYP.byteLength);
+        return result;
+    };
+    MP4.fragmentSegment = function (sn, baseMediaDecodeTime, track, payload) {
+        var moof = MP4.moof(sn, baseMediaDecodeTime, track);
+        var mdat = MP4.mdat(payload);
+        var result = new Uint8Array(MP4.STYP.byteLength + moof.byteLength + mdat.byteLength);
+        result.set(MP4.STYP);
+        result.set(moof, MP4.STYP.byteLength);
+        result.set(mdat, MP4.STYP.byteLength + moof.byteLength);
+        return result;
+    };
+    return MP4;
+}());
+MP4.types = {};
+MP4.initalized = false;
+exports.default = MP4;
 
-  // A linked list is used to store data chunks instead of an array because the
-  // linked list can remove elements from the beginning faster than
-  // array.shift()
-  this.buffer = new BufferList();
-  this.length = 0;
-  this.pipes = null;
-  this.pipesCount = 0;
-  this.flowing = null;
-  this.ended = false;
-  this.endEmitted = false;
-  this.reading = false;
+},{}],26:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var NALU = (function () {
+    function NALU(data) {
+        this.data = data;
+        this.nri = (data[0] & 0x60) >> 5;
+        this.ntype = data[0] & 0x1f;
+    }
+    Object.defineProperty(NALU, "NDR", {
+        get: function () { return 1; },
+        enumerable: true,
+        configurable: true
+    });
+    Object.defineProperty(NALU, "IDR", {
+        get: function () { return 5; },
+        enumerable: true,
+        configurable: true
+    });
+    Object.defineProperty(NALU, "SEI", {
+        get: function () { return 6; },
+        enumerable: true,
+        configurable: true
+    });
+    Object.defineProperty(NALU, "SPS", {
+        get: function () { return 7; },
+        enumerable: true,
+        configurable: true
+    });
+    Object.defineProperty(NALU, "PPS", {
+        get: function () { return 8; },
+        enumerable: true,
+        configurable: true
+    });
+    Object.defineProperty(NALU, "TYPES", {
+        get: function () {
+            return _a = {},
+                _a[NALU.IDR] = 'IDR',
+                _a[NALU.SEI] = 'SEI',
+                _a[NALU.SPS] = 'SPS',
+                _a[NALU.PPS] = 'PPS',
+                _a[NALU.NDR] = 'NDR',
+                _a;
+            var _a;
+        },
+        enumerable: true,
+        configurable: true
+    });
+    NALU.type = function (nalu) {
+        if (nalu.ntype in NALU.TYPES) {
+            return NALU.TYPES[nalu.ntype];
+        }
+        else {
+            return 'UNKNOWN';
+        }
+    };
+    NALU.prototype.type = function () {
+        return this.ntype;
+    };
+    NALU.prototype.isKeyframe = function () {
+        return this.ntype === NALU.IDR;
+    };
+    NALU.prototype.getSize = function () {
+        return 4 + this.data.byteLength;
+    };
+    NALU.prototype.getData = function () {
+        var result = new Uint8Array(this.getSize());
+        var view = new DataView(result.buffer);
+        view.setUint32(0, this.getSize() - 4);
+        result.set(this.data, 4);
+        return result;
+    };
+    return NALU;
+}());
+exports.default = NALU;
 
-  // a flag to be able to tell if the event 'readable'/'data' is emitted
-  // immediately, or on a later tick.  We set this to true at first, because
-  // any actions that shouldn't happen until "later" should generally also
-  // not happen before the first read call.
-  this.sync = true;
+},{}],27:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var BitStream = (function () {
+    function BitStream(data) {
+        this.data = data;
+        this.index = 0;
+        this.bitLength = data.byteLength * 8;
+    }
+    Object.defineProperty(BitStream.prototype, "bitsAvailable", {
+        get: function () {
+            return this.bitLength - this.index;
+        },
+        enumerable: true,
+        configurable: true
+    });
+    BitStream.prototype.skipBits = function (size) {
+        if (this.bitsAvailable < size) {
+            throw new Error('no bytes available');
+        }
+        this.index += size;
+    };
+    BitStream.prototype.readBits = function (size) {
+        var result = this.getBits(size, this.index);
+        return result;
+    };
+    BitStream.prototype.getBits = function (size, offsetBits, moveIndex) {
+        if (moveIndex === void 0) { moveIndex = true; }
+        if (this.bitsAvailable < size) {
+            throw new Error('no bytes available');
+        }
+        var offset = offsetBits % 8;
+        var byte = this.data[(offsetBits / 8) | 0] & (0xff >>> offset);
+        var bits = 8 - offset;
+        if (bits >= size) {
+            if (moveIndex) {
+                this.index += size;
+            }
+            return byte >> (bits - size);
+        }
+        else {
+            if (moveIndex) {
+                this.index += bits;
+            }
+            var nextSize = size - bits;
+            return (byte << nextSize) | this.getBits(nextSize, offsetBits + bits, moveIndex);
+        }
+    };
+    BitStream.prototype.skipLZ = function () {
+        var leadingZeroCount;
+        for (leadingZeroCount = 0; leadingZeroCount < this.bitLength - this.index; ++leadingZeroCount) {
+            if (0 !== this.getBits(1, this.index + leadingZeroCount, false)) {
+                this.index += leadingZeroCount;
+                return leadingZeroCount;
+            }
+        }
+        return leadingZeroCount;
+    };
+    BitStream.prototype.skipUEG = function () {
+        this.skipBits(1 + this.skipLZ());
+    };
+    BitStream.prototype.skipEG = function () {
+        this.skipBits(1 + this.skipLZ());
+    };
+    BitStream.prototype.readUEG = function () {
+        var prefix = this.skipLZ();
+        return this.readBits(prefix + 1) - 1;
+    };
+    BitStream.prototype.readEG = function () {
+        var value = this.readUEG();
+        if (0x01 & value) {
+            return (1 + value) >>> 1;
+        }
+        else {
+            return -1 * (value >>> 1);
+        }
+    };
+    BitStream.prototype.readBoolean = function () {
+        return 1 === this.readBits(1);
+    };
+    BitStream.prototype.readUByte = function () {
+        return this.readBits(8);
+    };
+    BitStream.prototype.readUShort = function () {
+        return this.readBits(16);
+    };
+    BitStream.prototype.readUInt = function () {
+        return this.readBits(32);
+    };
+    return BitStream;
+}());
+exports.default = BitStream;
 
-  // whenever we return null, then we set a flag to say
-  // that we're awaiting a 'readable' event emission.
-  this.needReadable = false;
-  this.emittedReadable = false;
-  this.readableListening = false;
-  this.resumeScheduled = false;
+},{}],28:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var logger;
+var errorLogger;
+function setLogger(log, error) {
+    logger = log;
+    errorLogger = error != null ? error : log;
+}
+exports.setLogger = setLogger;
+function isEnable() {
+    return logger != null;
+}
+exports.isEnable = isEnable;
+function log(message) {
+    var optionalParams = [];
+    for (var _i = 1; _i < arguments.length; _i++) {
+        optionalParams[_i - 1] = arguments[_i];
+    }
+    if (logger) {
+        logger.apply(void 0, [message].concat(optionalParams));
+    }
+}
+exports.log = log;
+function error(message) {
+    var optionalParams = [];
+    for (var _i = 1; _i < arguments.length; _i++) {
+        optionalParams[_i - 1] = arguments[_i];
+    }
+    if (errorLogger) {
+        errorLogger.apply(void 0, [message].concat(optionalParams));
+    }
+}
+exports.error = error;
 
-  // has it been destroyed
-  this.destroyed = false;
+},{}],29:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var NALU_1 = require("./NALU");
+var VideoStreamBuffer = (function () {
+    function VideoStreamBuffer() {
+    }
+    VideoStreamBuffer.prototype.clear = function () {
+        this.buffer = undefined;
+    };
+    VideoStreamBuffer.prototype.append = function (value) {
+        var nextNalHeader = function (b) {
+            var i = 3;
+            return function () {
+                var count = 0;
+                for (; i < b.length; i++) {
+                    switch (b[i]) {
+                        case 0:
+                            count++;
+                            break;
+                        case 1:
+                            if (count === 3) {
+                                return i - 3;
+                            }
+                        default:
+                            count = 0;
+                    }
+                }
+                return;
+            };
+        };
+        var result = [];
+        var buffer;
+        if (this.buffer) {
+            if (value[3] === 1 && value[2] === 0 && value[1] === 0 && value[0] === 0) {
+                result.push(new NALU_1.default(this.buffer.subarray(4)));
+                buffer = Uint8Array.from(value);
+            }
+        }
+        if (buffer == null) {
+            buffer = this.mergeBuffer(value);
+        }
+        var lastIndex = 0;
+        var f = nextNalHeader(buffer);
+        for (var index = f(); index != null; index = f()) {
+            result.push(new NALU_1.default(buffer.subarray(lastIndex + 4, index)));
+            lastIndex = index;
+        }
+        this.buffer = buffer.subarray(lastIndex);
+        return result;
+    };
+    VideoStreamBuffer.prototype.mergeBuffer = function (value) {
+        if (this.buffer == null) {
+            return Uint8Array.from(value);
+        }
+        else {
+            var newBuffer = new Uint8Array(this.buffer.byteLength + value.length);
+            if (this.buffer.byteLength > 0) {
+                newBuffer.set(this.buffer, 0);
+            }
+            newBuffer.set(value, this.buffer.byteLength);
+            return newBuffer;
+        }
+    };
+    return VideoStreamBuffer;
+}());
+exports.default = VideoStreamBuffer;
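+// Usage sketch (illustrative only): append() splits an incoming Annex-B byte
+// stream on 0x00 0x00 0x00 0x01 start codes, returns the complete NAL units
+// found so far, and keeps any trailing partial unit buffered for the next call.
+//
+//   var streamBuffer = new VideoStreamBuffer();
+//   var nalus = streamBuffer.append(chunk);  // chunk: Uint8Array from the network
+//   nalus.forEach(function (nalu) { /* hand each NALU to the remuxer */ });
+//   streamBuffer.clear();                    // drops any partially buffered unit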
 
-  // Crypto is kind of old and crusty.  Historically, its default string
-  // encoding is 'binary' so we have to make this configurable.
-  // Everything else in the universe uses 'utf8', though.
-  this.defaultEncoding = options.defaultEncoding || 'utf8';
+},{"./NALU":26}],30:[function(require,module,exports){
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var h264_remuxer_1 = require("./h264-remuxer");
+var mp4_generator_1 = require("./mp4-generator");
+var debug = require("./util/debug");
+var nalu_stream_buffer_1 = require("./util/nalu-stream-buffer");
+exports.mimeType = 'video/mp4; codecs="avc1.42E01E"';
+var VideoConverter = (function () {
+    function VideoConverter(element, fps, fpf) {
+        if (fps === void 0) { fps = 60; }
+        if (fpf === void 0) { fpf = fps; }
+        this.element = element;
+        this.fps = fps;
+        this.fpf = fpf;
+        this.receiveBuffer = new nalu_stream_buffer_1.default();
+        this.queue = [];
+        if (!MediaSource || !MediaSource.isTypeSupported(exports.mimeType)) {
+            throw new Error("Your browser is not supported: " + exports.mimeType);
+        }
+        this.reset();
+    }
+    Object.defineProperty(VideoConverter, "errorNotes", {
+        get: function () {
+            return _a = {},
+                _a[MediaError.MEDIA_ERR_ABORTED] = 'fetching process aborted by user',
+                _a[MediaError.MEDIA_ERR_NETWORK] = 'error occurred when downloading',
+                _a[MediaError.MEDIA_ERR_DECODE] = 'error occurred when decoding',
+                _a[MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED] = 'audio/video not supported',
+                _a;
+            var _a;
+        },
+        enumerable: true,
+        configurable: true
+    });
+    VideoConverter.prototype.setup = function () {
+        var _this = this;
+        this.mediaReadyPromise = new Promise(function (resolve, _reject) {
+            _this.mediaSource.addEventListener('sourceopen', function () {
+                debug.log("Media Source opened.");
+                _this.sourceBuffer = _this.mediaSource.addSourceBuffer(exports.mimeType);
+                _this.sourceBuffer.addEventListener('updateend', function () {
+                    debug.log("  SourceBuffer updateend");
+                    debug.log("    sourceBuffer.buffered.length=" + _this.sourceBuffer.buffered.length);
+                    for (var i = 0, len = _this.sourceBuffer.buffered.length; i < len; i++) {
+                        debug.log("    sourceBuffer.buffered [" + i + "]: " +
+                            (_this.sourceBuffer.buffered.start(i) + ", " + _this.sourceBuffer.buffered.end(i)));
+                    }
+                    debug.log("  mediasource.duration=" + _this.mediaSource.duration);
+                    debug.log("  mediasource.readyState=" + _this.mediaSource.readyState);
+                    debug.log("  video.duration=" + _this.element.duration);
+                    debug.log("    video.buffered.length=" + _this.element.buffered.length);
+                    if (debug.isEnable()) {
+                        for (var i = 0, len = _this.element.buffered.length; i < len; i++) {
+                            debug.log("    video.buffered [" + i + "]: " + _this.element.buffered.start(i) + ", " + _this.element.buffered.end(i));
+                        }
+                    }
+                    debug.log("  video.currentTime=" + _this.element.currentTime);
+                    debug.log("  video.readyState=" + _this.element.readyState);
+                    var data = _this.queue.shift();
+                    if (data) {
+                        _this.writeBuffer(data);
+                    }
+                });
+                _this.sourceBuffer.addEventListener('error', function () {
+                    debug.error('  SourceBuffer errored!');
+                });
+                _this.mediaReady = true;
+                resolve();
+            }, false);
+            _this.mediaSource.addEventListener('sourceclose', function () {
+                debug.log("Media Source closed.");
+                _this.mediaReady = false;
+            }, false);
+            _this.element.src = URL.createObjectURL(_this.mediaSource);
+        });
+        return this.mediaReadyPromise;
+    };
+    VideoConverter.prototype.play = function () {
+        var _this = this;
+        if (!this.element.paused) {
+            return;
+        }
+        if (this.mediaReady && this.element.readyState >= 2) {
+            this.element.play();
+        }
+        else {
+            var handler_1 = function () {
+                _this.play();
+                _this.element.removeEventListener('canplaythrough', handler_1);
+            };
+            this.element.addEventListener('canplaythrough', handler_1);
+        }
+    };
+    VideoConverter.prototype.pause = function () {
+        if (this.element.paused) {
+            return;
+        }
+        this.element.pause();
+    };
+    VideoConverter.prototype.reset = function () {
+        this.receiveBuffer.clear();
+        if (this.mediaSource && this.mediaSource.readyState === 'open') {
+            this.mediaSource.duration = 0;
+            this.mediaSource.endOfStream();
+        }
+        this.mediaSource = new MediaSource();
+        this.remuxer = new h264_remuxer_1.default(this.fps, this.fpf, this.fps * 60);
+        this.mediaReady = false;
+        this.mediaReadyPromise = undefined;
+        this.queue = [];
+        this.isFirstFrame = true;
+        this.setup();
+    };
+    VideoConverter.prototype.appendRawData = function (data) {
+        var nalus = this.receiveBuffer.append(data);
+        for (var _i = 0, nalus_1 = nalus; _i < nalus_1.length; _i++) {
+            var nalu = nalus_1[_i];
+            var ret = this.remuxer.remux(nalu);
+            if (ret) {
+                this.writeFragment(ret[0], ret[1]);
+            }
+        }
+    };
+    VideoConverter.prototype.writeFragment = function (dts, pay) {
+        var remuxer = this.remuxer;
+        if (remuxer.mp4track.isKeyFrame) {
+            this.writeBuffer(mp4_generator_1.default.initSegment([remuxer.mp4track], Infinity, remuxer.timescale));
+        }
+        if (pay && pay.byteLength) {
+            debug.log(" Put fragment: " + remuxer.seqNum + ", frames=" + remuxer.mp4track.samples.length + ", size=" + pay.byteLength);
+            var fragment = mp4_generator_1.default.fragmentSegment(remuxer.seqNum, dts, remuxer.mp4track, pay);
+            this.writeBuffer(fragment);
+            remuxer.flush();
+        }
+        else {
+            debug.error("Nothing payload!");
+        }
+    };
+    VideoConverter.prototype.writeBuffer = function (data) {
+        var _this = this;
+        if (this.mediaReady) {
+            if (this.sourceBuffer.updating) {
+                this.queue.push(data);
+            }
+            else {
+                this.doAppend(data);
+            }
+        }
+        else {
+            this.queue.push(data);
+            if (this.mediaReadyPromise) {
+                this.mediaReadyPromise.then(function () {
+                    if (!_this.sourceBuffer.updating) {
+                        var d = _this.queue.shift();
+                        if (d) {
+                            _this.writeBuffer(d);
+                        }
+                    }
+                });
+                this.mediaReadyPromise = undefined;
+            }
+        }
+    };
+    VideoConverter.prototype.doAppend = function (data) {
+        var error = this.element.error;
+        if (error) {
+            debug.error("MSE Error Occured: " + VideoConverter.errorNotes[error.code]);
+            this.element.pause();
+            if (this.mediaSource.readyState === 'open') {
+                this.mediaSource.endOfStream();
+            }
+        }
+        else {
+            try {
+                this.sourceBuffer.appendBuffer(data);
+                debug.log("  appended buffer: size=" + data.byteLength);
+            }
+            catch (err) {
+                debug.error("MSE Error occured while appending buffer. " + err.name + ": " + err.message);
+            }
+        }
+    };
+    return VideoConverter;
+}());
+exports.default = VideoConverter;
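+// Usage sketch (illustrative only; the element id and the fps/fpf values are
+// assumptions): the converter remuxes raw H.264 NAL data into MP4 fragments
+// and appends them to a Media Source Extensions SourceBuffer attached to a
+// <video> element.
+//
+//   var video = document.getElementById('player-video');  // assumed element id
+//   var converter = new VideoConverter(video, 30, 1);      // element, fps, frames per fragment
+//   converter.appendRawData(h264Chunk);                    // raw Annex-B bytes from the server
+//   converter.play();
+//   // converter.reset() tears down the MediaSource and starts a fresh stream.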
 
-  // the number of writers that are awaiting a drain event in .pipe()s
-  this.awaitDrain = 0;
+},{"./h264-remuxer":24,"./mp4-generator":25,"./util/debug":28,"./util/nalu-stream-buffer":29}],31:[function(require,module,exports){
+arguments[4][1][0].apply(exports,arguments)
+},{"dup":1,"readable-stream":48,"safe-buffer":49,"util":74}],32:[function(require,module,exports){
+(function (Buffer){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-  // if true, a maybeReadMore has been scheduled
-  this.readingMore = false;
+// NOTE: These type checking functions intentionally don't use `instanceof`
+// because it is fragile and can be easily faked with `Object.create()`.
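+// For example, `Object.create(Array.prototype) instanceof Array` is true even
+// though no real array was constructed, while Object.prototype.toString.call()
+// still reports '[object Object]' for that value.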
 
-  this.decoder = null;
-  this.encoding = null;
-  if (options.encoding) {
-    if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
-    this.decoder = new StringDecoder(options.encoding);
-    this.encoding = options.encoding;
+function isArray(arg) {
+  if (Array.isArray) {
+    return Array.isArray(arg);
   }
+  return objectToString(arg) === '[object Array]';
 }
+exports.isArray = isArray;
 
-function Readable(options) {
-  Duplex = Duplex || require('./_stream_duplex');
-
-  if (!(this instanceof Readable)) return new Readable(options);
-
-  this._readableState = new ReadableState(options, this);
-
-  // legacy
-  this.readable = true;
-
-  if (options) {
-    if (typeof options.read === 'function') this._read = options.read;
-
-    if (typeof options.destroy === 'function') this._destroy = options.destroy;
-  }
-
-  Stream.call(this);
+function isBoolean(arg) {
+  return typeof arg === 'boolean';
 }
+exports.isBoolean = isBoolean;
 
-Object.defineProperty(Readable.prototype, 'destroyed', {
-  get: function () {
-    if (this._readableState === undefined) {
-      return false;
-    }
-    return this._readableState.destroyed;
-  },
-  set: function (value) {
-    // we ignore the value if the stream
-    // has not been initialized yet
-    if (!this._readableState) {
-      return;
-    }
-
-    // backward compatibility, the user is explicitly
-    // managing destroyed
-    this._readableState.destroyed = value;
-  }
-});
-
-Readable.prototype.destroy = destroyImpl.destroy;
-Readable.prototype._undestroy = destroyImpl.undestroy;
-Readable.prototype._destroy = function (err, cb) {
-  this.push(null);
-  cb(err);
-};
+function isNull(arg) {
+  return arg === null;
+}
+exports.isNull = isNull;
 
-// Manually shove something into the read() buffer.
-// This returns true if the highWaterMark has not been hit yet,
-// similar to how Writable.write() returns true if you should
-// write() some more.
-Readable.prototype.push = function (chunk, encoding) {
-  var state = this._readableState;
-  var skipChunkCheck;
+function isNullOrUndefined(arg) {
+  return arg == null;
+}
+exports.isNullOrUndefined = isNullOrUndefined;
 
-  if (!state.objectMode) {
-    if (typeof chunk === 'string') {
-      encoding = encoding || state.defaultEncoding;
-      if (encoding !== state.encoding) {
-        chunk = Buffer.from(chunk, encoding);
-        encoding = '';
-      }
-      skipChunkCheck = true;
-    }
-  } else {
-    skipChunkCheck = true;
-  }
+function isNumber(arg) {
+  return typeof arg === 'number';
+}
+exports.isNumber = isNumber;
 
-  return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
-};
+function isString(arg) {
+  return typeof arg === 'string';
+}
+exports.isString = isString;
 
-// Unshift should *always* be something directly out of read()
-Readable.prototype.unshift = function (chunk) {
-  return readableAddChunk(this, chunk, null, true, false);
-};
+function isSymbol(arg) {
+  return typeof arg === 'symbol';
+}
+exports.isSymbol = isSymbol;
 
-function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
-  var state = stream._readableState;
-  if (chunk === null) {
-    state.reading = false;
-    onEofChunk(stream, state);
-  } else {
-    var er;
-    if (!skipChunkCheck) er = chunkInvalid(state, chunk);
-    if (er) {
-      stream.emit('error', er);
-    } else if (state.objectMode || chunk && chunk.length > 0) {
-      if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
-        chunk = _uint8ArrayToBuffer(chunk);
-      }
+function isUndefined(arg) {
+  return arg === void 0;
+}
+exports.isUndefined = isUndefined;
 
-      if (addToFront) {
-        if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
-      } else if (state.ended) {
-        stream.emit('error', new Error('stream.push() after EOF'));
-      } else {
-        state.reading = false;
-        if (state.decoder && !encoding) {
-          chunk = state.decoder.write(chunk);
-          if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
-        } else {
-          addChunk(stream, state, chunk, false);
-        }
-      }
-    } else if (!addToFront) {
-      state.reading = false;
-    }
-  }
+function isRegExp(re) {
+  return objectToString(re) === '[object RegExp]';
+}
+exports.isRegExp = isRegExp;
 
-  return needMoreData(state);
+function isObject(arg) {
+  return typeof arg === 'object' && arg !== null;
 }
+exports.isObject = isObject;
 
-function addChunk(stream, state, chunk, addToFront) {
-  if (state.flowing && state.length === 0 && !state.sync) {
-    stream.emit('data', chunk);
-    stream.read(0);
-  } else {
-    // update the buffer info.
-    state.length += state.objectMode ? 1 : chunk.length;
-    if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
+function isDate(d) {
+  return objectToString(d) === '[object Date]';
+}
+exports.isDate = isDate;
 
-    if (state.needReadable) emitReadable(stream);
-  }
-  maybeReadMore(stream, state);
+function isError(e) {
+  return (objectToString(e) === '[object Error]' || e instanceof Error);
 }
+exports.isError = isError;
 
-function chunkInvalid(state, chunk) {
-  var er;
-  if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
-    er = new TypeError('Invalid non-string/buffer chunk');
-  }
-  return er;
+function isFunction(arg) {
+  return typeof arg === 'function';
 }
+exports.isFunction = isFunction;
 
-// if it's past the high water mark, we can push in some more.
-// Also, if we have no data yet, we can stand some
-// more bytes.  This is to work around cases where hwm=0,
-// such as the repl.  Also, if the push() triggered a
-// readable event, and the user called read(largeNumber) such that
-// needReadable was set, then we ought to push more, so that another
-// 'readable' event will be triggered.
-function needMoreData(state) {
-  return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
+function isPrimitive(arg) {
+  return arg === null ||
+         typeof arg === 'boolean' ||
+         typeof arg === 'number' ||
+         typeof arg === 'string' ||
+         typeof arg === 'symbol' ||  // ES6 symbol
+         typeof arg === 'undefined';
 }
+exports.isPrimitive = isPrimitive;
 
-Readable.prototype.isPaused = function () {
-  return this._readableState.flowing === false;
-};
+exports.isBuffer = Buffer.isBuffer;
 
-// backwards compatibility.
-Readable.prototype.setEncoding = function (enc) {
-  if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
-  this._readableState.decoder = new StringDecoder(enc);
-  this._readableState.encoding = enc;
-  return this;
-};
+function objectToString(o) {
+  return Object.prototype.toString.call(o);
+}
 
-// Don't raise the hwm > 8MB
-var MAX_HWM = 0x800000;
-function computeNewHighWaterMark(n) {
-  if (n >= MAX_HWM) {
-    n = MAX_HWM;
-  } else {
-    // Get the next highest power of 2 to prevent increasing hwm excessively in
-    // tiny amounts
-    n--;
-    n |= n >>> 1;
-    n |= n >>> 2;
-    n |= n >>> 4;
-    n |= n >>> 8;
-    n |= n >>> 16;
-    n++;
-  }
-  return n;
+}).call(this,{"isBuffer":require("../../../../../../../../../usr/local/lib/node_modules/browserify/node_modules/is-buffer/index.js")})
+},{"../../../../../../../../../usr/local/lib/node_modules/browserify/node_modules/is-buffer/index.js":68}],33:[function(require,module,exports){
+arguments[4][3][0].apply(exports,arguments)
+},{"dup":3}],34:[function(require,module,exports){
+arguments[4][4][0].apply(exports,arguments)
+},{"dup":4}],35:[function(require,module,exports){
+arguments[4][5][0].apply(exports,arguments)
+},{"./lib/decoder":36,"./lib/encoder":37,"./lib/streams":38,"assert":59,"bl":31,"dup":5,"safe-buffer":49}],36:[function(require,module,exports){
+arguments[4][6][0].apply(exports,arguments)
+},{"bl":31,"dup":6,"util":74}],37:[function(require,module,exports){
+arguments[4][7][0].apply(exports,arguments)
+},{"bl":31,"dup":7,"safe-buffer":49}],38:[function(require,module,exports){
+arguments[4][8][0].apply(exports,arguments)
+},{"bl":31,"dup":8,"inherits":33,"readable-stream":48}],39:[function(require,module,exports){
+(function (process){
+'use strict';
+
+if (typeof process === 'undefined' ||
+    !process.version ||
+    process.version.indexOf('v0.') === 0 ||
+    process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {
+  module.exports = { nextTick: nextTick };
+} else {
+  module.exports = process
 }
 
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function howMuchToRead(n, state) {
-  if (n <= 0 || state.length === 0 && state.ended) return 0;
-  if (state.objectMode) return 1;
-  if (n !== n) {
-    // Only flow one buffer at a time
-    if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
+function nextTick(fn, arg1, arg2, arg3) {
+  if (typeof fn !== 'function') {
+    throw new TypeError('"callback" argument must be a function');
   }
-  // If we're asking for more than the current hwm, then raise the hwm.
-  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
-  if (n <= state.length) return n;
-  // Don't have enough
-  if (!state.ended) {
-    state.needReadable = true;
-    return 0;
+  var len = arguments.length;
+  var args, i;
+  switch (len) {
+  case 0:
+  case 1:
+    return process.nextTick(fn);
+  case 2:
+    return process.nextTick(function afterTickOne() {
+      fn.call(null, arg1);
+    });
+  case 3:
+    return process.nextTick(function afterTickTwo() {
+      fn.call(null, arg1, arg2);
+    });
+  case 4:
+    return process.nextTick(function afterTickThree() {
+      fn.call(null, arg1, arg2, arg3);
+    });
+  default:
+    args = new Array(len - 1);
+    i = 0;
+    while (i < args.length) {
+      args[i++] = arguments[i];
+    }
+    return process.nextTick(function afterTick() {
+      fn.apply(null, args);
+    });
   }
-  return state.length;
 }
 
-// you can override either this method, or the async _read(n) below.
-Readable.prototype.read = function (n) {
-  debug('read', n);
-  n = parseInt(n, 10);
-  var state = this._readableState;
-  var nOrig = n;
 
-  if (n !== 0) state.emittedReadable = false;
+}).call(this,require('_process'))
+},{"_process":70}],40:[function(require,module,exports){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-  // if we're doing read(0) to trigger a readable event, but we
-  // already have a bunch of data in the buffer, then just trigger
-  // the 'readable' event and move on.
-  if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
-    debug('read: emitReadable', state.length, state.ended);
-    if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
-    return null;
-  }
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototypal inheritance, this class
+// prototypally inherits from Readable, and then parasitically from
+// Writable.
 
-  n = howMuchToRead(n, state);
+'use strict';
 
-  // if we've ended, and we're now clear, then finish it up.
-  if (n === 0 && state.ended) {
-    if (state.length === 0) endReadable(this);
-    return null;
-  }
+/*<replacement>*/
 
-  // All the actual chunk generation logic needs to be
-  // *below* the call to _read.  The reason is that in certain
-  // synthetic stream cases, such as passthrough streams, _read
-  // may be a completely synchronous operation which may change
-  // the state of the read buffer, providing enough data when
-  // before there was *not* enough.
-  //
-  // So, the steps are:
-  // 1. Figure out what the state of things will be after we do
-  // a read from the buffer.
-  //
-  // 2. If that resulting state will trigger a _read, then call _read.
-  // Note that this may be asynchronous, or synchronous.  Yes, it is
-  // deeply ugly to write APIs this way, but that still doesn't mean
-  // that the Readable class should behave improperly, as streams are
-  // designed to be sync/async agnostic.
-  // Take note if the _read call is sync or async (ie, if the read call
-  // has returned yet), so that we know whether or not it's safe to emit
-  // 'readable' etc.
-  //
-  // 3. Actually pull the requested chunks out of the buffer and return.
+var pna = require('process-nextick-args');
+/*</replacement>*/
 
-  // if we need a readable event, then we need to do some reading.
-  var doRead = state.needReadable;
-  debug('need readable', doRead);
+/*<replacement>*/
+var objectKeys = Object.keys || function (obj) {
+  var keys = [];
+  for (var key in obj) {
+    keys.push(key);
+  }return keys;
+};
+/*</replacement>*/
 
-  // if we currently have less than the highWaterMark, then also read some
-  if (state.length === 0 || state.length - n < state.highWaterMark) {
-    doRead = true;
-    debug('length less than watermark', doRead);
-  }
+module.exports = Duplex;
 
-  // however, if we've ended, then there's no point, and if we're already
-  // reading, then it's unnecessary.
-  if (state.ended || state.reading) {
-    doRead = false;
-    debug('reading or ended', doRead);
-  } else if (doRead) {
-    debug('do read');
-    state.reading = true;
-    state.sync = true;
-    // if the length is currently zero, then we *need* a readable event.
-    if (state.length === 0) state.needReadable = true;
-    // call internal read method
-    this._read(state.highWaterMark);
-    state.sync = false;
-    // If _read pushed data synchronously, then `reading` will be false,
-    // and we need to re-evaluate how much data we can return to the user.
-    if (!state.reading) n = howMuchToRead(nOrig, state);
-  }
+/*<replacement>*/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/*</replacement>*/
 
-  var ret;
-  if (n > 0) ret = fromList(n, state);else ret = null;
+var Readable = require('./_stream_readable');
+var Writable = require('./_stream_writable');
 
-  if (ret === null) {
-    state.needReadable = true;
-    n = 0;
-  } else {
-    state.length -= n;
+util.inherits(Duplex, Readable);
+
+{
+  // avoid scope creep, the keys array can then be collected
+  var keys = objectKeys(Writable.prototype);
+  for (var v = 0; v < keys.length; v++) {
+    var method = keys[v];
+    if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
   }
+}
 
-  if (state.length === 0) {
-    // If we have nothing in the buffer, then we want to know
-    // as soon as we *do* get something into the buffer.
-    if (!state.ended) state.needReadable = true;
+function Duplex(options) {
+  if (!(this instanceof Duplex)) return new Duplex(options);
 
-    // If we tried to read() past the EOF, then emit end on the next tick.
-    if (nOrig !== n && state.ended) endReadable(this);
-  }
+  Readable.call(this, options);
+  Writable.call(this, options);
 
-  if (ret !== null) this.emit('data', ret);
+  if (options && options.readable === false) this.readable = false;
 
-  return ret;
-};
+  if (options && options.writable === false) this.writable = false;
 
-function onEofChunk(stream, state) {
-  if (state.ended) return;
-  if (state.decoder) {
-    var chunk = state.decoder.end();
-    if (chunk && chunk.length) {
-      state.buffer.push(chunk);
-      state.length += state.objectMode ? 1 : chunk.length;
-    }
+  this.allowHalfOpen = true;
+  if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
+
+  this.once('end', onend);
+}
+
+Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
+  // making it explicit this property is not enumerable
+  // because otherwise some prototype manipulation in
+  // userland will fail
+  enumerable: false,
+  get: function () {
+    return this._writableState.highWaterMark;
   }
-  state.ended = true;
+});
 
-  // emit 'readable' now to make sure it gets picked up.
-  emitReadable(stream);
-}
+// the no-half-open enforcer
+function onend() {
+  // if we allow half-open state, or if the writable side ended,
+  // then we're ok.
+  if (this.allowHalfOpen || this._writableState.ended) return;
 
-// Don't emit readable right away in sync mode, because this can trigger
-// another read() call => stack overflow.  This way, it might trigger
-// a nextTick recursion warning, but that's not so bad.
-function emitReadable(stream) {
-  var state = stream._readableState;
-  state.needReadable = false;
-  if (!state.emittedReadable) {
-    debug('emitReadable', state.flowing);
-    state.emittedReadable = true;
-    if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
-  }
+  // no more data can be written.
+  // But allow more writes to happen in this tick.
+  pna.nextTick(onEndNT, this);
 }
 
-function emitReadable_(stream) {
-  debug('emit readable');
-  stream.emit('readable');
-  flow(stream);
+function onEndNT(self) {
+  self.end();
 }
 
-// at this point, the user has presumably seen the 'readable' event,
-// and called read() to consume some data.  that may have triggered
-// in turn another _read(n) call, in which case reading = true if
-// it's in progress.
-// However, if we're not ended, or reading, and the length < hwm,
-// then go ahead and try to read some more preemptively.
-function maybeReadMore(stream, state) {
-  if (!state.readingMore) {
-    state.readingMore = true;
-    pna.nextTick(maybeReadMore_, stream, state);
-  }
-}
+Object.defineProperty(Duplex.prototype, 'destroyed', {
+  get: function () {
+    if (this._readableState === undefined || this._writableState === undefined) {
+      return false;
+    }
+    return this._readableState.destroyed && this._writableState.destroyed;
+  },
+  set: function (value) {
+    // we ignore the value if the stream
+    // has not been initialized yet
+    if (this._readableState === undefined || this._writableState === undefined) {
+      return;
+    }
 
-function maybeReadMore_(stream, state) {
-  var len = state.length;
-  while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
-    debug('maybeReadMore read 0');
-    stream.read(0);
-    if (len === state.length)
-      // didn't get any data, stop spinning.
-      break;else len = state.length;
+    // backward compatibility, the user is explicitly
+    // managing destroyed
+    this._readableState.destroyed = value;
+    this._writableState.destroyed = value;
   }
-  state.readingMore = false;
-}
+});
 
-// abstract method.  to be overridden in specific implementation classes.
-// call cb(er, data) where data is <= n in length.
-// for virtual (non-string, non-buffer) streams, "length" is somewhat
-// arbitrary, and perhaps not very meaningful.
-Readable.prototype._read = function (n) {
-  this.emit('error', new Error('_read() is not implemented'));
+Duplex.prototype._destroy = function (err, cb) {
+  this.push(null);
+  this.end();
+
+  pna.nextTick(cb, err);
 };
+},{"./_stream_readable":42,"./_stream_writable":44,"core-util-is":32,"inherits":33,"process-nextick-args":39}],41:[function(require,module,exports){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-Readable.prototype.pipe = function (dest, pipeOpts) {
-  var src = this;
-  var state = this._readableState;
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
 
-  switch (state.pipesCount) {
-    case 0:
-      state.pipes = dest;
-      break;
-    case 1:
-      state.pipes = [state.pipes, dest];
-      break;
-    default:
-      state.pipes.push(dest);
-      break;
-  }
-  state.pipesCount += 1;
-  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
+'use strict';
 
-  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
+module.exports = PassThrough;
 
-  var endFn = doEnd ? onend : unpipe;
-  if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
+var Transform = require('./_stream_transform');
 
-  dest.on('unpipe', onunpipe);
-  function onunpipe(readable, unpipeInfo) {
-    debug('onunpipe');
-    if (readable === src) {
-      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
-        unpipeInfo.hasUnpiped = true;
-        cleanup();
-      }
-    }
-  }
+/*<replacement>*/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/*</replacement>*/
 
-  function onend() {
-    debug('onend');
-    dest.end();
-  }
+util.inherits(PassThrough, Transform);
 
-  // when the dest drains, it reduces the awaitDrain counter
-  // on the source.  This would be more elegant with a .once()
-  // handler in flow(), but adding and removing repeatedly is
-  // too slow.
-  var ondrain = pipeOnDrain(src);
-  dest.on('drain', ondrain);
+function PassThrough(options) {
+  if (!(this instanceof PassThrough)) return new PassThrough(options);
 
-  var cleanedUp = false;
-  function cleanup() {
-    debug('cleanup');
-    // cleanup event handlers once the pipe is broken
-    dest.removeListener('close', onclose);
-    dest.removeListener('finish', onfinish);
-    dest.removeListener('drain', ondrain);
-    dest.removeListener('error', onerror);
-    dest.removeListener('unpipe', onunpipe);
-    src.removeListener('end', onend);
-    src.removeListener('end', unpipe);
-    src.removeListener('data', ondata);
+  Transform.call(this, options);
+}
 
-    cleanedUp = true;
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
+  cb(null, chunk);
+};
+},{"./_stream_transform":43,"core-util-is":32,"inherits":33}],42:[function(require,module,exports){
+(function (process,global){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-    // if the reader is waiting for a drain event from this
-    // specific writer, then it would cause it to never start
-    // flowing again.
-    // So, if this is awaiting a drain, then we just call it now.
-    // If we don't know, then assume that we are waiting for one.
-    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
-  }
+'use strict';
 
-  // If the user pushes more data while we're writing to dest then we'll end up
-  // in ondata again. However, we only want to increase awaitDrain once because
-  // dest will only emit one 'drain' event for the multiple writes.
-  // => Introduce a guard on increasing awaitDrain.
-  var increasedAwaitDrain = false;
-  src.on('data', ondata);
-  function ondata(chunk) {
-    debug('ondata');
-    increasedAwaitDrain = false;
-    var ret = dest.write(chunk);
-    if (false === ret && !increasedAwaitDrain) {
-      // If the user unpiped during `dest.write()`, it is possible
-      // to get stuck in a permanently paused state if that write
-      // also returned false.
-      // => Check whether `dest` is still a piping destination.
-      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
-        debug('false write response, pause', src._readableState.awaitDrain);
-        src._readableState.awaitDrain++;
-        increasedAwaitDrain = true;
-      }
-      src.pause();
-    }
-  }
+/*<replacement>*/
 
-  // if the dest has an error, then stop piping into it.
-  // however, don't suppress the throwing behavior for this.
-  function onerror(er) {
-    debug('onerror', er);
-    unpipe();
-    dest.removeListener('error', onerror);
-    if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
-  }
+var pna = require('process-nextick-args');
+/*</replacement>*/
 
-  // Make sure our error handler is attached before userland ones.
-  prependListener(dest, 'error', onerror);
+module.exports = Readable;
 
-  // Both close and finish should trigger unpipe, but only once.
-  function onclose() {
-    dest.removeListener('finish', onfinish);
-    unpipe();
-  }
-  dest.once('close', onclose);
-  function onfinish() {
-    debug('onfinish');
-    dest.removeListener('close', onclose);
-    unpipe();
-  }
-  dest.once('finish', onfinish);
+/*<replacement>*/
+var isArray = require('isarray');
+/*</replacement>*/
 
-  function unpipe() {
-    debug('unpipe');
-    src.unpipe(dest);
-  }
+/*<replacement>*/
+var Duplex;
+/*</replacement>*/
 
-  // tell the dest that it's being piped to
-  dest.emit('pipe', src);
+Readable.ReadableState = ReadableState;
 
-  // start the flow if it hasn't been started already.
-  if (!state.flowing) {
-    debug('pipe resume');
-    src.resume();
-  }
+/*<replacement>*/
+var EE = require('events').EventEmitter;
 
-  return dest;
+var EElistenerCount = function (emitter, type) {
+  return emitter.listeners(type).length;
 };
+/*</replacement>*/
 
-function pipeOnDrain(src) {
-  return function () {
-    var state = src._readableState;
-    debug('pipeOnDrain', state.awaitDrain);
-    if (state.awaitDrain) state.awaitDrain--;
-    if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
-      state.flowing = true;
-      flow(src);
-    }
-  };
-}
+/*<replacement>*/
+var Stream = require('./internal/streams/stream');
+/*</replacement>*/
 
-Readable.prototype.unpipe = function (dest) {
-  var state = this._readableState;
-  var unpipeInfo = { hasUnpiped: false };
+/*<replacement>*/
 
-  // if we're not piping anywhere, then do nothing.
-  if (state.pipesCount === 0) return this;
+var Buffer = require('safe-buffer').Buffer;
+var OurUint8Array = global.Uint8Array || function () {};
+function _uint8ArrayToBuffer(chunk) {
+  return Buffer.from(chunk);
+}
+function _isUint8Array(obj) {
+  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
+}
 
-  // just one destination.  most common case.
-  if (state.pipesCount === 1) {
-    // passed in one, but it's not the right one.
-    if (dest && dest !== state.pipes) return this;
+/*</replacement>*/
 
-    if (!dest) dest = state.pipes;
+/*<replacement>*/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/*</replacement>*/
 
-    // got a match.
-    state.pipes = null;
-    state.pipesCount = 0;
-    state.flowing = false;
-    if (dest) dest.emit('unpipe', this, unpipeInfo);
-    return this;
-  }
+/*<replacement>*/
+var debugUtil = require('util');
+var debug = void 0;
+if (debugUtil && debugUtil.debuglog) {
+  debug = debugUtil.debuglog('stream');
+} else {
+  debug = function () {};
+}
+/*</replacement>*/
 
-  // slow case. multiple pipe destinations.
+var BufferList = require('./internal/streams/BufferList');
+var destroyImpl = require('./internal/streams/destroy');
+var StringDecoder;
 
-  if (!dest) {
-    // remove all.
-    var dests = state.pipes;
-    var len = state.pipesCount;
-    state.pipes = null;
-    state.pipesCount = 0;
-    state.flowing = false;
+util.inherits(Readable, Stream);
 
-    for (var i = 0; i < len; i++) {
-      dests[i].emit('unpipe', this, unpipeInfo);
-    }return this;
-  }
+var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
 
-  // try to find the right one.
-  var index = indexOf(state.pipes, dest);
-  if (index === -1) return this;
+function prependListener(emitter, event, fn) {
+  // Sadly this is not cacheable as some libraries bundle their own
+  // event emitter implementation with them.
+  if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
 
-  state.pipes.splice(index, 1);
-  state.pipesCount -= 1;
-  if (state.pipesCount === 1) state.pipes = state.pipes[0];
+  // This is a hack to make sure that our error handler is attached before any
+  // userland ones.  NEVER DO THIS. This is here only because this code needs
+  // to continue to work with older versions of Node.js that do not include
+  // the prependListener() method. The goal is to eventually remove this hack.
+  if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
+}
 
-  dest.emit('unpipe', this, unpipeInfo);
+function ReadableState(options, stream) {
+  Duplex = Duplex || require('./_stream_duplex');
 
-  return this;
-};
+  options = options || {};
 
-// set up data events if they are asked for
-// Ensure readable listeners eventually get something
-Readable.prototype.on = function (ev, fn) {
-  var res = Stream.prototype.on.call(this, ev, fn);
+  // Duplex streams are both readable and writable, but share
+  // the same options object.
+  // However, some cases require setting options to different
+  // values for the readable and the writable sides of the duplex stream.
+  // These options can be provided separately as readableXXX and writableXXX.
+  var isDuplex = stream instanceof Duplex;
 
-  if (ev === 'data') {
-    // Start flowing on next tick if stream isn't explicitly paused
-    if (this._readableState.flowing !== false) this.resume();
-  } else if (ev === 'readable') {
-    var state = this._readableState;
-    if (!state.endEmitted && !state.readableListening) {
-      state.readableListening = state.needReadable = true;
-      state.emittedReadable = false;
-      if (!state.reading) {
-        pna.nextTick(nReadingNextTick, this);
-      } else if (state.length) {
-        emitReadable(this);
-      }
-    }
-  }
+  // object stream flag. Used to make read(n) ignore n and to
+  // make all the buffer merging and length checks go away
+  this.objectMode = !!options.objectMode;
 
-  return res;
-};
-Readable.prototype.addListener = Readable.prototype.on;
+  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
 
-function nReadingNextTick(self) {
-  debug('readable nexttick read 0');
-  self.read(0);
-}
+  // the point at which it stops calling _read() to fill the buffer
+  // Note: 0 is a valid value, means "don't call _read preemptively ever"
+  var hwm = options.highWaterMark;
+  var readableHwm = options.readableHighWaterMark;
+  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
 
-// pause() and resume() are remnants of the legacy readable stream API
-// If the user uses them, then switch into old mode.
-Readable.prototype.resume = function () {
-  var state = this._readableState;
-  if (!state.flowing) {
-    debug('resume');
-    state.flowing = true;
-    resume(this, state);
-  }
-  return this;
-};
+  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
 
-function resume(stream, state) {
-  if (!state.resumeScheduled) {
-    state.resumeScheduled = true;
-    pna.nextTick(resume_, stream, state);
-  }
-}
+  // cast to ints.
+  this.highWaterMark = Math.floor(this.highWaterMark);
 
-function resume_(stream, state) {
-  if (!state.reading) {
-    debug('resume read 0');
-    stream.read(0);
-  }
+  // A linked list is used to store data chunks instead of an array because the
+  // linked list can remove elements from the beginning faster than
+  // array.shift()
+  this.buffer = new BufferList();
+  this.length = 0;
+  this.pipes = null;
+  this.pipesCount = 0;
+  this.flowing = null;
+  this.ended = false;
+  this.endEmitted = false;
+  this.reading = false;
 
-  state.resumeScheduled = false;
-  state.awaitDrain = 0;
-  stream.emit('resume');
-  flow(stream);
-  if (state.flowing && !state.reading) stream.read(0);
-}
+  // a flag to be able to tell if the event 'readable'/'data' is emitted
+  // immediately, or on a later tick.  We set this to true at first, because
+  // any actions that shouldn't happen until "later" should generally also
+  // not happen before the first read call.
+  this.sync = true;
 
-Readable.prototype.pause = function () {
-  debug('call pause flowing=%j', this._readableState.flowing);
-  if (false !== this._readableState.flowing) {
-    debug('pause');
-    this._readableState.flowing = false;
-    this.emit('pause');
-  }
-  return this;
-};
+  // whenever we return null, then we set a flag to say
+  // that we're awaiting a 'readable' event emission.
+  this.needReadable = false;
+  this.emittedReadable = false;
+  this.readableListening = false;
+  this.resumeScheduled = false;
 
-function flow(stream) {
-  var state = stream._readableState;
-  debug('flow', state.flowing);
-  while (state.flowing && stream.read() !== null) {}
-}
+  // has it been destroyed
+  this.destroyed = false;
 
-// wrap an old-style stream as the async data source.
-// This is *not* part of the readable stream interface.
-// It is an ugly unfortunate mess of history.
-Readable.prototype.wrap = function (stream) {
-  var _this = this;
+  // Crypto is kind of old and crusty.  Historically, its default string
+  // encoding is 'binary' so we have to make this configurable.
+  // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = options.defaultEncoding || 'utf8';
 
-  var state = this._readableState;
-  var paused = false;
+  // the number of writers that are awaiting a drain event in .pipe()s
+  this.awaitDrain = 0;
 
-  stream.on('end', function () {
-    debug('wrapped end');
-    if (state.decoder && !state.ended) {
-      var chunk = state.decoder.end();
-      if (chunk && chunk.length) _this.push(chunk);
-    }
+  // if true, a maybeReadMore has been scheduled
+  this.readingMore = false;
 
-    _this.push(null);
-  });
+  this.decoder = null;
+  this.encoding = null;
+  if (options.encoding) {
+    if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
+    this.decoder = new StringDecoder(options.encoding);
+    this.encoding = options.encoding;
+  }
+}
 
-  stream.on('data', function (chunk) {
-    debug('wrapped data');
-    if (state.decoder) chunk = state.decoder.write(chunk);
+function Readable(options) {
+  Duplex = Duplex || require('./_stream_duplex');
+
+  if (!(this instanceof Readable)) return new Readable(options);
 
-    // don't skip over falsy values in objectMode
-    if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
+  this._readableState = new ReadableState(options, this);
 
-    var ret = _this.push(chunk);
-    if (!ret) {
-      paused = true;
-      stream.pause();
-    }
-  });
+  // legacy
+  this.readable = true;
 
-  // proxy all the other methods.
-  // important when wrapping filters and duplexes.
-  for (var i in stream) {
-    if (this[i] === undefined && typeof stream[i] === 'function') {
-      this[i] = function (method) {
-        return function () {
-          return stream[method].apply(stream, arguments);
-        };
-      }(i);
-    }
-  }
+  if (options) {
+    if (typeof options.read === 'function') this._read = options.read;
 
-  // proxy certain important events.
-  for (var n = 0; n < kProxyEvents.length; n++) {
-    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
+    if (typeof options.destroy === 'function') this._destroy = options.destroy;
   }
 
-  // when we try to consume some more bytes, simply unpause the
-  // underlying stream.
-  this._read = function (n) {
-    debug('wrapped _read', n);
-    if (paused) {
-      paused = false;
-      stream.resume();
-    }
-  };
-
-  return this;
-};
+  Stream.call(this);
+}
 
-Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
-  // making it explicit this property is not enumerable
-  // because otherwise some prototype manipulation in
-  // userland will fail
-  enumerable: false,
+Object.defineProperty(Readable.prototype, 'destroyed', {
   get: function () {
-    return this._readableState.highWaterMark;
+    if (this._readableState === undefined) {
+      return false;
+    }
+    return this._readableState.destroyed;
+  },
+  set: function (value) {
+    // we ignore the value if the stream
+    // has not been initialized yet
+    if (!this._readableState) {
+      return;
+    }
+
+    // backward compatibility, the user is explicitly
+    // managing destroyed
+    this._readableState.destroyed = value;
   }
 });
 
-// exposed for testing purposes only.
-Readable._fromList = fromList;
+Readable.prototype.destroy = destroyImpl.destroy;
+Readable.prototype._undestroy = destroyImpl.undestroy;
+Readable.prototype._destroy = function (err, cb) {
+  this.push(null);
+  cb(err);
+};
 
-// Pluck off n bytes from an array of buffers.
-// Length is the combined lengths of all the buffers in the list.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function fromList(n, state) {
-  // nothing buffered
-  if (state.length === 0) return null;
+// Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+Readable.prototype.push = function (chunk, encoding) {
+  var state = this._readableState;
+  var skipChunkCheck;
 
-  var ret;
-  if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
-    // read it all, truncate the list
-    if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
-    state.buffer.clear();
+  if (!state.objectMode) {
+    if (typeof chunk === 'string') {
+      encoding = encoding || state.defaultEncoding;
+      if (encoding !== state.encoding) {
+        chunk = Buffer.from(chunk, encoding);
+        encoding = '';
+      }
+      skipChunkCheck = true;
+    }
   } else {
-    // read part of list
-    ret = fromListPartial(n, state.buffer, state.decoder);
+    skipChunkCheck = true;
   }
 
-  return ret;
-}
+  return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
+};
 
-// Extracts only enough buffered data to satisfy the amount requested.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function fromListPartial(n, list, hasStrings) {
-  var ret;
-  if (n < list.head.data.length) {
-    // slice is the same for buffers and strings
-    ret = list.head.data.slice(0, n);
-    list.head.data = list.head.data.slice(n);
-  } else if (n === list.head.data.length) {
-    // first chunk is a perfect match
-    ret = list.shift();
-  } else {
-    // result spans more than one buffer
-    ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
-  }
-  return ret;
-}
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function (chunk) {
+  return readableAddChunk(this, chunk, null, true, false);
+};
 
-// Copies a specified amount of characters from the list of buffered data
-// chunks.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function copyFromBufferString(n, list) {
-  var p = list.head;
-  var c = 1;
-  var ret = p.data;
-  n -= ret.length;
-  while (p = p.next) {
-    var str = p.data;
-    var nb = n > str.length ? str.length : n;
-    if (nb === str.length) ret += str;else ret += str.slice(0, n);
-    n -= nb;
-    if (n === 0) {
-      if (nb === str.length) {
-        ++c;
-        if (p.next) list.head = p.next;else list.head = list.tail = null;
-      } else {
-        list.head = p;
-        p.data = str.slice(nb);
+function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
+  var state = stream._readableState;
+  if (chunk === null) {
+    state.reading = false;
+    onEofChunk(stream, state);
+  } else {
+    var er;
+    if (!skipChunkCheck) er = chunkInvalid(state, chunk);
+    if (er) {
+      stream.emit('error', er);
+    } else if (state.objectMode || chunk && chunk.length > 0) {
+      if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
+        chunk = _uint8ArrayToBuffer(chunk);
       }
-      break;
-    }
-    ++c;
-  }
-  list.length -= c;
-  return ret;
-}
 
-// Copies a specified amount of bytes from the list of buffered data chunks.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function copyFromBuffer(n, list) {
-  var ret = Buffer.allocUnsafe(n);
-  var p = list.head;
-  var c = 1;
-  p.data.copy(ret);
-  n -= p.data.length;
-  while (p = p.next) {
-    var buf = p.data;
-    var nb = n > buf.length ? buf.length : n;
-    buf.copy(ret, ret.length - n, 0, nb);
-    n -= nb;
-    if (n === 0) {
-      if (nb === buf.length) {
-        ++c;
-        if (p.next) list.head = p.next;else list.head = list.tail = null;
+      if (addToFront) {
+        if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
+      } else if (state.ended) {
+        stream.emit('error', new Error('stream.push() after EOF'));
       } else {
-        list.head = p;
-        p.data = buf.slice(nb);
+        state.reading = false;
+        if (state.decoder && !encoding) {
+          chunk = state.decoder.write(chunk);
+          if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
+        } else {
+          addChunk(stream, state, chunk, false);
+        }
       }
-      break;
+    } else if (!addToFront) {
+      state.reading = false;
     }
-    ++c;
   }
-  list.length -= c;
-  return ret;
+
+  return needMoreData(state);
 }
 
-function endReadable(stream) {
-  var state = stream._readableState;
+function addChunk(stream, state, chunk, addToFront) {
+  if (state.flowing && state.length === 0 && !state.sync) {
+    stream.emit('data', chunk);
+    stream.read(0);
+  } else {
+    // update the buffer info.
+    state.length += state.objectMode ? 1 : chunk.length;
+    if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
 
-  // If we get here before consuming all the bytes, then that is a
-  // bug in node.  Should never happen.
-  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
+    if (state.needReadable) emitReadable(stream);
+  }
+  maybeReadMore(stream, state);
+}
 
-  if (!state.endEmitted) {
-    state.ended = true;
-    pna.nextTick(endReadableNT, state, stream);
+function chunkInvalid(state, chunk) {
+  var er;
+  if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+    er = new TypeError('Invalid non-string/buffer chunk');
   }
+  return er;
+}
+
+// if it's past the high water mark, we can push in some more.
+// Also, if we have no data yet, we can stand some
+// more bytes.  This is to work around cases where hwm=0,
+// such as the repl.  Also, if the push() triggered a
+// readable event, and the user called read(largeNumber) such that
+// needReadable was set, then we ought to push more, so that another
+// 'readable' event will be triggered.
+function needMoreData(state) {
+  return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
 }
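
For reference, a standalone sketch (not part of the patch) of how a producer interacts with the high-water-mark logic above: push() returns false once needMoreData() would, and the next _read() call is the cue to continue. The Counter class and its limit are illustrative only.

const { Readable } = require('stream');

// Counter is a made-up example source; push() returning false mirrors the
// needMoreData()/highWaterMark check above, and the next _read() call is
// the signal to resume pushing.
class Counter extends Readable {
  constructor(limit) {
    super();
    this.count = 0;
    this.limit = limit;
  }
  _read() {
    while (this.count < this.limit) {
      if (!this.push(`chunk ${this.count++}\n`)) return; // buffer full, wait for next _read()
    }
    this.push(null); // no more data
  }
}

new Counter(5).pipe(process.stdout);
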
 
-function endReadableNT(state, stream) {
-  // Check that we didn't get one last unshift.
-  if (!state.endEmitted && state.length === 0) {
-    state.endEmitted = true;
-    stream.readable = false;
-    stream.emit('end');
+Readable.prototype.isPaused = function () {
+  return this._readableState.flowing === false;
+};
+
+// backwards compatibility.
+Readable.prototype.setEncoding = function (enc) {
+  if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
+  this._readableState.decoder = new StringDecoder(enc);
+  this._readableState.encoding = enc;
+  return this;
+};
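
A quick standalone sketch of the behaviour setEncoding() enables through the StringDecoder above (values are illustrative):

const { Readable } = require('stream');

const r = new Readable({ read() {} });
r.setEncoding('utf8');            // decoder converts Buffers to strings
r.push(Buffer.from('héllo'));     // multi-byte input is decoded safely
r.push(null);
r.on('data', s => console.log(typeof s, s)); // "string héllo"
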
+
+// Don't raise the hwm > 8MB
+var MAX_HWM = 0x800000;
+function computeNewHighWaterMark(n) {
+  if (n >= MAX_HWM) {
+    n = MAX_HWM;
+  } else {
+    // Get the next highest power of 2 to prevent increasing hwm excessively in
+    // tiny amounts
+    n--;
+    n |= n >>> 1;
+    n |= n >>> 2;
+    n |= n >>> 4;
+    n |= n >>> 8;
+    n |= n >>> 16;
+    n++;
   }
+  return n;
 }
 
-function indexOf(xs, x) {
-  for (var i = 0, l = xs.length; i < l; i++) {
-    if (xs[i] === x) return i;
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function howMuchToRead(n, state) {
+  if (n <= 0 || state.length === 0 && state.ended) return 0;
+  if (state.objectMode) return 1;
+  if (n !== n) {
+    // Only flow one buffer at a time
+    if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
   }
-  return -1;
+  // If we're asking for more than the current hwm, then raise the hwm.
+  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
+  if (n <= state.length) return n;
+  // Don't have enough
+  if (!state.ended) {
+    state.needReadable = true;
+    return 0;
+  }
+  return state.length;
 }
-}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"./_stream_duplex":19,"./internal/streams/BufferList":24,"./internal/streams/destroy":25,"./internal/streams/stream":26,"_process":49,"core-util-is":11,"events":45,"inherits":12,"isarray":13,"process-nextick-args":18,"safe-buffer":28,"string_decoder/":29,"util":43}],22:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// a transform stream is a readable/writable stream where you do
-// something with the data.  Sometimes it's called a "filter",
-// but that's not a great name for it, since that implies a thing where
-// some bits pass through, and others are simply ignored.  (That would
-// be a valid example of a transform, of course.)
-//
-// While the output is causally related to the input, it's not a
-// necessarily symmetric or synchronous transformation.  For example,
-// a zlib stream might take multiple plain-text writes(), and then
-// emit a single compressed chunk some time in the future.
-//
-// Here's how this works:
-//
-// The Transform stream has all the aspects of the readable and writable
-// stream classes.  When you write(chunk), that calls _write(chunk,cb)
-// internally, and returns false if there's a lot of pending writes
-// buffered up.  When you call read(), that calls _read(n) until
-// there's enough pending readable data buffered up.
-//
-// In a transform stream, the written data is placed in a buffer.  When
-// _read(n) is called, it transforms the queued up data, calling the
-// buffered _write cb's as it consumes chunks.  If consuming a single
-// written chunk would result in multiple output chunks, then the first
-// outputted bit calls the readcb, and subsequent chunks just go into
-// the read buffer, and will cause it to emit 'readable' if necessary.
-//
-// This way, back-pressure is actually determined by the reading side,
-// since _read has to be called to start processing a new chunk.  However,
-// a pathological inflate type of transform can cause excessive buffering
-// here.  For example, imagine a stream where every byte of input is
-// interpreted as an integer from 0-255, and then results in that many
-// bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
-// 1kb of data being output.  In this case, you could write a very small
-// amount of input, and end up with a very large amount of output.  In
-// such a pathological inflating mechanism, there'd be no way to tell
-// the system to stop doing the transform.  A single 4MB write could
-// cause the system to run out of memory.
-//
-// However, even in such a pathological case, only a single written chunk
-// would be consumed, and then the rest would wait (un-transformed) until
-// the results of the previous transformed chunk were consumed.
 
-'use strict';
+// you can override either this method, or the async _read(n) below.
+Readable.prototype.read = function (n) {
+  debug('read', n);
+  n = parseInt(n, 10);
+  var state = this._readableState;
+  var nOrig = n;
 
-module.exports = Transform;
+  if (n !== 0) state.emittedReadable = false;
 
-var Duplex = require('./_stream_duplex');
+  // if we're doing read(0) to trigger a readable event, but we
+  // already have a bunch of data in the buffer, then just trigger
+  // the 'readable' event and move on.
+  if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
+    debug('read: emitReadable', state.length, state.ended);
+    if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
+    return null;
+  }
 
-/*<replacement>*/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/*</replacement>*/
+  n = howMuchToRead(n, state);
 
-util.inherits(Transform, Duplex);
+  // if we've ended, and we're now clear, then finish it up.
+  if (n === 0 && state.ended) {
+    if (state.length === 0) endReadable(this);
+    return null;
+  }
 
-function afterTransform(er, data) {
-  var ts = this._transformState;
-  ts.transforming = false;
+  // All the actual chunk generation logic needs to be
+  // *below* the call to _read.  The reason is that in certain
+  // synthetic stream cases, such as passthrough streams, _read
+  // may be a completely synchronous operation which may change
+  // the state of the read buffer, providing enough data when
+  // before there was *not* enough.
+  //
+  // So, the steps are:
+  // 1. Figure out what the state of things will be after we do
+  // a read from the buffer.
+  //
+  // 2. If that resulting state will trigger a _read, then call _read.
+  // Note that this may be asynchronous, or synchronous.  Yes, it is
+  // deeply ugly to write APIs this way, but that still doesn't mean
+  // that the Readable class should behave improperly, as streams are
+  // designed to be sync/async agnostic.
+  // Take note if the _read call is sync or async (ie, if the read call
+  // has returned yet), so that we know whether or not it's safe to emit
+  // 'readable' etc.
+  //
+  // 3. Actually pull the requested chunks out of the buffer and return.
 
-  var cb = ts.writecb;
+  // if we need a readable event, then we need to do some reading.
+  var doRead = state.needReadable;
+  debug('need readable', doRead);
 
-  if (!cb) {
-    return this.emit('error', new Error('write callback called multiple times'));
+  // if we currently have less than the highWaterMark, then also read some
+  if (state.length === 0 || state.length - n < state.highWaterMark) {
+    doRead = true;
+    debug('length less than watermark', doRead);
   }
 
-  ts.writechunk = null;
-  ts.writecb = null;
-
-  if (data != null) // single equals check for both `null` and `undefined`
-    this.push(data);
+  // however, if we've ended, then there's no point, and if we're already
+  // reading, then it's unnecessary.
+  if (state.ended || state.reading) {
+    doRead = false;
+    debug('reading or ended', doRead);
+  } else if (doRead) {
+    debug('do read');
+    state.reading = true;
+    state.sync = true;
+    // if the length is currently zero, then we *need* a readable event.
+    if (state.length === 0) state.needReadable = true;
+    // call internal read method
+    this._read(state.highWaterMark);
+    state.sync = false;
+    // If _read pushed data synchronously, then `reading` will be false,
+    // and we need to re-evaluate how much data we can return to the user.
+    if (!state.reading) n = howMuchToRead(nOrig, state);
+  }
 
-  cb(er);
+  var ret;
+  if (n > 0) ret = fromList(n, state);else ret = null;
 
-  var rs = this._readableState;
-  rs.reading = false;
-  if (rs.needReadable || rs.length < rs.highWaterMark) {
-    this._read(rs.highWaterMark);
+  if (ret === null) {
+    state.needReadable = true;
+    n = 0;
+  } else {
+    state.length -= n;
   }
-}
 
-function Transform(options) {
-  if (!(this instanceof Transform)) return new Transform(options);
+  if (state.length === 0) {
+    // If we have nothing in the buffer, then we want to know
+    // as soon as we *do* get something into the buffer.
+    if (!state.ended) state.needReadable = true;
 
-  Duplex.call(this, options);
+    // If we tried to read() past the EOF, then emit end on the next tick.
+    if (nOrig !== n && state.ended) endReadable(this);
+  }
 
-  this._transformState = {
-    afterTransform: afterTransform.bind(this),
-    needTransform: false,
-    transforming: false,
-    writecb: null,
-    writechunk: null,
-    writeencoding: null
-  };
+  if (ret !== null) this.emit('data', ret);
 
-  // start out asking for a readable event once data is transformed.
-  this._readableState.needReadable = true;
+  return ret;
+};
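
For context, a standalone sketch of the pull-style API that read() above serves (chunk values are illustrative):

const { Readable } = require('stream');

const src = new Readable({ read() {} });
src.push('a'); src.push('b'); src.push('c'); src.push(null);

// 'readable' fires when data (or EOF) is buffered; read() drains the buffer
// until it returns null, as implemented above.
src.on('readable', () => {
  let chunk;
  while ((chunk = src.read()) !== null) {
    console.log('got', chunk.toString());
  }
});
src.on('end', () => console.log('done'));
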
 
-  // we have implemented the _read method, and done the other things
-  // that Readable wants before the first _read call, so unset the
-  // sync guard flag.
-  this._readableState.sync = false;
+function onEofChunk(stream, state) {
+  if (state.ended) return;
+  if (state.decoder) {
+    var chunk = state.decoder.end();
+    if (chunk && chunk.length) {
+      state.buffer.push(chunk);
+      state.length += state.objectMode ? 1 : chunk.length;
+    }
+  }
+  state.ended = true;
 
-  if (options) {
-    if (typeof options.transform === 'function') this._transform = options.transform;
+  // emit 'readable' now to make sure it gets picked up.
+  emitReadable(stream);
+}
 
-    if (typeof options.flush === 'function') this._flush = options.flush;
+// Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow.  This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+function emitReadable(stream) {
+  var state = stream._readableState;
+  state.needReadable = false;
+  if (!state.emittedReadable) {
+    debug('emitReadable', state.flowing);
+    state.emittedReadable = true;
+    if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
   }
+}
 
-  // When the writable side finishes, then flush out anything remaining.
-  this.on('prefinish', prefinish);
+function emitReadable_(stream) {
+  debug('emit readable');
+  stream.emit('readable');
+  flow(stream);
 }
 
-function prefinish() {
-  var _this = this;
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data.  that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+  if (!state.readingMore) {
+    state.readingMore = true;
+    pna.nextTick(maybeReadMore_, stream, state);
+  }
+}
 
-  if (typeof this._flush === 'function') {
-    this._flush(function (er, data) {
-      done(_this, er, data);
-    });
-  } else {
-    done(this, null, null);
+function maybeReadMore_(stream, state) {
+  var len = state.length;
+  while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
+    debug('maybeReadMore read 0');
+    stream.read(0);
+    if (len === state.length)
+      // didn't get any data, stop spinning.
+      break;else len = state.length;
   }
+  state.readingMore = false;
 }
 
-Transform.prototype.push = function (chunk, encoding) {
-  this._transformState.needTransform = false;
-  return Duplex.prototype.push.call(this, chunk, encoding);
+// abstract method.  to be overridden in specific implementation classes.
+// call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function (n) {
+  this.emit('error', new Error('_read() is not implemented'));
 };
 
-// This is the part where you do stuff!
-// override this function in implementation classes.
-// 'chunk' is an input chunk.
-//
-// Call `push(newChunk)` to pass along transformed output
-// to the readable side.  You may call 'push' zero or more times.
-//
-// Call `cb(err)` when you are done with this chunk.  If you pass
-// an error, then that'll put the hurt on the whole operation.  If you
-// never call cb(), then you'll never get another chunk.
-Transform.prototype._transform = function (chunk, encoding, cb) {
-  throw new Error('_transform() is not implemented');
-};
+Readable.prototype.pipe = function (dest, pipeOpts) {
+  var src = this;
+  var state = this._readableState;
 
-Transform.prototype._write = function (chunk, encoding, cb) {
-  var ts = this._transformState;
-  ts.writecb = cb;
-  ts.writechunk = chunk;
-  ts.writeencoding = encoding;
-  if (!ts.transforming) {
-    var rs = this._readableState;
-    if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
+  switch (state.pipesCount) {
+    case 0:
+      state.pipes = dest;
+      break;
+    case 1:
+      state.pipes = [state.pipes, dest];
+      break;
+    default:
+      state.pipes.push(dest);
+      break;
   }
-};
-
-// Doesn't matter what the args are here.
-// _transform does all the work.
-// That we got here means that the readable side wants more data.
-Transform.prototype._read = function (n) {
-  var ts = this._transformState;
+  state.pipesCount += 1;
+  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
 
-  if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
-    ts.transforming = true;
-    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
-  } else {
-    // mark that we need a transform, so that any data that comes in
-    // will get processed, now that we've asked for it.
-    ts.needTransform = true;
-  }
-};
+  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
 
-Transform.prototype._destroy = function (err, cb) {
-  var _this2 = this;
+  var endFn = doEnd ? onend : unpipe;
+  if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
 
-  Duplex.prototype._destroy.call(this, err, function (err2) {
-    cb(err2);
-    _this2.emit('close');
-  });
-};
+  dest.on('unpipe', onunpipe);
+  function onunpipe(readable, unpipeInfo) {
+    debug('onunpipe');
+    if (readable === src) {
+      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
+        unpipeInfo.hasUnpiped = true;
+        cleanup();
+      }
+    }
+  }
 
-function done(stream, er, data) {
-  if (er) return stream.emit('error', er);
+  function onend() {
+    debug('onend');
+    dest.end();
+  }
 
-  if (data != null) // single equals check for both `null` and `undefined`
-    stream.push(data);
+  // when the dest drains, it reduces the awaitDrain counter
+  // on the source.  This would be more elegant with a .once()
+  // handler in flow(), but adding and removing repeatedly is
+  // too slow.
+  var ondrain = pipeOnDrain(src);
+  dest.on('drain', ondrain);
 
-  // if there's nothing in the write buffer, then that means
-  // that nothing more will ever be provided
-  if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
+  var cleanedUp = false;
+  function cleanup() {
+    debug('cleanup');
+    // cleanup event handlers once the pipe is broken
+    dest.removeListener('close', onclose);
+    dest.removeListener('finish', onfinish);
+    dest.removeListener('drain', ondrain);
+    dest.removeListener('error', onerror);
+    dest.removeListener('unpipe', onunpipe);
+    src.removeListener('end', onend);
+    src.removeListener('end', unpipe);
+    src.removeListener('data', ondata);
 
-  if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
+    cleanedUp = true;
 
-  return stream.push(null);
-}
-},{"./_stream_duplex":19,"core-util-is":11,"inherits":12}],23:[function(require,module,exports){
-(function (process,global,setImmediate){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
+    // if the reader is waiting for a drain event from this
+    // specific writer, then it would cause it to never start
+    // flowing again.
+    // So, if this is awaiting a drain, then we just call it now.
+    // If we don't know, then assume that we are waiting for one.
+    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
+  }
 
-// A bit simpler than readable streams.
-// Implement an async ._write(chunk, encoding, cb), and it'll handle all
-// the drain event emission and buffering.
+  // If the user pushes more data while we're writing to dest then we'll end up
+  // in ondata again. However, we only want to increase awaitDrain once because
+  // dest will only emit one 'drain' event for the multiple writes.
+  // => Introduce a guard on increasing awaitDrain.
+  var increasedAwaitDrain = false;
+  src.on('data', ondata);
+  function ondata(chunk) {
+    debug('ondata');
+    increasedAwaitDrain = false;
+    var ret = dest.write(chunk);
+    if (false === ret && !increasedAwaitDrain) {
+      // If the user unpiped during `dest.write()`, it is possible
+      // to get stuck in a permanently paused state if that write
+      // also returned false.
+      // => Check whether `dest` is still a piping destination.
+      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
+        debug('false write response, pause', src._readableState.awaitDrain);
+        src._readableState.awaitDrain++;
+        increasedAwaitDrain = true;
+      }
+      src.pause();
+    }
+  }
 
-'use strict';
+  // if the dest has an error, then stop piping into it.
+  // however, don't suppress the throwing behavior for this.
+  function onerror(er) {
+    debug('onerror', er);
+    unpipe();
+    dest.removeListener('error', onerror);
+    if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
+  }
 
-/*<replacement>*/
+  // Make sure our error handler is attached before userland ones.
+  prependListener(dest, 'error', onerror);
 
-var pna = require('process-nextick-args');
-/*</replacement>*/
+  // Both close and finish should trigger unpipe, but only once.
+  function onclose() {
+    dest.removeListener('finish', onfinish);
+    unpipe();
+  }
+  dest.once('close', onclose);
+  function onfinish() {
+    debug('onfinish');
+    dest.removeListener('close', onclose);
+    unpipe();
+  }
+  dest.once('finish', onfinish);
 
-module.exports = Writable;
+  function unpipe() {
+    debug('unpipe');
+    src.unpipe(dest);
+  }
 
-/* <replacement> */
-function WriteReq(chunk, encoding, cb) {
-  this.chunk = chunk;
-  this.encoding = encoding;
-  this.callback = cb;
-  this.next = null;
-}
+  // tell the dest that it's being piped to
+  dest.emit('pipe', src);
 
-// It seems a linked list but it is not
-// there will be only 2 of these for each stream
-function CorkedRequest(state) {
-  var _this = this;
+  // start the flow if it hasn't been started already.
+  if (!state.flowing) {
+    debug('pipe resume');
+    src.resume();
+  }
 
-  this.next = null;
-  this.entry = null;
-  this.finish = function () {
-    onCorkedFinish(_this, state);
+  return dest;
+};
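
A minimal usage sketch of the pipe() machinery above; the file names are placeholders. The awaitDrain/'drain' bookkeeping means the reader is paused automatically whenever the writer falls behind.

const fs = require('fs');

fs.createReadStream('input.bin')             // placeholder path
  .pipe(fs.createWriteStream('output.bin'))  // placeholder path
  .on('finish', () => console.log('copy complete'));
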
+
+function pipeOnDrain(src) {
+  return function () {
+    var state = src._readableState;
+    debug('pipeOnDrain', state.awaitDrain);
+    if (state.awaitDrain) state.awaitDrain--;
+    if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
+      state.flowing = true;
+      flow(src);
+    }
   };
 }
-/* </replacement> */
 
-/*<replacement>*/
-var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
-/*</replacement>*/
+Readable.prototype.unpipe = function (dest) {
+  var state = this._readableState;
+  var unpipeInfo = { hasUnpiped: false };
 
-/*<replacement>*/
-var Duplex;
-/*</replacement>*/
+  // if we're not piping anywhere, then do nothing.
+  if (state.pipesCount === 0) return this;
 
-Writable.WritableState = WritableState;
+  // just one destination.  most common case.
+  if (state.pipesCount === 1) {
+    // passed in one, but it's not the right one.
+    if (dest && dest !== state.pipes) return this;
 
-/*<replacement>*/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/*</replacement>*/
+    if (!dest) dest = state.pipes;
 
-/*<replacement>*/
-var internalUtil = {
-  deprecate: require('util-deprecate')
-};
-/*</replacement>*/
+    // got a match.
+    state.pipes = null;
+    state.pipesCount = 0;
+    state.flowing = false;
+    if (dest) dest.emit('unpipe', this, unpipeInfo);
+    return this;
+  }
 
-/*<replacement>*/
-var Stream = require('./internal/streams/stream');
-/*</replacement>*/
+  // slow case. multiple pipe destinations.
 
-/*<replacement>*/
+  if (!dest) {
+    // remove all.
+    var dests = state.pipes;
+    var len = state.pipesCount;
+    state.pipes = null;
+    state.pipesCount = 0;
+    state.flowing = false;
 
-var Buffer = require('safe-buffer').Buffer;
-var OurUint8Array = global.Uint8Array || function () {};
-function _uint8ArrayToBuffer(chunk) {
-  return Buffer.from(chunk);
-}
-function _isUint8Array(obj) {
-  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
+    for (var i = 0; i < len; i++) {
+      dests[i].emit('unpipe', this, unpipeInfo);
+    }return this;
+  }
 
-/*</replacement>*/
+  // try to find the right one.
+  var index = indexOf(state.pipes, dest);
+  if (index === -1) return this;
 
-var destroyImpl = require('./internal/streams/destroy');
+  state.pipes.splice(index, 1);
+  state.pipesCount -= 1;
+  if (state.pipesCount === 1) state.pipes = state.pipes[0];
 
-util.inherits(Writable, Stream);
+  dest.emit('unpipe', this, unpipeInfo);
 
-function nop() {}
+  return this;
+};
 
-function WritableState(options, stream) {
-  Duplex = Duplex || require('./_stream_duplex');
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
+Readable.prototype.on = function (ev, fn) {
+  var res = Stream.prototype.on.call(this, ev, fn);
 
-  options = options || {};
+  if (ev === 'data') {
+    // Start flowing on next tick if stream isn't explicitly paused
+    if (this._readableState.flowing !== false) this.resume();
+  } else if (ev === 'readable') {
+    var state = this._readableState;
+    if (!state.endEmitted && !state.readableListening) {
+      state.readableListening = state.needReadable = true;
+      state.emittedReadable = false;
+      if (!state.reading) {
+        pna.nextTick(nReadingNextTick, this);
+      } else if (state.length) {
+        emitReadable(this);
+      }
+    }
+  }
 
-  // Duplex streams are both readable and writable, but share
-  // the same options object.
-  // However, some cases require setting options to different
-  // values for the readable and the writable sides of the duplex stream.
-  // These options can be provided separately as readableXXX and writableXXX.
-  var isDuplex = stream instanceof Duplex;
+  return res;
+};
+Readable.prototype.addListener = Readable.prototype.on;
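
Standalone sketch of the two listener styles handled above: a 'data' handler switches the stream into flowing mode via resume(), while 'readable' only schedules a read(0).

const { Readable } = require('stream');

const r = new Readable({ read() {} });
r.push('hello'); r.push(null);

// Adding the 'data' listener is what starts the flow; no read() calls needed.
r.on('data', chunk => console.log('flowing:', chunk.toString()));
r.on('end', () => console.log('end'));
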
 
-  // object stream flag to indicate whether or not this stream
-  // contains buffers or objects.
-  this.objectMode = !!options.objectMode;
+function nReadingNextTick(self) {
+  debug('readable nexttick read 0');
+  self.read(0);
+}
 
-  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function () {
+  var state = this._readableState;
+  if (!state.flowing) {
+    debug('resume');
+    state.flowing = true;
+    resume(this, state);
+  }
+  return this;
+};
 
-  // the point at which write() starts returning false
-  // Note: 0 is a valid value, means that we always return false if
-  // the entire buffer is not flushed immediately on write()
-  var hwm = options.highWaterMark;
-  var writableHwm = options.writableHighWaterMark;
-  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+function resume(stream, state) {
+  if (!state.resumeScheduled) {
+    state.resumeScheduled = true;
+    pna.nextTick(resume_, stream, state);
+  }
+}
 
-  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
+function resume_(stream, state) {
+  if (!state.reading) {
+    debug('resume read 0');
+    stream.read(0);
+  }
 
-  // cast to ints.
-  this.highWaterMark = Math.floor(this.highWaterMark);
+  state.resumeScheduled = false;
+  state.awaitDrain = 0;
+  stream.emit('resume');
+  flow(stream);
+  if (state.flowing && !state.reading) stream.read(0);
+}
 
-  // if _final has been called
-  this.finalCalled = false;
+Readable.prototype.pause = function () {
+  debug('call pause flowing=%j', this._readableState.flowing);
+  if (false !== this._readableState.flowing) {
+    debug('pause');
+    this._readableState.flowing = false;
+    this.emit('pause');
+  }
+  return this;
+};
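
Standalone sketch of the pause()/resume() toggling implemented above (timings are arbitrary):

const { Readable } = require('stream');

const r = new Readable({ read() {} });
r.push('one'); r.push('two'); r.push(null);

r.on('data', chunk => {
  console.log('chunk:', chunk.toString());
  r.pause();                            // leave flowing mode...
  setTimeout(() => r.resume(), 100);    // ...and re-enter it shortly after
});
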
 
-  // drain event flag.
-  this.needDrain = false;
-  // at the start of calling end()
-  this.ending = false;
-  // when end() has been called, and returned
-  this.ended = false;
-  // when 'finish' is emitted
-  this.finished = false;
+function flow(stream) {
+  var state = stream._readableState;
+  debug('flow', state.flowing);
+  while (state.flowing && stream.read() !== null) {}
+}
 
-  // has it been destroyed
-  this.destroyed = false;
+// wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function (stream) {
+  var _this = this;
 
-  // should we decode strings into buffers before passing to _write?
-  // this is here so that some node-core streams can optimize string
-  // handling at a lower level.
-  var noDecode = options.decodeStrings === false;
-  this.decodeStrings = !noDecode;
+  var state = this._readableState;
+  var paused = false;
 
-  // Crypto is kind of old and crusty.  Historically, its default string
-  // encoding is 'binary' so we have to make this configurable.
-  // Everything else in the universe uses 'utf8', though.
-  this.defaultEncoding = options.defaultEncoding || 'utf8';
+  stream.on('end', function () {
+    debug('wrapped end');
+    if (state.decoder && !state.ended) {
+      var chunk = state.decoder.end();
+      if (chunk && chunk.length) _this.push(chunk);
+    }
 
-  // not an actual buffer we keep track of, but a measurement
-  // of how much we're waiting to get pushed to some underlying
-  // socket or file.
-  this.length = 0;
+    _this.push(null);
+  });
 
-  // a flag to see when we're in the middle of a write.
-  this.writing = false;
+  stream.on('data', function (chunk) {
+    debug('wrapped data');
+    if (state.decoder) chunk = state.decoder.write(chunk);
 
-  // when true all writes will be buffered until .uncork() call
-  this.corked = 0;
+    // don't skip over falsy values in objectMode
+    if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
 
-  // a flag to be able to tell if the onwrite cb is called immediately,
-  // or on a later tick.  We set this to true at first, because any
-  // actions that shouldn't happen until "later" should generally also
-  // not happen before the first write call.
-  this.sync = true;
+    var ret = _this.push(chunk);
+    if (!ret) {
+      paused = true;
+      stream.pause();
+    }
+  });
 
-  // a flag to know if we're processing previously buffered items, which
-  // may call the _write() callback in the same tick, so that we don't
-  // end up in an overlapped onwrite situation.
-  this.bufferProcessing = false;
+  // proxy all the other methods.
+  // important when wrapping filters and duplexes.
+  for (var i in stream) {
+    if (this[i] === undefined && typeof stream[i] === 'function') {
+      this[i] = function (method) {
+        return function () {
+          return stream[method].apply(stream, arguments);
+        };
+      }(i);
+    }
+  }
+
+  // proxy certain important events.
+  for (var n = 0; n < kProxyEvents.length; n++) {
+    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
+  }
 
-  // the callback that's passed to _write(chunk,cb)
-  this.onwrite = function (er) {
-    onwrite(stream, er);
+  // when we try to consume some more bytes, simply unpause the
+  // underlying stream.
+  this._read = function (n) {
+    debug('wrapped _read', n);
+    if (paused) {
+      paused = false;
+      stream.resume();
+    }
   };
 
-  // the callback that the user supplies to write(chunk,encoding,cb)
-  this.writecb = null;
+  return this;
+};
 
-  // the amount that is being written when _write is called.
-  this.writelen = 0;
+Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
+  // making it explicit this property is not enumerable
+  // because otherwise some prototype manipulation in
+  // userland will fail
+  enumerable: false,
+  get: function () {
+    return this._readableState.highWaterMark;
+  }
+});
 
-  this.bufferedRequest = null;
-  this.lastBufferedRequest = null;
+// exposed for testing purposes only.
+Readable._fromList = fromList;
 
-  // number of pending user-supplied write callbacks
-  // this must be 0 before 'finish' can be emitted
-  this.pendingcb = 0;
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromList(n, state) {
+  // nothing buffered
+  if (state.length === 0) return null;
 
-  // emit prefinish if the only thing we're waiting for is _write cbs
-  // This is relevant for synchronous Transform streams
-  this.prefinished = false;
+  var ret;
+  if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
+    // read it all, truncate the list
+    if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
+    state.buffer.clear();
+  } else {
+    // read part of list
+    ret = fromListPartial(n, state.buffer, state.decoder);
+  }
 
-  // True if the error was already emitted and should not be thrown again
-  this.errorEmitted = false;
+  return ret;
+}
 
-  // count buffered requests
-  this.bufferedRequestCount = 0;
+// Extracts only enough buffered data to satisfy the amount requested.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromListPartial(n, list, hasStrings) {
+  var ret;
+  if (n < list.head.data.length) {
+    // slice is the same for buffers and strings
+    ret = list.head.data.slice(0, n);
+    list.head.data = list.head.data.slice(n);
+  } else if (n === list.head.data.length) {
+    // first chunk is a perfect match
+    ret = list.shift();
+  } else {
+    // result spans more than one buffer
+    ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
+  }
+  return ret;
+}
 
-  // allocate the first CorkedRequest, there is always
-  // one allocated and free to use, and we maintain at most two
-  this.corkedRequestsFree = new CorkedRequest(this);
+// Copies a specified amount of characters from the list of buffered data
+// chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBufferString(n, list) {
+  var p = list.head;
+  var c = 1;
+  var ret = p.data;
+  n -= ret.length;
+  while (p = p.next) {
+    var str = p.data;
+    var nb = n > str.length ? str.length : n;
+    if (nb === str.length) ret += str;else ret += str.slice(0, n);
+    n -= nb;
+    if (n === 0) {
+      if (nb === str.length) {
+        ++c;
+        if (p.next) list.head = p.next;else list.head = list.tail = null;
+      } else {
+        list.head = p;
+        p.data = str.slice(nb);
+      }
+      break;
+    }
+    ++c;
+  }
+  list.length -= c;
+  return ret;
 }
 
-WritableState.prototype.getBuffer = function getBuffer() {
-  var current = this.bufferedRequest;
-  var out = [];
-  while (current) {
-    out.push(current);
-    current = current.next;
+// Copies a specified amount of bytes from the list of buffered data chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBuffer(n, list) {
+  var ret = Buffer.allocUnsafe(n);
+  var p = list.head;
+  var c = 1;
+  p.data.copy(ret);
+  n -= p.data.length;
+  while (p = p.next) {
+    var buf = p.data;
+    var nb = n > buf.length ? buf.length : n;
+    buf.copy(ret, ret.length - n, 0, nb);
+    n -= nb;
+    if (n === 0) {
+      if (nb === buf.length) {
+        ++c;
+        if (p.next) list.head = p.next;else list.head = list.tail = null;
+      } else {
+        list.head = p;
+        p.data = buf.slice(nb);
+      }
+      break;
+    }
+    ++c;
   }
-  return out;
-};
+  list.length -= c;
+  return ret;
+}
 
-(function () {
-  try {
-    Object.defineProperty(WritableState.prototype, 'buffer', {
-      get: internalUtil.deprecate(function () {
-        return this.getBuffer();
-      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
-    });
-  } catch (_) {}
-})();
+function endReadable(stream) {
+  var state = stream._readableState;
 
-// Test _writableState for inheritance to account for Duplex streams,
-// whose prototype chain only points to Readable.
-var realHasInstance;
-if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
-  realHasInstance = Function.prototype[Symbol.hasInstance];
-  Object.defineProperty(Writable, Symbol.hasInstance, {
-    value: function (object) {
-      if (realHasInstance.call(this, object)) return true;
-      if (this !== Writable) return false;
+  // If we get here before consuming all the bytes, then that is a
+  // bug in node.  Should never happen.
+  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
 
-      return object && object._writableState instanceof WritableState;
-    }
-  });
-} else {
-  realHasInstance = function (object) {
-    return object instanceof this;
-  };
+  if (!state.endEmitted) {
+    state.ended = true;
+    pna.nextTick(endReadableNT, state, stream);
+  }
 }
 
-function Writable(options) {
-  Duplex = Duplex || require('./_stream_duplex');
+function endReadableNT(state, stream) {
+  // Check that we didn't get one last unshift.
+  if (!state.endEmitted && state.length === 0) {
+    state.endEmitted = true;
+    stream.readable = false;
+    stream.emit('end');
+  }
+}
+
+function indexOf(xs, x) {
+  for (var i = 0, l = xs.length; i < l; i++) {
+    if (xs[i] === x) return i;
+  }
+  return -1;
+}
+}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
+},{"./_stream_duplex":40,"./internal/streams/BufferList":45,"./internal/streams/destroy":46,"./internal/streams/stream":47,"_process":70,"core-util-is":32,"events":66,"inherits":33,"isarray":34,"process-nextick-args":39,"safe-buffer":49,"string_decoder/":50,"util":64}],43:[function(require,module,exports){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a transform stream is a readable/writable stream where you do
+// something with the data.  Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored.  (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation.  For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes.  When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up.  When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer.  When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks.  If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk.  However,
+// a pathological inflate type of transform can cause excessive buffering
+// here.  For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output.  In this case, you could write a very small
+// amount of input, and end up with a very large amount of output.  In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform.  A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
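
To make the comment above concrete, a standalone sketch (not part of the patch) of a trivial Transform: one write() in, one transformed chunk out.

const { Transform } = require('stream');

const upper = new Transform({
  transform(chunk, encoding, cb) {
    // cb(null, data) both acknowledges the chunk and pushes the result.
    cb(null, chunk.toString().toUpperCase());
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
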
 
-  // Writable ctor is applied to Duplexes, too.
-  // `realHasInstance` is necessary because using plain `instanceof`
-  // would return false, as no `_writableState` property is attached.
+'use strict';
 
-  // Trying to use the custom `instanceof` for Writable here will also break the
-  // Node.js LazyTransform implementation, which has a non-trivial getter for
-  // `_writableState` that would lead to infinite recursion.
-  if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
-    return new Writable(options);
-  }
+module.exports = Transform;
 
-  this._writableState = new WritableState(options, this);
+var Duplex = require('./_stream_duplex');
 
-  // legacy.
-  this.writable = true;
+/*<replacement>*/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/*</replacement>*/
 
-  if (options) {
-    if (typeof options.write === 'function') this._write = options.write;
+util.inherits(Transform, Duplex);
 
-    if (typeof options.writev === 'function') this._writev = options.writev;
+function afterTransform(er, data) {
+  var ts = this._transformState;
+  ts.transforming = false;
 
-    if (typeof options.destroy === 'function') this._destroy = options.destroy;
+  var cb = ts.writecb;
 
-    if (typeof options.final === 'function') this._final = options.final;
+  if (!cb) {
+    return this.emit('error', new Error('write callback called multiple times'));
   }
 
-  Stream.call(this);
-}
-
-// Otherwise people can pipe Writable streams, which is just wrong.
-Writable.prototype.pipe = function () {
-  this.emit('error', new Error('Cannot pipe, not readable'));
-};
+  ts.writechunk = null;
+  ts.writecb = null;
 
-function writeAfterEnd(stream, cb) {
-  var er = new Error('write after end');
-  // TODO: defer error events consistently everywhere, not just the cb
-  stream.emit('error', er);
-  pna.nextTick(cb, er);
-}
+  if (data != null) // single equals check for both `null` and `undefined`
+    this.push(data);
 
-// Checks that a user-supplied chunk is valid, especially for the particular
-// mode the stream is in. Currently this means that `null` is never accepted
-// and undefined/non-string values are only allowed in object mode.
-function validChunk(stream, state, chunk, cb) {
-  var valid = true;
-  var er = false;
+  cb(er);
 
-  if (chunk === null) {
-    er = new TypeError('May not write null values to stream');
-  } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
-    er = new TypeError('Invalid non-string/buffer chunk');
-  }
-  if (er) {
-    stream.emit('error', er);
-    pna.nextTick(cb, er);
-    valid = false;
+  var rs = this._readableState;
+  rs.reading = false;
+  if (rs.needReadable || rs.length < rs.highWaterMark) {
+    this._read(rs.highWaterMark);
   }
-  return valid;
 }
 
-Writable.prototype.write = function (chunk, encoding, cb) {
-  var state = this._writableState;
-  var ret = false;
-  var isBuf = !state.objectMode && _isUint8Array(chunk);
-
-  if (isBuf && !Buffer.isBuffer(chunk)) {
-    chunk = _uint8ArrayToBuffer(chunk);
-  }
-
-  if (typeof encoding === 'function') {
-    cb = encoding;
-    encoding = null;
-  }
-
-  if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
-
-  if (typeof cb !== 'function') cb = nop;
-
-  if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
-    state.pendingcb++;
-    ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
-  }
+function Transform(options) {
+  if (!(this instanceof Transform)) return new Transform(options);
 
-  return ret;
-};
+  Duplex.call(this, options);
 
-Writable.prototype.cork = function () {
-  var state = this._writableState;
+  this._transformState = {
+    afterTransform: afterTransform.bind(this),
+    needTransform: false,
+    transforming: false,
+    writecb: null,
+    writechunk: null,
+    writeencoding: null
+  };
 
-  state.corked++;
-};
+  // start out asking for a readable event once data is transformed.
+  this._readableState.needReadable = true;
 
-Writable.prototype.uncork = function () {
-  var state = this._writableState;
+  // we have implemented the _read method, and done the other things
+  // that Readable wants before the first _read call, so unset the
+  // sync guard flag.
+  this._readableState.sync = false;
 
-  if (state.corked) {
-    state.corked--;
+  if (options) {
+    if (typeof options.transform === 'function') this._transform = options.transform;
 
-    if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
+    if (typeof options.flush === 'function') this._flush = options.flush;
   }
-};
-
-Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
-  // node::ParseEncoding() requires lower case.
-  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
-  if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
-  this._writableState.defaultEncoding = encoding;
-  return this;
-};
 
-function decodeChunk(state, chunk, encoding) {
-  if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
-    chunk = Buffer.from(chunk, encoding);
-  }
-  return chunk;
+  // When the writable side finishes, then flush out anything remaining.
+  this.on('prefinish', prefinish);
 }
 
-Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
-  // making it explicit this property is not enumerable
-  // because otherwise some prototype manipulation in
-  // userland will fail
-  enumerable: false,
-  get: function () {
-    return this._writableState.highWaterMark;
-  }
-});
-
-// if we're already writing something, then just put this
-// in the queue, and wait our turn.  Otherwise, call _write
-// If we return false, then we need a drain event, so set that flag.
-function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
-  if (!isBuf) {
-    var newChunk = decodeChunk(state, chunk, encoding);
-    if (chunk !== newChunk) {
-      isBuf = true;
-      encoding = 'buffer';
-      chunk = newChunk;
-    }
-  }
-  var len = state.objectMode ? 1 : chunk.length;
-
-  state.length += len;
-
-  var ret = state.length < state.highWaterMark;
-  // we must ensure that previous needDrain will not be reset to false.
-  if (!ret) state.needDrain = true;
+function prefinish() {
+  var _this = this;
 
-  if (state.writing || state.corked) {
-    var last = state.lastBufferedRequest;
-    state.lastBufferedRequest = {
-      chunk: chunk,
-      encoding: encoding,
-      isBuf: isBuf,
-      callback: cb,
-      next: null
-    };
-    if (last) {
-      last.next = state.lastBufferedRequest;
-    } else {
-      state.bufferedRequest = state.lastBufferedRequest;
-    }
-    state.bufferedRequestCount += 1;
+  if (typeof this._flush === 'function') {
+    this._flush(function (er, data) {
+      done(_this, er, data);
+    });
   } else {
-    doWrite(stream, state, false, len, chunk, encoding, cb);
+    done(this, null, null);
   }
-
-  return ret;
 }
 
-function doWrite(stream, state, writev, len, chunk, encoding, cb) {
-  state.writelen = len;
-  state.writecb = cb;
-  state.writing = true;
-  state.sync = true;
-  if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
-  state.sync = false;
-}
+Transform.prototype.push = function (chunk, encoding) {
+  this._transformState.needTransform = false;
+  return Duplex.prototype.push.call(this, chunk, encoding);
+};
 
-function onwriteError(stream, state, sync, er, cb) {
-  --state.pendingcb;
+// This is the part where you do stuff!
+// override this function in implementation classes.
+// 'chunk' is an input chunk.
+//
+// Call `push(newChunk)` to pass along transformed output
+// to the readable side.  You may call 'push' zero or more times.
+//
+// Call `cb(err)` when you are done with this chunk.  If you pass
+// an error, then that'll put the hurt on the whole operation.  If you
+// never call cb(), then you'll never get another chunk.
+Transform.prototype._transform = function (chunk, encoding, cb) {
+  throw new Error('_transform() is not implemented');
+};
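
As the comment above says, implementation classes override _transform(); a standalone sketch that pushes zero or more chunks per input chunk (naively assuming each chunk ends on a line boundary):

const { Transform } = require('stream');

class LineSplitter extends Transform {
  _transform(chunk, encoding, cb) {
    for (const line of chunk.toString().split('\n')) {
      if (line) this.push(line + '\n'); // push() may be called any number of times
    }
    cb(); // passing an error here would fail the whole stream
  }
}

process.stdin.pipe(new LineSplitter()).pipe(process.stdout);
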
 
-  if (sync) {
-    // defer the callback if we are being called synchronously
-    // to avoid piling up things on the stack
-    pna.nextTick(cb, er);
-    // this can emit finish, and it will always happen
-    // after error
-    pna.nextTick(finishMaybe, stream, state);
-    stream._writableState.errorEmitted = true;
-    stream.emit('error', er);
-  } else {
-    // the caller expect this to happen before if
-    // it is async
-    cb(er);
-    stream._writableState.errorEmitted = true;
-    stream.emit('error', er);
-    // this can emit finish, but finish must
-    // always follow error
-    finishMaybe(stream, state);
+Transform.prototype._write = function (chunk, encoding, cb) {
+  var ts = this._transformState;
+  ts.writecb = cb;
+  ts.writechunk = chunk;
+  ts.writeencoding = encoding;
+  if (!ts.transforming) {
+    var rs = this._readableState;
+    if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
   }
-}
+};
+
+// Doesn't matter what the args are here.
+// _transform does all the work.
+// That we got here means that the readable side wants more data.
+Transform.prototype._read = function (n) {
+  var ts = this._transformState;
 
-function onwriteStateUpdate(state) {
-  state.writing = false;
-  state.writecb = null;
-  state.length -= state.writelen;
-  state.writelen = 0;
-}
+  if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
+    ts.transforming = true;
+    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
+  } else {
+    // mark that we need a transform, so that any data that comes in
+    // will get processed, now that we've asked for it.
+    ts.needTransform = true;
+  }
+};
 
-function onwrite(stream, er) {
-  var state = stream._writableState;
-  var sync = state.sync;
-  var cb = state.writecb;
+Transform.prototype._destroy = function (err, cb) {
+  var _this2 = this;
 
-  onwriteStateUpdate(state);
+  Duplex.prototype._destroy.call(this, err, function (err2) {
+    cb(err2);
+    _this2.emit('close');
+  });
+};
 
-  if (er) onwriteError(stream, state, sync, er, cb);else {
-    // Check if we're actually ready to finish, but don't emit yet
-    var finished = needFinish(state);
+function done(stream, er, data) {
+  if (er) return stream.emit('error', er);
 
-    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
-      clearBuffer(stream, state);
-    }
+  if (data != null) // single equals check for both `null` and `undefined`
+    stream.push(data);
 
-    if (sync) {
-      /*<replacement>*/
-      asyncWrite(afterWrite, stream, state, finished, cb);
-      /*</replacement>*/
-    } else {
-      afterWrite(stream, state, finished, cb);
-    }
-  }
-}
+  // if there's nothing in the write buffer, then that means
+  // that nothing more will ever be provided
+  if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
 
-function afterWrite(stream, state, finished, cb) {
-  if (!finished) onwriteDrain(stream, state);
-  state.pendingcb--;
-  cb();
-  finishMaybe(stream, state);
-}
+  if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
 
-// Must force callback to be called on nextTick, so that we don't
-// emit 'drain' before the write() consumer gets the 'false' return
-// value, and has a chance to attach a 'drain' listener.
-function onwriteDrain(stream, state) {
-  if (state.length === 0 && state.needDrain) {
-    state.needDrain = false;
-    stream.emit('drain');
-  }
+  return stream.push(null);
 }
+},{"./_stream_duplex":40,"core-util-is":32,"inherits":33}],44:[function(require,module,exports){
+(function (process,global,setImmediate){
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
 
-// if there's something in the buffer waiting, then process it
-function clearBuffer(stream, state) {
-  state.bufferProcessing = true;
-  var entry = state.bufferedRequest;
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, encoding, cb), and it'll handle all
+// the drain event emission and buffering.
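
A standalone sketch of the contract described above: supply an async _write(chunk, encoding, cb) and the machinery in this module handles buffering and 'drain'.

const { Writable } = require('stream');

const sink = new Writable({
  write(chunk, encoding, cb) {
    console.log('wrote', chunk.length, 'bytes');
    setImmediate(cb); // completing asynchronously is fine; buffering is handled here
  }
});

sink.write(Buffer.from('hello'));
sink.end(Buffer.from('world'), () => console.log('all flushed'));
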
 
-  if (stream._writev && entry && entry.next) {
-    // Fast case, write everything using _writev()
-    var l = state.bufferedRequestCount;
-    var buffer = new Array(l);
-    var holder = state.corkedRequestsFree;
-    holder.entry = entry;
+'use strict';
 
-    var count = 0;
-    var allBuffers = true;
-    while (entry) {
-      buffer[count] = entry;
-      if (!entry.isBuf) allBuffers = false;
-      entry = entry.next;
-      count += 1;
-    }
-    buffer.allBuffers = allBuffers;
+/*<replacement>*/
 
-    doWrite(stream, state, true, state.length, buffer, '', holder.finish);
+var pna = require('process-nextick-args');
+/*</replacement>*/
 
-    // doWrite is almost always async, defer these to save a bit of time
-    // as the hot path ends with doWrite
-    state.pendingcb++;
-    state.lastBufferedRequest = null;
-    if (holder.next) {
-      state.corkedRequestsFree = holder.next;
-      holder.next = null;
-    } else {
-      state.corkedRequestsFree = new CorkedRequest(state);
-    }
-    state.bufferedRequestCount = 0;
-  } else {
-    // Slow case, write chunks one-by-one
-    while (entry) {
-      var chunk = entry.chunk;
-      var encoding = entry.encoding;
-      var cb = entry.callback;
-      var len = state.objectMode ? 1 : chunk.length;
+module.exports = Writable;
 
-      doWrite(stream, state, false, len, chunk, encoding, cb);
-      entry = entry.next;
-      state.bufferedRequestCount--;
-      // if we didn't call the onwrite immediately, then
-      // it means that we need to wait until it does.
-      // also, that means that the chunk and cb are currently
-      // being processed, so move the buffer counter past them.
-      if (state.writing) {
-        break;
-      }
-    }
+/* <replacement> */
+function WriteReq(chunk, encoding, cb) {
+  this.chunk = chunk;
+  this.encoding = encoding;
+  this.callback = cb;
+  this.next = null;
+}
 
-    if (entry === null) state.lastBufferedRequest = null;
-  }
+// It seems a linked list but it is not
+// there will be only 2 of these for each stream
+function CorkedRequest(state) {
+  var _this = this;
 
-  state.bufferedRequest = entry;
-  state.bufferProcessing = false;
+  this.next = null;
+  this.entry = null;
+  this.finish = function () {
+    onCorkedFinish(_this, state);
+  };
 }
+/* </replacement> */
 
-Writable.prototype._write = function (chunk, encoding, cb) {
-  cb(new Error('_write() is not implemented'));
-};
+/*<replacement>*/
+var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
+/*</replacement>*/
 
-Writable.prototype._writev = null;
+/*<replacement>*/
+var Duplex;
+/*</replacement>*/
 
-Writable.prototype.end = function (chunk, encoding, cb) {
-  var state = this._writableState;
+Writable.WritableState = WritableState;
 
-  if (typeof chunk === 'function') {
-    cb = chunk;
-    chunk = null;
-    encoding = null;
-  } else if (typeof encoding === 'function') {
-    cb = encoding;
-    encoding = null;
-  }
+/*<replacement>*/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/*</replacement>*/
 
-  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
+/*<replacement>*/
+var internalUtil = {
+  deprecate: require('util-deprecate')
+};
+/*</replacement>*/
 
-  // .end() fully uncorks
-  if (state.corked) {
-    state.corked = 1;
-    this.uncork();
-  }
+/*<replacement>*/
+var Stream = require('./internal/streams/stream');
+/*</replacement>*/
 
-  // ignore unnecessary end() calls.
-  if (!state.ending && !state.finished) endWritable(this, state, cb);
-};
+/*<replacement>*/
 
-function needFinish(state) {
-  return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
-}
-function callFinal(stream, state) {
-  stream._final(function (err) {
-    state.pendingcb--;
-    if (err) {
-      stream.emit('error', err);
-    }
-    state.prefinished = true;
-    stream.emit('prefinish');
-    finishMaybe(stream, state);
-  });
+var Buffer = require('safe-buffer').Buffer;
+var OurUint8Array = global.Uint8Array || function () {};
+function _uint8ArrayToBuffer(chunk) {
+  return Buffer.from(chunk);
 }
-function prefinish(stream, state) {
-  if (!state.prefinished && !state.finalCalled) {
-    if (typeof stream._final === 'function') {
-      state.pendingcb++;
-      state.finalCalled = true;
-      pna.nextTick(callFinal, stream, state);
-    } else {
-      state.prefinished = true;
-      stream.emit('prefinish');
-    }
-  }
+function _isUint8Array(obj) {
+  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
 }
 
-function finishMaybe(stream, state) {
-  var need = needFinish(state);
-  if (need) {
-    prefinish(stream, state);
-    if (state.pendingcb === 0) {
-      state.finished = true;
-      stream.emit('finish');
-    }
-  }
-  return need;
-}
+/*</replacement>*/
+
+var destroyImpl = require('./internal/streams/destroy');
+
+util.inherits(Writable, Stream);
+
+function nop() {}
+
+function WritableState(options, stream) {
+  Duplex = Duplex || require('./_stream_duplex');
 
-function endWritable(stream, state, cb) {
-  state.ending = true;
-  finishMaybe(stream, state);
-  if (cb) {
-    if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
-  }
-  state.ended = true;
-  stream.writable = false;
-}
+  options = options || {};
 
-function onCorkedFinish(corkReq, state, err) {
-  var entry = corkReq.entry;
-  corkReq.entry = null;
-  while (entry) {
-    var cb = entry.callback;
-    state.pendingcb--;
-    cb(err);
-    entry = entry.next;
-  }
-  if (state.corkedRequestsFree) {
-    state.corkedRequestsFree.next = corkReq;
-  } else {
-    state.corkedRequestsFree = corkReq;
-  }
-}
+  // Duplex streams are both readable and writable, but share
+  // the same options object.
+  // However, some cases require setting options to different
+  // values for the readable and the writable sides of the duplex stream.
+  // These options can be provided separately as readableXXX and writableXXX.
+  var isDuplex = stream instanceof Duplex;
 
-Object.defineProperty(Writable.prototype, 'destroyed', {
-  get: function () {
-    if (this._writableState === undefined) {
-      return false;
-    }
-    return this._writableState.destroyed;
-  },
-  set: function (value) {
-    // we ignore the value if the stream
-    // has not been initialized yet
-    if (!this._writableState) {
-      return;
-    }
+  // object stream flag to indicate whether or not this stream
+  // contains buffers or objects.
+  this.objectMode = !!options.objectMode;
 
-    // backward compatibility, the user is explicitly
-    // managing destroyed
-    this._writableState.destroyed = value;
-  }
-});
+  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
 
-Writable.prototype.destroy = destroyImpl.destroy;
-Writable.prototype._undestroy = destroyImpl.undestroy;
-Writable.prototype._destroy = function (err, cb) {
-  this.end();
-  cb(err);
-};
-}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {},require("timers").setImmediate)
-},{"./_stream_duplex":19,"./internal/streams/destroy":25,"./internal/streams/stream":26,"_process":49,"core-util-is":11,"inherits":12,"process-nextick-args":18,"safe-buffer":28,"timers":50,"util-deprecate":30}],24:[function(require,module,exports){
-'use strict';
+  // the point at which write() starts returning false
+  // Note: 0 is a valid value, means that we always return false if
+  // the entire buffer is not flushed immediately on write()
+  var hwm = options.highWaterMark;
+  var writableHwm = options.writableHighWaterMark;
+  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
 
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
 
-var Buffer = require('safe-buffer').Buffer;
-var util = require('util');
+  // cast to ints.
+  this.highWaterMark = Math.floor(this.highWaterMark);
 
-function copyBuffer(src, target, offset) {
-  src.copy(target, offset);
-}
+  // if _final has been called
+  this.finalCalled = false;
 
-module.exports = function () {
-  function BufferList() {
-    _classCallCheck(this, BufferList);
+  // drain event flag.
+  this.needDrain = false;
+  // at the start of calling end()
+  this.ending = false;
+  // when end() has been called, and returned
+  this.ended = false;
+  // when 'finish' is emitted
+  this.finished = false;
 
-    this.head = null;
-    this.tail = null;
-    this.length = 0;
-  }
+  // has it been destroyed
+  this.destroyed = false;
 
-  BufferList.prototype.push = function push(v) {
-    var entry = { data: v, next: null };
-    if (this.length > 0) this.tail.next = entry;else this.head = entry;
-    this.tail = entry;
-    ++this.length;
-  };
+  // should we decode strings into buffers before passing to _write?
+  // this is here so that some node-core streams can optimize string
+  // handling at a lower level.
+  var noDecode = options.decodeStrings === false;
+  this.decodeStrings = !noDecode;
 
-  BufferList.prototype.unshift = function unshift(v) {
-    var entry = { data: v, next: this.head };
-    if (this.length === 0) this.tail = entry;
-    this.head = entry;
-    ++this.length;
-  };
+  // Crypto is kind of old and crusty.  Historically, its default string
+  // encoding is 'binary' so we have to make this configurable.
+  // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = options.defaultEncoding || 'utf8';
 
-  BufferList.prototype.shift = function shift() {
-    if (this.length === 0) return;
-    var ret = this.head.data;
-    if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
-    --this.length;
-    return ret;
-  };
+  // not an actual buffer we keep track of, but a measurement
+  // of how much we're waiting to get pushed to some underlying
+  // socket or file.
+  this.length = 0;
 
-  BufferList.prototype.clear = function clear() {
-    this.head = this.tail = null;
-    this.length = 0;
-  };
+  // a flag to see when we're in the middle of a write.
+  this.writing = false;
 
-  BufferList.prototype.join = function join(s) {
-    if (this.length === 0) return '';
-    var p = this.head;
-    var ret = '' + p.data;
-    while (p = p.next) {
-      ret += s + p.data;
-    }return ret;
-  };
+  // when true all writes will be buffered until .uncork() call
+  this.corked = 0;
 
-  BufferList.prototype.concat = function concat(n) {
-    if (this.length === 0) return Buffer.alloc(0);
-    if (this.length === 1) return this.head.data;
-    var ret = Buffer.allocUnsafe(n >>> 0);
-    var p = this.head;
-    var i = 0;
-    while (p) {
-      copyBuffer(p.data, ret, i);
-      i += p.data.length;
-      p = p.next;
-    }
-    return ret;
-  };
+  // a flag to be able to tell if the onwrite cb is called immediately,
+  // or on a later tick.  We set this to true at first, because any
+  // actions that shouldn't happen until "later" should generally also
+  // not happen before the first write call.
+  this.sync = true;
 
-  return BufferList;
-}();
+  // a flag to know if we're processing previously buffered items, which
+  // may call the _write() callback in the same tick, so that we don't
+  // end up in an overlapped onwrite situation.
+  this.bufferProcessing = false;
 
-if (util && util.inspect && util.inspect.custom) {
-  module.exports.prototype[util.inspect.custom] = function () {
-    var obj = util.inspect({ length: this.length });
-    return this.constructor.name + ' ' + obj;
+  // the callback that's passed to _write(chunk,cb)
+  this.onwrite = function (er) {
+    onwrite(stream, er);
   };
-}
-},{"safe-buffer":28,"util":43}],25:[function(require,module,exports){
-'use strict';
 
-/*<replacement>*/
+  // the callback that the user supplies to write(chunk,encoding,cb)
+  this.writecb = null;
 
-var pna = require('process-nextick-args');
-/*</replacement>*/
+  // the amount that is being written when _write is called.
+  this.writelen = 0;
 
-// undocumented cb() API, needed for core, not for public API
-function destroy(err, cb) {
-  var _this = this;
+  this.bufferedRequest = null;
+  this.lastBufferedRequest = null;
 
-  var readableDestroyed = this._readableState && this._readableState.destroyed;
-  var writableDestroyed = this._writableState && this._writableState.destroyed;
+  // number of pending user-supplied write callbacks
+  // this must be 0 before 'finish' can be emitted
+  this.pendingcb = 0;
 
-  if (readableDestroyed || writableDestroyed) {
-    if (cb) {
-      cb(err);
-    } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
-      pna.nextTick(emitErrorNT, this, err);
-    }
-    return this;
-  }
+  // emit prefinish if the only thing we're waiting for is _write cbs
+  // This is relevant for synchronous Transform streams
+  this.prefinished = false;
 
-  // we set destroyed to true before firing error callbacks in order
-  // to make it re-entrance safe in case destroy() is called within callbacks
+  // True if the error was already emitted and should not be thrown again
+  this.errorEmitted = false;
 
-  if (this._readableState) {
-    this._readableState.destroyed = true;
-  }
+  // count buffered requests
+  this.bufferedRequestCount = 0;
 
-  // if this is a duplex stream mark the writable part as destroyed as well
-  if (this._writableState) {
-    this._writableState.destroyed = true;
+  // allocate the first CorkedRequest, there is always
+  // one allocated and free to use, and we maintain at most two
+  this.corkedRequestsFree = new CorkedRequest(this);
+}
+
+WritableState.prototype.getBuffer = function getBuffer() {
+  var current = this.bufferedRequest;
+  var out = [];
+  while (current) {
+    out.push(current);
+    current = current.next;
   }
+  return out;
+};
 
-  this._destroy(err || null, function (err) {
-    if (!cb && err) {
-      pna.nextTick(emitErrorNT, _this, err);
-      if (_this._writableState) {
-        _this._writableState.errorEmitted = true;
-      }
-    } else if (cb) {
-      cb(err);
+(function () {
+  try {
+    Object.defineProperty(WritableState.prototype, 'buffer', {
+      get: internalUtil.deprecate(function () {
+        return this.getBuffer();
+      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
+    });
+  } catch (_) {}
+})();
+
+// Test _writableState for inheritance to account for Duplex streams,
+// whose prototype chain only points to Readable.
+var realHasInstance;
+if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
+  realHasInstance = Function.prototype[Symbol.hasInstance];
+  Object.defineProperty(Writable, Symbol.hasInstance, {
+    value: function (object) {
+      if (realHasInstance.call(this, object)) return true;
+      if (this !== Writable) return false;
+
+      return object && object._writableState instanceof WritableState;
     }
   });
-
-  return this;
+} else {
+  realHasInstance = function (object) {
+    return object instanceof this;
+  };
 }
 
-function undestroy() {
-  if (this._readableState) {
-    this._readableState.destroyed = false;
-    this._readableState.reading = false;
-    this._readableState.ended = false;
-    this._readableState.endEmitted = false;
-  }
+function Writable(options) {
+  Duplex = Duplex || require('./_stream_duplex');
 
-  if (this._writableState) {
-    this._writableState.destroyed = false;
-    this._writableState.ended = false;
-    this._writableState.ending = false;
-    this._writableState.finished = false;
-    this._writableState.errorEmitted = false;
+  // Writable ctor is applied to Duplexes, too.
+  // `realHasInstance` is necessary because using plain `instanceof`
+  // would return false, as no `_writableState` property is attached.
+
+  // Trying to use the custom `instanceof` for Writable here will also break the
+  // Node.js LazyTransform implementation, which has a non-trivial getter for
+  // `_writableState` that would lead to infinite recursion.
+  if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
+    return new Writable(options);
   }
-}
 
-function emitErrorNT(self, err) {
-  self.emit('error', err);
-}
+  this._writableState = new WritableState(options, this);
 
-module.exports = {
-  destroy: destroy,
-  undestroy: undestroy
-};
-},{"process-nextick-args":18}],26:[function(require,module,exports){
-module.exports = require('events').EventEmitter;
+  // legacy.
+  this.writable = true;
 
-},{"events":45}],27:[function(require,module,exports){
-exports = module.exports = require('./lib/_stream_readable.js');
-exports.Stream = exports;
-exports.Readable = exports;
-exports.Writable = require('./lib/_stream_writable.js');
-exports.Duplex = require('./lib/_stream_duplex.js');
-exports.Transform = require('./lib/_stream_transform.js');
-exports.PassThrough = require('./lib/_stream_passthrough.js');
+  if (options) {
+    if (typeof options.write === 'function') this._write = options.write;
 
-},{"./lib/_stream_duplex.js":19,"./lib/_stream_passthrough.js":20,"./lib/_stream_readable.js":21,"./lib/_stream_transform.js":22,"./lib/_stream_writable.js":23}],28:[function(require,module,exports){
-/* eslint-disable node/no-deprecated-api */
-var buffer = require('buffer')
-var Buffer = buffer.Buffer
+    if (typeof options.writev === 'function') this._writev = options.writev;
 
-// alternative to using Object.keys for old browsers
-function copyProps (src, dst) {
-  for (var key in src) {
-    dst[key] = src[key]
+    if (typeof options.destroy === 'function') this._destroy = options.destroy;
+
+    if (typeof options.final === 'function') this._final = options.final;
   }
-}
-if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
-  module.exports = buffer
-} else {
-  // Copy properties from require('buffer')
-  copyProps(buffer, exports)
-  exports.Buffer = SafeBuffer
-}
 
-function SafeBuffer (arg, encodingOrOffset, length) {
-  return Buffer(arg, encodingOrOffset, length)
+  Stream.call(this);
 }
 
-// Copy static methods from Buffer
-copyProps(Buffer, SafeBuffer)
+// Otherwise people can pipe Writable streams, which is just wrong.
+Writable.prototype.pipe = function () {
+  this.emit('error', new Error('Cannot pipe, not readable'));
+};
 
-SafeBuffer.from = function (arg, encodingOrOffset, length) {
-  if (typeof arg === 'number') {
-    throw new TypeError('Argument must not be a number')
-  }
-  return Buffer(arg, encodingOrOffset, length)
+function writeAfterEnd(stream, cb) {
+  var er = new Error('write after end');
+  // TODO: defer error events consistently everywhere, not just the cb
+  stream.emit('error', er);
+  pna.nextTick(cb, er);
 }
 
-SafeBuffer.alloc = function (size, fill, encoding) {
-  if (typeof size !== 'number') {
-    throw new TypeError('Argument must be a number')
-  }
-  var buf = Buffer(size)
-  if (fill !== undefined) {
-    if (typeof encoding === 'string') {
-      buf.fill(fill, encoding)
-    } else {
-      buf.fill(fill)
-    }
-  } else {
-    buf.fill(0)
-  }
-  return buf
-}
+// Checks that a user-supplied chunk is valid, especially for the particular
+// mode the stream is in. Currently this means that `null` is never accepted
+// and undefined/non-string values are only allowed in object mode.
+function validChunk(stream, state, chunk, cb) {
+  var valid = true;
+  var er = false;
 
-SafeBuffer.allocUnsafe = function (size) {
-  if (typeof size !== 'number') {
-    throw new TypeError('Argument must be a number')
+  if (chunk === null) {
+    er = new TypeError('May not write null values to stream');
+  } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+    er = new TypeError('Invalid non-string/buffer chunk');
   }
-  return Buffer(size)
-}
-
-SafeBuffer.allocUnsafeSlow = function (size) {
-  if (typeof size !== 'number') {
-    throw new TypeError('Argument must be a number')
+  if (er) {
+    stream.emit('error', er);
+    pna.nextTick(cb, er);
+    valid = false;
   }
-  return buffer.SlowBuffer(size)
+  return valid;
 }
 
-},{"buffer":44}],29:[function(require,module,exports){
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-'use strict';
-
-/*<replacement>*/
-
-var Buffer = require('safe-buffer').Buffer;
-/*</replacement>*/
+Writable.prototype.write = function (chunk, encoding, cb) {
+  var state = this._writableState;
+  var ret = false;
+  var isBuf = !state.objectMode && _isUint8Array(chunk);
 
-var isEncoding = Buffer.isEncoding || function (encoding) {
-  encoding = '' + encoding;
-  switch (encoding && encoding.toLowerCase()) {
-    case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
-      return true;
-    default:
-      return false;
+  if (isBuf && !Buffer.isBuffer(chunk)) {
+    chunk = _uint8ArrayToBuffer(chunk);
   }
-};
 
-function _normalizeEncoding(enc) {
-  if (!enc) return 'utf8';
-  var retried;
-  while (true) {
-    switch (enc) {
-      case 'utf8':
-      case 'utf-8':
-        return 'utf8';
-      case 'ucs2':
-      case 'ucs-2':
-      case 'utf16le':
-      case 'utf-16le':
-        return 'utf16le';
-      case 'latin1':
-      case 'binary':
-        return 'latin1';
-      case 'base64':
-      case 'ascii':
-      case 'hex':
-        return enc;
-      default:
-        if (retried) return; // undefined
-        enc = ('' + enc).toLowerCase();
-        retried = true;
-    }
+  if (typeof encoding === 'function') {
+    cb = encoding;
+    encoding = null;
   }
-};
-
-// Do not cache `Buffer.isEncoding` when checking encoding names as some
-// modules monkey-patch it to support additional encodings
-function normalizeEncoding(enc) {
-  var nenc = _normalizeEncoding(enc);
-  if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
-  return nenc || enc;
-}
 
-// StringDecoder provides an interface for efficiently splitting a series of
-// buffers into a series of JS strings without breaking apart multi-byte
-// characters.
-exports.StringDecoder = StringDecoder;
-function StringDecoder(encoding) {
-  this.encoding = normalizeEncoding(encoding);
-  var nb;
-  switch (this.encoding) {
-    case 'utf16le':
-      this.text = utf16Text;
-      this.end = utf16End;
-      nb = 4;
-      break;
-    case 'utf8':
-      this.fillLast = utf8FillLast;
-      nb = 4;
-      break;
-    case 'base64':
-      this.text = base64Text;
-      this.end = base64End;
-      nb = 3;
-      break;
-    default:
-      this.write = simpleWrite;
-      this.end = simpleEnd;
-      return;
-  }
-  this.lastNeed = 0;
-  this.lastTotal = 0;
-  this.lastChar = Buffer.allocUnsafe(nb);
-}
+  if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
 
-StringDecoder.prototype.write = function (buf) {
-  if (buf.length === 0) return '';
-  var r;
-  var i;
-  if (this.lastNeed) {
-    r = this.fillLast(buf);
-    if (r === undefined) return '';
-    i = this.lastNeed;
-    this.lastNeed = 0;
-  } else {
-    i = 0;
+  if (typeof cb !== 'function') cb = nop;
+
+  if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
+    state.pendingcb++;
+    ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
   }
-  if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
-  return r || '';
+
+  return ret;
 };
 
-StringDecoder.prototype.end = utf8End;
+Writable.prototype.cork = function () {
+  var state = this._writableState;
 
-// Returns only complete characters in a Buffer
-StringDecoder.prototype.text = utf8Text;
+  state.corked++;
+};
 
-// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
-StringDecoder.prototype.fillLast = function (buf) {
-  if (this.lastNeed <= buf.length) {
-    buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
-    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
+Writable.prototype.uncork = function () {
+  var state = this._writableState;
+
+  if (state.corked) {
+    state.corked--;
+
+    if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
   }
-  buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
-  this.lastNeed -= buf.length;
 };
 
-// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
-// continuation byte. If an invalid byte is detected, -2 is returned.
-function utf8CheckByte(byte) {
-  if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
-  return byte >> 6 === 0x02 ? -1 : -2;
-}
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+  // node::ParseEncoding() requires lower case.
+  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
+  if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
+  this._writableState.defaultEncoding = encoding;
+  return this;
+};
 
-// Checks at most 3 bytes at the end of a Buffer in order to detect an
-// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
-// needed to complete the UTF-8 character (if applicable) are returned.
-function utf8CheckIncomplete(self, buf, i) {
-  var j = buf.length - 1;
-  if (j < i) return 0;
-  var nb = utf8CheckByte(buf[j]);
-  if (nb >= 0) {
-    if (nb > 0) self.lastNeed = nb - 1;
-    return nb;
-  }
-  if (--j < i || nb === -2) return 0;
-  nb = utf8CheckByte(buf[j]);
-  if (nb >= 0) {
-    if (nb > 0) self.lastNeed = nb - 2;
-    return nb;
-  }
-  if (--j < i || nb === -2) return 0;
-  nb = utf8CheckByte(buf[j]);
-  if (nb >= 0) {
-    if (nb > 0) {
-      if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
-    }
-    return nb;
+function decodeChunk(state, chunk, encoding) {
+  if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
+    chunk = Buffer.from(chunk, encoding);
   }
-  return 0;
+  return chunk;
 }
 
-// Validates as many continuation bytes for a multi-byte UTF-8 character as
-// needed or are available. If we see a non-continuation byte where we expect
-// one, we "replace" the validated continuation bytes we've seen so far with
-// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
-// behavior. The continuation byte check is included three times in the case
-// where all of the continuation bytes for a character exist in the same buffer.
-// It is also done this way as a slight performance increase instead of using a
-// loop.
-function utf8CheckExtraBytes(self, buf, p) {
-  if ((buf[0] & 0xC0) !== 0x80) {
-    self.lastNeed = 0;
-    return '\ufffd';
+Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
+  // making it explicit this property is not enumerable
+  // because otherwise some prototype manipulation in
+  // userland will fail
+  enumerable: false,
+  get: function () {
+    return this._writableState.highWaterMark;
   }
-  if (self.lastNeed > 1 && buf.length > 1) {
-    if ((buf[1] & 0xC0) !== 0x80) {
-      self.lastNeed = 1;
-      return '\ufffd';
+});
+
+// if we're already writing something, then just put this
+// in the queue, and wait our turn.  Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
+  if (!isBuf) {
+    var newChunk = decodeChunk(state, chunk, encoding);
+    if (chunk !== newChunk) {
+      isBuf = true;
+      encoding = 'buffer';
+      chunk = newChunk;
     }
-    if (self.lastNeed > 2 && buf.length > 2) {
-      if ((buf[2] & 0xC0) !== 0x80) {
-        self.lastNeed = 2;
-        return '\ufffd';
-      }
+  }
+  var len = state.objectMode ? 1 : chunk.length;
+
+  state.length += len;
+
+  var ret = state.length < state.highWaterMark;
+  // we must ensure that previous needDrain will not be reset to false.
+  if (!ret) state.needDrain = true;
+
+  if (state.writing || state.corked) {
+    var last = state.lastBufferedRequest;
+    state.lastBufferedRequest = {
+      chunk: chunk,
+      encoding: encoding,
+      isBuf: isBuf,
+      callback: cb,
+      next: null
+    };
+    if (last) {
+      last.next = state.lastBufferedRequest;
+    } else {
+      state.bufferedRequest = state.lastBufferedRequest;
     }
+    state.bufferedRequestCount += 1;
+  } else {
+    doWrite(stream, state, false, len, chunk, encoding, cb);
   }
+
+  return ret;
 }
 
-// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
-function utf8FillLast(buf) {
-  var p = this.lastTotal - this.lastNeed;
-  var r = utf8CheckExtraBytes(this, buf, p);
-  if (r !== undefined) return r;
-  if (this.lastNeed <= buf.length) {
-    buf.copy(this.lastChar, p, 0, this.lastNeed);
-    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
-  }
-  buf.copy(this.lastChar, p, 0, buf.length);
-  this.lastNeed -= buf.length;
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
+  state.writelen = len;
+  state.writecb = cb;
+  state.writing = true;
+  state.sync = true;
+  if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
+  state.sync = false;
 }
 
-// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
-// partial character, the character's bytes are buffered until the required
-// number of bytes are available.
-function utf8Text(buf, i) {
-  var total = utf8CheckIncomplete(this, buf, i);
-  if (!this.lastNeed) return buf.toString('utf8', i);
-  this.lastTotal = total;
-  var end = buf.length - (total - this.lastNeed);
-  buf.copy(this.lastChar, 0, end);
-  return buf.toString('utf8', i, end);
+function onwriteError(stream, state, sync, er, cb) {
+  --state.pendingcb;
+
+  if (sync) {
+    // defer the callback if we are being called synchronously
+    // to avoid piling up things on the stack
+    pna.nextTick(cb, er);
+    // this can emit finish, and it will always happen
+    // after error
+    pna.nextTick(finishMaybe, stream, state);
+    stream._writableState.errorEmitted = true;
+    stream.emit('error', er);
+  } else {
+    // the caller expect this to happen before if
+    // it is async
+    cb(er);
+    stream._writableState.errorEmitted = true;
+    stream.emit('error', er);
+    // this can emit finish, but finish must
+    // always follow error
+    finishMaybe(stream, state);
+  }
 }
 
-// For UTF-8, a replacement character is added when ending on a partial
-// character.
-function utf8End(buf) {
-  var r = buf && buf.length ? this.write(buf) : '';
-  if (this.lastNeed) return r + '\ufffd';
-  return r;
+function onwriteStateUpdate(state) {
+  state.writing = false;
+  state.writecb = null;
+  state.length -= state.writelen;
+  state.writelen = 0;
 }
 
-// UTF-16LE typically needs two bytes per character, but even if we have an even
-// number of bytes available, we need to check if we end on a leading/high
-// surrogate. In that case, we need to wait for the next two bytes in order to
-// decode the last character properly.
-function utf16Text(buf, i) {
-  if ((buf.length - i) % 2 === 0) {
-    var r = buf.toString('utf16le', i);
-    if (r) {
-      var c = r.charCodeAt(r.length - 1);
-      if (c >= 0xD800 && c <= 0xDBFF) {
-        this.lastNeed = 2;
-        this.lastTotal = 4;
-        this.lastChar[0] = buf[buf.length - 2];
-        this.lastChar[1] = buf[buf.length - 1];
-        return r.slice(0, -1);
-      }
+function onwrite(stream, er) {
+  var state = stream._writableState;
+  var sync = state.sync;
+  var cb = state.writecb;
+
+  onwriteStateUpdate(state);
+
+  if (er) onwriteError(stream, state, sync, er, cb);else {
+    // Check if we're actually ready to finish, but don't emit yet
+    var finished = needFinish(state);
+
+    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
+      clearBuffer(stream, state);
+    }
+
+    if (sync) {
+      /*<replacement>*/
+      asyncWrite(afterWrite, stream, state, finished, cb);
+      /*</replacement>*/
+    } else {
+      afterWrite(stream, state, finished, cb);
     }
-    return r;
   }
-  this.lastNeed = 1;
-  this.lastTotal = 2;
-  this.lastChar[0] = buf[buf.length - 1];
-  return buf.toString('utf16le', i, buf.length - 1);
 }
 
-// For UTF-16LE we do not explicitly append special replacement characters if we
-// end on a partial character, we simply let v8 handle that.
-function utf16End(buf) {
-  var r = buf && buf.length ? this.write(buf) : '';
-  if (this.lastNeed) {
-    var end = this.lastTotal - this.lastNeed;
-    return r + this.lastChar.toString('utf16le', 0, end);
+function afterWrite(stream, state, finished, cb) {
+  if (!finished) onwriteDrain(stream, state);
+  state.pendingcb--;
+  cb();
+  finishMaybe(stream, state);
+}
+
+// Must force callback to be called on nextTick, so that we don't
+// emit 'drain' before the write() consumer gets the 'false' return
+// value, and has a chance to attach a 'drain' listener.
+function onwriteDrain(stream, state) {
+  if (state.length === 0 && state.needDrain) {
+    state.needDrain = false;
+    stream.emit('drain');
   }
-  return r;
 }
 
-function base64Text(buf, i) {
-  var n = (buf.length - i) % 3;
-  if (n === 0) return buf.toString('base64', i);
-  this.lastNeed = 3 - n;
-  this.lastTotal = 3;
-  if (n === 1) {
-    this.lastChar[0] = buf[buf.length - 1];
+// if there's something in the buffer waiting, then process it
+function clearBuffer(stream, state) {
+  state.bufferProcessing = true;
+  var entry = state.bufferedRequest;
+
+  if (stream._writev && entry && entry.next) {
+    // Fast case, write everything using _writev()
+    var l = state.bufferedRequestCount;
+    var buffer = new Array(l);
+    var holder = state.corkedRequestsFree;
+    holder.entry = entry;
+
+    var count = 0;
+    var allBuffers = true;
+    while (entry) {
+      buffer[count] = entry;
+      if (!entry.isBuf) allBuffers = false;
+      entry = entry.next;
+      count += 1;
+    }
+    buffer.allBuffers = allBuffers;
+
+    doWrite(stream, state, true, state.length, buffer, '', holder.finish);
+
+    // doWrite is almost always async, defer these to save a bit of time
+    // as the hot path ends with doWrite
+    state.pendingcb++;
+    state.lastBufferedRequest = null;
+    if (holder.next) {
+      state.corkedRequestsFree = holder.next;
+      holder.next = null;
+    } else {
+      state.corkedRequestsFree = new CorkedRequest(state);
+    }
+    state.bufferedRequestCount = 0;
   } else {
-    this.lastChar[0] = buf[buf.length - 2];
-    this.lastChar[1] = buf[buf.length - 1];
+    // Slow case, write chunks one-by-one
+    while (entry) {
+      var chunk = entry.chunk;
+      var encoding = entry.encoding;
+      var cb = entry.callback;
+      var len = state.objectMode ? 1 : chunk.length;
+
+      doWrite(stream, state, false, len, chunk, encoding, cb);
+      entry = entry.next;
+      state.bufferedRequestCount--;
+      // if we didn't call the onwrite immediately, then
+      // it means that we need to wait until it does.
+      // also, that means that the chunk and cb are currently
+      // being processed, so move the buffer counter past them.
+      if (state.writing) {
+        break;
+      }
+    }
+
+    if (entry === null) state.lastBufferedRequest = null;
   }
-  return buf.toString('base64', i, buf.length - n);
-}
 
-function base64End(buf) {
-  var r = buf && buf.length ? this.write(buf) : '';
-  if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
-  return r;
+  state.bufferedRequest = entry;
+  state.bufferProcessing = false;
 }
 
-// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
-function simpleWrite(buf) {
-  return buf.toString(this.encoding);
-}
+Writable.prototype._write = function (chunk, encoding, cb) {
+  cb(new Error('_write() is not implemented'));
+};
 
-function simpleEnd(buf) {
-  return buf && buf.length ? this.write(buf) : '';
-}
-},{"safe-buffer":28}],30:[function(require,module,exports){
-(function (global){
+Writable.prototype._writev = null;
 
-/**
- * Module exports.
- */
+Writable.prototype.end = function (chunk, encoding, cb) {
+  var state = this._writableState;
 
-module.exports = deprecate;
+  if (typeof chunk === 'function') {
+    cb = chunk;
+    chunk = null;
+    encoding = null;
+  } else if (typeof encoding === 'function') {
+    cb = encoding;
+    encoding = null;
+  }
 
-/**
- * Mark that a method should not be used.
- * Returns a modified function which warns once by default.
- *
- * If `localStorage.noDeprecation = true` is set, then it is a no-op.
- *
- * If `localStorage.throwDeprecation = true` is set, then deprecated functions
- * will throw an Error when invoked.
- *
- * If `localStorage.traceDeprecation = true` is set, then deprecated functions
- * will invoke `console.trace()` instead of `console.error()`.
- *
- * @param {Function} fn - the function to deprecate
- * @param {String} msg - the string to print to the console when `fn` is invoked
- * @returns {Function} a new "deprecated" version of `fn`
- * @api public
- */
+  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
 
-function deprecate (fn, msg) {
-  if (config('noDeprecation')) {
-    return fn;
+  // .end() fully uncorks
+  if (state.corked) {
+    state.corked = 1;
+    this.uncork();
   }
 
-  var warned = false;
-  function deprecated() {
-    if (!warned) {
-      if (config('throwDeprecation')) {
-        throw new Error(msg);
-      } else if (config('traceDeprecation')) {
-        console.trace(msg);
-      } else {
-        console.warn(msg);
-      }
-      warned = true;
+  // ignore unnecessary end() calls.
+  if (!state.ending && !state.finished) endWritable(this, state, cb);
+};
+
+function needFinish(state) {
+  return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
+}
+function callFinal(stream, state) {
+  stream._final(function (err) {
+    state.pendingcb--;
+    if (err) {
+      stream.emit('error', err);
+    }
+    state.prefinished = true;
+    stream.emit('prefinish');
+    finishMaybe(stream, state);
+  });
+}
+function prefinish(stream, state) {
+  if (!state.prefinished && !state.finalCalled) {
+    if (typeof stream._final === 'function') {
+      state.pendingcb++;
+      state.finalCalled = true;
+      pna.nextTick(callFinal, stream, state);
+    } else {
+      state.prefinished = true;
+      stream.emit('prefinish');
     }
-    return fn.apply(this, arguments);
   }
+}
 
-  return deprecated;
+function finishMaybe(stream, state) {
+  var need = needFinish(state);
+  if (need) {
+    prefinish(stream, state);
+    if (state.pendingcb === 0) {
+      state.finished = true;
+      stream.emit('finish');
+    }
+  }
+  return need;
 }
 
-/**
- * Checks `localStorage` for boolean values for the given `name`.
- *
- * @param {String} name
- * @returns {Boolean}
- * @api private
- */
+function endWritable(stream, state, cb) {
+  state.ending = true;
+  finishMaybe(stream, state);
+  if (cb) {
+    if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
+  }
+  state.ended = true;
+  stream.writable = false;
+}
 
-function config (name) {
-  // accessing global.localStorage can trigger a DOMException in sandboxed iframes
-  try {
-    if (!global.localStorage) return false;
-  } catch (_) {
-    return false;
+function onCorkedFinish(corkReq, state, err) {
+  var entry = corkReq.entry;
+  corkReq.entry = null;
+  while (entry) {
+    var cb = entry.callback;
+    state.pendingcb--;
+    cb(err);
+    entry = entry.next;
+  }
+  if (state.corkedRequestsFree) {
+    state.corkedRequestsFree.next = corkReq;
+  } else {
+    state.corkedRequestsFree = corkReq;
   }
-  var val = global.localStorage[name];
-  if (null == val) return false;
-  return String(val).toLowerCase() === 'true';
 }
 
-}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{}],31:[function(require,module,exports){
+Object.defineProperty(Writable.prototype, 'destroyed', {
+  get: function () {
+    if (this._writableState === undefined) {
+      return false;
+    }
+    return this._writableState.destroyed;
+  },
+  set: function (value) {
+    // we ignore the value if the stream
+    // has not been initialized yet
+    if (!this._writableState) {
+      return;
+    }
+
+    // backward compatibility, the user is explicitly
+    // managing destroyed
+    this._writableState.destroyed = value;
+  }
+});
+
+Writable.prototype.destroy = destroyImpl.destroy;
+Writable.prototype._undestroy = destroyImpl.undestroy;
+Writable.prototype._destroy = function (err, cb) {
+  this.end();
+  cb(err);
+};
+}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {},require("timers").setImmediate)
+},{"./_stream_duplex":40,"./internal/streams/destroy":46,"./internal/streams/stream":47,"_process":70,"core-util-is":32,"inherits":33,"process-nextick-args":39,"safe-buffer":49,"timers":71,"util-deprecate":51}],45:[function(require,module,exports){
+arguments[4][15][0].apply(exports,arguments)
+},{"dup":15,"safe-buffer":49,"util":64}],46:[function(require,module,exports){
+arguments[4][16][0].apply(exports,arguments)
+},{"dup":16,"process-nextick-args":39}],47:[function(require,module,exports){
+arguments[4][17][0].apply(exports,arguments)
+},{"dup":17,"events":66}],48:[function(require,module,exports){
+arguments[4][18][0].apply(exports,arguments)
+},{"./lib/_stream_duplex.js":40,"./lib/_stream_passthrough.js":41,"./lib/_stream_readable.js":42,"./lib/_stream_transform.js":43,"./lib/_stream_writable.js":44,"dup":18}],49:[function(require,module,exports){
+arguments[4][19][0].apply(exports,arguments)
+},{"buffer":65,"dup":19}],50:[function(require,module,exports){
+arguments[4][20][0].apply(exports,arguments)
+},{"dup":20,"safe-buffer":49}],51:[function(require,module,exports){
+arguments[4][21][0].apply(exports,arguments)
+},{"dup":21}],52:[function(require,module,exports){
 var v1 = require('./v1');
 var v4 = require('./v4');
 
@@ -5811,7 +8187,7 @@ uuid.v4 = v4;
 
 module.exports = uuid;
 
-},{"./v1":34,"./v4":35}],32:[function(require,module,exports){
+},{"./v1":55,"./v4":56}],53:[function(require,module,exports){
 /**
  * Convert array of 16 byte values to UUID string format of the form:
  * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
@@ -5839,7 +8215,7 @@ function bytesToUuid(buf, offset) {
 
 module.exports = bytesToUuid;
 
-},{}],33:[function(require,module,exports){
+},{}],54:[function(require,module,exports){
 // Unique ID creation requires a high quality random # generator.  In the
 // browser this is a little complicated due to unknown quality of Math.random()
 // and inconsistent support for the `crypto` API.  We do the best we can via
@@ -5875,7 +8251,7 @@ if (getRandomValues) {
   };
 }
 
-},{}],34:[function(require,module,exports){
+},{}],55:[function(require,module,exports){
 var rng = require('./lib/rng');
 var bytesToUuid = require('./lib/bytesToUuid');
 
@@ -5986,7 +8362,7 @@ function v1(options, buf, offset) {
 
 module.exports = v1;
 
-},{"./lib/bytesToUuid":32,"./lib/rng":33}],35:[function(require,module,exports){
+},{"./lib/bytesToUuid":53,"./lib/rng":54}],56:[function(require,module,exports){
 var rng = require('./lib/rng');
 var bytesToUuid = require('./lib/bytesToUuid');
 
@@ -6017,7 +8393,7 @@ function v4(options, buf, offset) {
 
 module.exports = v4;
 
-},{"./lib/bytesToUuid":32,"./lib/rng":33}],36:[function(require,module,exports){
+},{"./lib/bytesToUuid":53,"./lib/rng":54}],57:[function(require,module,exports){
 (function (Buffer){
 const msgpack = require('msgpack5')()
   , encode  = msgpack.encode
@@ -6306,7 +8682,7 @@ Peer.prototype.getUuid = function() {
 module.exports = Peer;
 
 }).call(this,require("buffer").Buffer)
-},{"./utils/uuidParser":37,"buffer":44,"msgpack5":14,"uuid":31}],37:[function(require,module,exports){
+},{"./utils/uuidParser":58,"buffer":65,"msgpack5":35,"uuid":52}],58:[function(require,module,exports){
 // Maps for number <-> hex string conversion
 var _byteToHex = [];
 var _hexToByte = {};
@@ -6361,7 +8737,7 @@ module.exports = {
   parse: parse,
   unparse: unparse
 };
-},{}],38:[function(require,module,exports){
+},{}],59:[function(require,module,exports){
 (function (global){
 'use strict';
 
@@ -6871,16 +9247,16 @@ var objectKeys = Object.keys || function (obj) {
 };
 
 }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"object-assign":48,"util/":41}],39:[function(require,module,exports){
-arguments[4][12][0].apply(exports,arguments)
-},{"dup":12}],40:[function(require,module,exports){
+},{"object-assign":69,"util/":62}],60:[function(require,module,exports){
+arguments[4][3][0].apply(exports,arguments)
+},{"dup":3}],61:[function(require,module,exports){
 module.exports = function isBuffer(arg) {
   return arg && typeof arg === 'object'
     && typeof arg.copy === 'function'
     && typeof arg.fill === 'function'
     && typeof arg.readUInt8 === 'function';
 }
-},{}],41:[function(require,module,exports){
+},{}],62:[function(require,module,exports){
 (function (process,global){
 // Copyright Joyent, Inc. and other Node contributors.
 //
@@ -7470,7 +9846,7 @@ function hasOwnProperty(obj, prop) {
 }
 
 }).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
-},{"./support/isBuffer":40,"_process":49,"inherits":39}],42:[function(require,module,exports){
+},{"./support/isBuffer":61,"_process":70,"inherits":60}],63:[function(require,module,exports){
 'use strict'
 
 exports.byteLength = byteLength
@@ -7624,9 +10000,9 @@ function fromByteArray (uint8) {
   return parts.join('')
 }
 
-},{}],43:[function(require,module,exports){
+},{}],64:[function(require,module,exports){
 
-},{}],44:[function(require,module,exports){
+},{}],65:[function(require,module,exports){
 (function (Buffer){
 /*!
  * The buffer module from node.js, for the browser.
@@ -7640,10 +10016,6 @@ function fromByteArray (uint8) {
 
 var base64 = require('base64-js')
 var ieee754 = require('ieee754')
-var customInspectSymbol =
-  (typeof Symbol === 'function' && typeof Symbol.for === 'function')
-    ? Symbol.for('nodejs.util.inspect.custom')
-    : null
 
 exports.Buffer = Buffer
 exports.SlowBuffer = SlowBuffer
@@ -7680,9 +10052,7 @@ function typedArraySupport () {
   // Can typed array instances can be augmented?
   try {
     var arr = new Uint8Array(1)
-    var proto = { foo: function () { return 42 } }
-    Object.setPrototypeOf(proto, Uint8Array.prototype)
-    Object.setPrototypeOf(arr, proto)
+    arr.__proto__ = { __proto__: Uint8Array.prototype, foo: function () { return 42 } }
     return arr.foo() === 42
   } catch (e) {
     return false
@@ -7711,7 +10081,7 @@ function createBuffer (length) {
   }
   // Return an augmented `Uint8Array` instance
   var buf = new Uint8Array(length)
-  Object.setPrototypeOf(buf, Buffer.prototype)
+  buf.__proto__ = Buffer.prototype
   return buf
 }
 
@@ -7761,7 +10131,7 @@ function from (value, encodingOrOffset, length) {
   }
 
   if (value == null) {
-    throw new TypeError(
+    throw TypeError(
       'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
       'or Array-like Object. Received type ' + (typeof value)
     )
@@ -7813,8 +10183,8 @@ Buffer.from = function (value, encodingOrOffset, length) {
 
 // Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:
 // https://github.com/feross/buffer/pull/148
-Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype)
-Object.setPrototypeOf(Buffer, Uint8Array)
+Buffer.prototype.__proto__ = Uint8Array.prototype
+Buffer.__proto__ = Uint8Array
 
 function assertSize (size) {
   if (typeof size !== 'number') {
@@ -7918,8 +10288,7 @@ function fromArrayBuffer (array, byteOffset, length) {
   }
 
   // Return an augmented `Uint8Array` instance
-  Object.setPrototypeOf(buf, Buffer.prototype)
-
+  buf.__proto__ = Buffer.prototype
   return buf
 }
 
@@ -8241,9 +10610,6 @@ Buffer.prototype.inspect = function inspect () {
   if (this.length > max) str += ' ... '
   return '<Buffer ' + str + '>'
 }
-if (customInspectSymbol) {
-  Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect
-}
 
 Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
   if (isInstance(target, Uint8Array)) {
@@ -8369,7 +10735,7 @@ function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
         return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
       }
     }
-    return arrayIndexOf(buffer, [val], byteOffset, encoding, dir)
+    return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
   }
 
   throw new TypeError('val must be string, number or Buffer')
@@ -8698,7 +11064,7 @@ function hexSlice (buf, start, end) {
 
   var out = ''
   for (var i = start; i < end; ++i) {
-    out += hexSliceLookupTable[buf[i]]
+    out += toHex(buf[i])
   }
   return out
 }
@@ -8735,8 +11101,7 @@ Buffer.prototype.slice = function slice (start, end) {
 
   var newBuf = this.subarray(start, end)
   // Return an augmented `Uint8Array` instance
-  Object.setPrototypeOf(newBuf, Buffer.prototype)
-
+  newBuf.__proto__ = Buffer.prototype
   return newBuf
 }
 
@@ -9225,8 +11590,6 @@ Buffer.prototype.fill = function fill (val, start, end, encoding) {
     }
   } else if (typeof val === 'number') {
     val = val & 255
-  } else if (typeof val === 'boolean') {
-    val = Number(val)
   }
 
   // Invalid ranges are not set to a default, so can range check early.
@@ -9284,6 +11647,11 @@ function base64clean (str) {
   return str
 }
 
+function toHex (n) {
+  if (n < 16) return '0' + n.toString(16)
+  return n.toString(16)
+}
+
 function utf8ToBytes (string, units) {
   units = units || Infinity
   var codePoint
@@ -9414,22 +11782,8 @@ function numberIsNaN (obj) {
   return obj !== obj // eslint-disable-line no-self-compare
 }
 
-// Create lookup table for `toString('hex')`
-// See: https://github.com/feross/buffer/issues/219
-var hexSliceLookupTable = (function () {
-  var alphabet = '0123456789abcdef'
-  var table = new Array(256)
-  for (var i = 0; i < 16; ++i) {
-    var i16 = i * 16
-    for (var j = 0; j < 16; ++j) {
-      table[i16 + j] = alphabet[i] + alphabet[j]
-    }
-  }
-  return table
-})()
-
 }).call(this,require("buffer").Buffer)
-},{"base64-js":42,"buffer":44,"ieee754":46}],45:[function(require,module,exports){
+},{"base64-js":63,"buffer":65,"ieee754":67}],66:[function(require,module,exports){
 // Copyright Joyent, Inc. and other Node contributors.
 //
 // Permission is hereby granted, free of charge, to any person obtaining a
@@ -9954,7 +12308,7 @@ function functionBindPolyfill(context) {
   };
 }
 
-},{}],46:[function(require,module,exports){
+},{}],67:[function(require,module,exports){
 exports.read = function (buffer, offset, isLE, mLen, nBytes) {
   var e, m
   var eLen = (nBytes * 8) - mLen - 1
@@ -10040,7 +12394,7 @@ exports.write = function (buffer, value, offset, isLE, mLen, nBytes) {
   buffer[offset + i - d] |= s * 128
 }
 
-},{}],47:[function(require,module,exports){
+},{}],68:[function(require,module,exports){
 /*!
  * Determine if an object is a Buffer
  *
@@ -10063,7 +12417,7 @@ function isSlowBuffer (obj) {
   return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isBuffer(obj.slice(0, 0))
 }
 
-},{}],48:[function(require,module,exports){
+},{}],69:[function(require,module,exports){
 /*
 object-assign
 (c) Sindre Sorhus
@@ -10155,7 +12509,7 @@ module.exports = shouldUseNative() ? Object.assign : function (target, source) {
 	return to;
 };
 
-},{}],49:[function(require,module,exports){
+},{}],70:[function(require,module,exports){
 // shim for using process in browser
 var process = module.exports = {};
 
@@ -10341,7 +12695,7 @@ process.chdir = function (dir) {
 };
 process.umask = function() { return 0; };
 
-},{}],50:[function(require,module,exports){
+},{}],71:[function(require,module,exports){
 (function (setImmediate,clearImmediate){
 var nextTick = require('process/browser.js').nextTick;
 var apply = Function.prototype.apply;
@@ -10420,10 +12774,10 @@ exports.clearImmediate = typeof clearImmediate === "function" ? clearImmediate :
   delete immediateIds[id];
 };
 }).call(this,require("timers").setImmediate,require("timers").clearImmediate)
-},{"process/browser.js":49,"timers":50}],51:[function(require,module,exports){
-arguments[4][12][0].apply(exports,arguments)
-},{"dup":12}],52:[function(require,module,exports){
-arguments[4][40][0].apply(exports,arguments)
-},{"dup":40}],53:[function(require,module,exports){
-arguments[4][41][0].apply(exports,arguments)
-},{"./support/isBuffer":52,"_process":49,"dup":41,"inherits":51}]},{},[1]);
+},{"process/browser.js":70,"timers":71}],72:[function(require,module,exports){
+arguments[4][3][0].apply(exports,arguments)
+},{"dup":3}],73:[function(require,module,exports){
+arguments[4][61][0].apply(exports,arguments)
+},{"dup":61}],74:[function(require,module,exports){
+arguments[4][62][0].apply(exports,arguments)
+},{"./support/isBuffer":73,"_process":70,"dup":62,"inherits":72}]},{},[22]);
diff --git a/web-service/public/js/index.js b/web-service/public/js/index.js
index 3f9093481..12956dad8 100644
--- a/web-service/public/js/index.js
+++ b/web-service/public/js/index.js
@@ -1,5 +1,6 @@
 const Peer = require('../../server/src/peer')
 const VideoConverter = require('./lib/dist/video-converter');
+const msgpack = require('msgpack5')();
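+// Used below to decode the payload of type-103 stream packets.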
 
 let current_data = {};
 let peer;
@@ -48,16 +49,18 @@ getAvailableStreams = async () => {
 
 
 createVideoPlayer = () => {
-    const containerDiv = document.getElementById('container')
-    containerDiv.innerHTML = `<h1>Stream from source ${current_data.uri}</h1><br>
+    const containerDiv = document.getElementById('container');
+    containerDiv.innerHTML = '';
+    /*containerDiv.innerHTML = `<h1>Stream from source ${current_data.uri}</h1><br>
         <button onclick="renderThumbnails(); closeStream()">Go back</button>
-        <button onclick="connectToStream()">Start Stream</button><br>
+        <button onclick="connectToStream('${current_data.uri}')">Start Stream</button><br>
         <button onclick="webSocketTest()">WebSocket Test</button><br>
         <video id="ftlab-stream-video" width="640" height="360"></video>`;
     containerDiv.innerHTML += '<br>'
-    containerDiv.innerHTML += ''
+    containerDiv.innerHTML += ''*/
     createPeer();
-    connectToStream();
+    //connectToStream();
+    new FTLStream(peer, current_data.uri, containerDiv);
 }
 
 /**
@@ -106,9 +109,10 @@ createCard = (url, viewers) => {
 
 
 createPeer = () => {
-    // FOR PRODUCTION
-    const ws = new WebSocket("ws://" + location.host + ":" + (location.port == "" ? "80" : location.port) + location.pathname);
-    //const ws = new WebSocket("ws://localhost:8080")
+    // FOR PRODUCTION
+    console.log("HOST", location.host);
+    const ws = new WebSocket("ws://" + location.host + location.pathname);
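+    // location.host includes the port when it is non-default, so the explicit
+    // port handling used previously is no longer needed.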
+    //const ws = new WebSocket("ws://localhost:8080")
     ws.binaryType = "arraybuffer";
     peer = new Peer(ws)
 }
@@ -117,8 +121,84 @@ webSocketTest = () => {
     peer.send("update_cfg", "ftl://utu.fi#reconstruction_default/0/renderer/cool_effect", "true")    
 }
 
+function FTLStream(peer, uri, element) {
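+	// Wraps a single stream connection: builds a <video> element and a Play button
+	// inside `element`, binds `uri` on the given peer, and feeds received H264
+	// packets to a VideoConverter for playback.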
+	this.uri = uri;
+	this.peer = peer;
+
+	//this.elements_ = {};
+	//this.converters_ = {};
+
+	//const element = document.getElementById('ftlab-stream-video');
+	this.outer = element;
+	this.element = document.createElement("VIDEO");
+	this.element.setAttribute("width", 640);
+	this.element.setAttribute("height", 360);
+	this.element.setAttribute("controls", true);
+	this.outer.appendChild(this.element);
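+	// A basic Play button starts the default stream (frameset 0, source 0, channel 0).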
+	this.play_button = document.createElement("BUTTON");
+	this.play_button.innerHTML = "Play";
+	this.play_button.onclick = () => {
+		this.start(0,0,0);
+	}
+	this.outer.appendChild(this.play_button);
+	this.converter = null;
+
+	let rxcount = 0;
+	let ts = 0;
+	let dts = 0;
+
+	this.peer.bind(uri, (latency, streampckg, pckg) => {
+		if (pckg[0] === 2) {  // H264 packet.
+			let id = "id-"+streampckg[1]+"-"+streampckg[2]+"-"+streampckg[3];
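+			// streampckg[1..3] appear to identify frameset, source and channel; the
+			// id must match the one set by start(). pckg[0] gives the packet type
+			// (2 = H264, 103 = msgpack-encoded data).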
+
+			if (this.current === id) {
+				rxcount++;
+				if (rxcount >= 25) {
+					rxcount = 0;
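+					// Every 25 received frames, re-send a stream request (currently
+					// hard-coded to frameset 0, channel 0) so the source keeps
+					// streaming; this appears to act as a keep-alive.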
+					peer.send(uri, 0, [1,0,255,0],[255,7,35,0,0,Buffer.alloc(0)]);
+					//peer.send(current_data.uri, 0, [255,7,35,0,0,Buffer.alloc(0)], [1,0,255,0]);
+				}
+			
+				if (this.converter) {
+					/*function decode(value){
+						this.converter.appendRawData(value);
+					}
+					decode(pckg[5]);*/
+					this.converter.appendRawData(pckg[5]);
+					this.converter.play();
+				} else {
+					if (ts > 0) {
+						dts = streampckg[0] - ts;
+						console.log("Framerate = ", 1000/dts);
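+						// The measured frame interval is only logged; the converter is
+						// created with a fixed 30 fps for now.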
+						this.converter = new VideoConverter.default(this.element, 30, 1);
+					}
+					ts = streampckg[0];
+				}
+			}
+		} else if (pckg[0] === 103) {
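+			// Non-H264 packets (type 103) carry a msgpack-encoded payload; for now
+			// it is only decoded and logged.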
+			console.log(msgpack.decode(pckg[5]));
+		}
+	});
+	
+	//this.start();
+}
 
-connectToStream = () => {
+FTLStream.prototype.start = function(fs, source, channel) {
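+	// fs, source and channel select what to watch (fs is presumably the frameset
+	// index); together they form the id that incoming packets are matched against
+	// in the bind handler above.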
+	let id = "id-"+fs+"-"+source+"-"+channel;
+	this.current = id;
+
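+	// If the stream has already been located, request it directly; otherwise ask
+	// the remote end to locate it via the "find_stream" RPC and then send the
+	// same request. The [1,fs,255,channel] header mirrors the layout of
+	// streampckg (255 presumably acting as a wildcard source), and the payload is
+	// the same request packet the old connectToStream() used.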
+	if (this.found) {
+		this.peer.send(this.uri, 0, [1,fs,255,channel],[255,7,35,0,0,Buffer.alloc(0)]);
+	} else {
+		this.peer.rpc("find_stream", (res) => {
+			this.found = true;
+			this.peer.send(this.uri, 0, [1,fs,255,channel],[255,7,35,0,0,Buffer.alloc(0)]);
+		}, this.uri);
+	}
+}
+
+
+/*connectToStream = () => {
     const element = document.getElementById('ftlab-stream-video');
     let converter = null;
 
@@ -149,7 +229,9 @@ connectToStream = () => {
                 }
                 ts = streampckg[0];
             }
-        };
+        } else if (pckg[0] === 103) {
+            console.log(msgpack.decode(pckg[5]));
+        }
     })
 
     // Start the transaction
@@ -159,7 +241,7 @@ connectToStream = () => {
         peer.send(current_data.uri, 0, [1,0,255,0],[255,7,35,0,0,Buffer.alloc(0)]);
         //peer.send(current_data.uri, [255,7,35,0,0,Buffer.alloc(0)], [1,0,255,0]);
     }, current_data.uri);
-}
+}*/
 
 closeStream = () => {
     peer.sock.close()
-- 
GitLab