j?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) {
+ throw new Error('Invalid string. Length must be a multiple of 4')
+ }
+
+ // Trim off extra bytes after placeholder bytes are found
+ // See: https://github.com/beatgammit/base64-js/issues/42
+ var validLen = b64.indexOf('=')
+ if (validLen === -1) validLen = len
+
+ var placeHoldersLen = validLen === len
+ ? 0
+ : 4 - (validLen % 4)
+
+ return [validLen, placeHoldersLen]
+}
+
+// base64 is 4/3 + up to two characters of the original data
+function byteLength (b64) {
+ var lens = getLens(b64)
+ var validLen = lens[0]
+ var placeHoldersLen = lens[1]
+ return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
+}
+
+function _byteLength (b64, validLen, placeHoldersLen) {
+ return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
+}
+
+function toByteArray (b64) {
+ var tmp
+ var lens = getLens(b64)
+ var validLen = lens[0]
+ var placeHoldersLen = lens[1]
+
+ var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))
+
+ var curByte = 0
+
+ // if there are placeholders, only get up to the last complete 4 chars
+ var len = placeHoldersLen > 0
+ ? validLen - 4
+ : validLen
+
+ var i
+ for (i = 0; i < len; i += 4) {
+ tmp =
+ (revLookup[b64.charCodeAt(i)] << 18) |
+ (revLookup[b64.charCodeAt(i + 1)] << 12) |
+ (revLookup[b64.charCodeAt(i + 2)] << 6) |
+ revLookup[b64.charCodeAt(i + 3)]
+ arr[curByte++] = (tmp >> 16) & 0xFF
+ arr[curByte++] = (tmp >> 8) & 0xFF
+ arr[curByte++] = tmp & 0xFF
+ }
+
+ if (placeHoldersLen === 2) {
+ tmp =
+ (revLookup[b64.charCodeAt(i)] << 2) |
+ (revLookup[b64.charCodeAt(i + 1)] >> 4)
+ arr[curByte++] = tmp & 0xFF
+ }
+
+ if (placeHoldersLen === 1) {
+ tmp =
+ (revLookup[b64.charCodeAt(i)] << 10) |
+ (revLookup[b64.charCodeAt(i + 1)] << 4) |
+ (revLookup[b64.charCodeAt(i + 2)] >> 2)
+ arr[curByte++] = (tmp >> 8) & 0xFF
+ arr[curByte++] = tmp & 0xFF
+ }
+
+ return arr
+}
+
+function tripletToBase64 (num) {
+ return lookup[num >> 18 & 0x3F] +
+ lookup[num >> 12 & 0x3F] +
+ lookup[num >> 6 & 0x3F] +
+ lookup[num & 0x3F]
+}
+
+function encodeChunk (uint8, start, end) {
+ var tmp
+ var output = []
+ for (var i = start; i < end; i += 3) {
+ tmp =
+ ((uint8[i] << 16) & 0xFF0000) +
+ ((uint8[i + 1] << 8) & 0xFF00) +
+ (uint8[i + 2] & 0xFF)
+ output.push(tripletToBase64(tmp))
+ }
+ return output.join('')
+}
+
+function fromByteArray (uint8) {
+ var tmp
+ var len = uint8.length
+ var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
+ var parts = []
+ var maxChunkLength = 16383 // must be multiple of 3
+
+ // go through the array every three bytes, we'll deal with trailing stuff later
+ for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
+ parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
+ }
+
+ // pad the end with zeros, but make sure to not forget the extra bytes
+ if (extraBytes === 1) {
+ tmp = uint8[len - 1]
+ parts.push(
+ lookup[tmp >> 2] +
+ lookup[(tmp << 4) & 0x3F] +
+ '=='
+ )
+ } else if (extraBytes === 2) {
+ tmp = (uint8[len - 2] << 8) + uint8[len - 1]
+ parts.push(
+ lookup[tmp >> 10] +
+ lookup[(tmp >> 4) & 0x3F] +
+ lookup[(tmp << 2) & 0x3F] +
+ '='
+ )
+ }
+
+ return parts.join('')
+}
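+
+// Usage sketch (illustrative):
+//   fromByteArray(new Uint8Array([1, 2, 3])) === 'AQID'
+//   toByteArray('AQID') yields Uint8Array [1, 2, 3]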
diff --git a/node_modules/base64-js/package.json b/node_modules/base64-js/package.json
new file mode 100644
index 00000000..c3972e39
--- /dev/null
+++ b/node_modules/base64-js/package.json
@@ -0,0 +1,47 @@
+{
+ "name": "base64-js",
+ "description": "Base64 encoding/decoding in pure JS",
+ "version": "1.5.1",
+ "author": "T. Jameson Little ",
+ "typings": "index.d.ts",
+ "bugs": {
+ "url": "https://github.com/beatgammit/base64-js/issues"
+ },
+ "devDependencies": {
+ "babel-minify": "^0.5.1",
+ "benchmark": "^2.1.4",
+ "browserify": "^16.3.0",
+ "standard": "*",
+ "tape": "4.x"
+ },
+ "homepage": "https://github.com/beatgammit/base64-js",
+ "keywords": [
+ "base64"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/beatgammit/base64-js.git"
+ },
+ "scripts": {
+ "build": "browserify -s base64js -r ./ | minify > base64js.min.js",
+ "lint": "standard",
+ "test": "npm run lint && npm run unit",
+ "unit": "tape test/*.js"
+ },
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+}
diff --git a/node_modules/bin-build/index.js b/node_modules/bin-build/index.js
new file mode 100644
index 00000000..525968ac
--- /dev/null
+++ b/node_modules/bin-build/index.js
@@ -0,0 +1,43 @@
+'use strict';
+const decompress = require('decompress');
+const download = require('download');
+const execa = require('execa');
+const pMapSeries = require('p-map-series');
+const tempfile = require('tempfile');
+
+const exec = (cmd, cwd) => pMapSeries(cmd, x => execa.shell(x, {cwd}));
+
+exports.directory = (dir, cmd) => {
+ if (typeof dir !== 'string') {
+ return Promise.reject(new TypeError(`Expected a \`string\`, got \`${typeof dir}\``));
+ }
+
+ return exec(cmd, dir);
+};
+
+exports.file = (file, cmd, opts) => {
+ opts = Object.assign({strip: 1}, opts);
+
+ if (typeof file !== 'string') {
+ return Promise.reject(new TypeError(`Expected a \`string\`, got \`${typeof file}\``));
+ }
+
+ const tmp = tempfile();
+
+ return decompress(file, tmp, opts).then(() => exec(cmd, tmp));
+};
+
+exports.url = (url, cmd, opts) => {
+ opts = Object.assign({
+ extract: true,
+ strip: 1
+ }, opts);
+
+ if (typeof url !== 'string') {
+ return Promise.reject(new TypeError(`Expected a \`string\`, got \`${typeof url}\``));
+ }
+
+ const tmp = tempfile();
+
+ return download(url, tmp, opts).then(() => exec(cmd, tmp));
+};
diff --git a/node_modules/bin-build/license b/node_modules/bin-build/license
new file mode 100644
index 00000000..0f8cf79c
--- /dev/null
+++ b/node_modules/bin-build/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Kevin Martensson (github.com/kevva)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/bin-build/package.json b/node_modules/bin-build/package.json
new file mode 100644
index 00000000..f685c169
--- /dev/null
+++ b/node_modules/bin-build/package.json
@@ -0,0 +1,43 @@
+{
+ "name": "bin-build",
+ "version": "3.0.0",
+ "description": "Easily build binaries",
+ "license": "MIT",
+ "repository": "kevva/bin-build",
+ "author": {
+ "name": "Kevin Mårtensson",
+ "email": "kevinmartensson@gmail.com",
+ "url": "https://github.com/kevva"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "binary",
+ "build",
+ "make"
+ ],
+ "dependencies": {
+ "decompress": "^4.0.0",
+ "download": "^6.2.2",
+ "execa": "^0.7.0",
+ "p-map-series": "^1.0.0",
+ "tempfile": "^2.0.0"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "del": "^3.0.0",
+ "nock": "^9.0.0",
+ "path-exists": "^3.0.0",
+ "xo": "*"
+ },
+ "xo": {
+ "esnext": true
+ }
+}
diff --git a/node_modules/bin-build/readme.md b/node_modules/bin-build/readme.md
new file mode 100644
index 00000000..31b559b3
--- /dev/null
+++ b/node_modules/bin-build/readme.md
@@ -0,0 +1,103 @@
+# bin-build [](https://travis-ci.org/kevva/bin-build)
+
+> Easily build binaries
+
+
+## Install
+
+```
+$ npm install --save bin-build
+```
+
+
+## Usage
+
+```js
+const binBuild = require('bin-build');
+
+binBuild.url('http://www.lcdf.org/gifsicle/gifsicle-1.80.tar.gz', [
+ './configure --disable-gifview --disable-gifdiff',
+ 'make install'
+]).then(() => {
+ console.log('gifsicle built successfully');
+});
+
+binBuild.file('gifsicle-1.80.tar.gz', [
+ './configure --disable-gifview --disable-gifdiff',
+ 'make install'
+]).then(() => {
+ console.log('gifsicle built successfully');
+});
+```
+
+
+## API
+
+### binBuild.directory(directory, commands)
+
+#### directory
+
+Type: `string`
+
+Path to a directory containing the source code.
+
+#### commands
+
+Type: `Array`
+
+Commands to run when building.
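+
+As a quick sketch (the directory path and build commands here are purely illustrative):
+
+```js
+const binBuild = require('bin-build');
+
+// Build from source that has already been extracted on disk
+binBuild.directory('gifsicle-1.80', [
+  './configure --disable-gifview --disable-gifdiff',
+  'make install'
+]).then(() => {
+  console.log('built from directory');
+});
+```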
+
+### binBuild.file(file, commands, [options])
+
+#### file
+
+Type: `string`
+
+Path to an archive file containing the source code.
+
+#### commands
+
+Type: `Array`
+
+Commands to run when building.
+
+#### options
+
+Type: `Object`
+
+##### strip
+
+Type: `number`
+Default: `1`
+
+Strip a number of leading paths from file names on extraction.
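+
+For example, to strip two leading path components instead of one (a sketch reusing the archive from the usage example above):
+
+```js
+const binBuild = require('bin-build');
+
+binBuild.file('gifsicle-1.80.tar.gz', [
+  './configure --disable-gifview --disable-gifdiff',
+  'make install'
+], {strip: 2}).then(() => {
+  console.log('built with strip: 2');
+});
+```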
+
+### binBuild.url(url, commands, [options])
+
+#### url
+
+Type: `string`
+
+URL to an archive file containing the source code.
+
+#### commands
+
+Type: `Array`
+
+Commands to run when building.
+
+#### options
+
+Type: `Object`
+
+##### strip
+
+Type: `number`
+Default: `1`
+
+Strip a number of leading paths from file names on extraction.
+
+
+## License
+
+MIT © [Kevin Mårtensson](https://github.com/kevva)
diff --git a/node_modules/bin-check/index.js b/node_modules/bin-check/index.js
new file mode 100644
index 00000000..49100bd0
--- /dev/null
+++ b/node_modules/bin-check/index.js
@@ -0,0 +1,31 @@
+'use strict';
+const execa = require('execa');
+const executable = require('executable');
+
+module.exports = (bin, args) => {
+ if (!Array.isArray(args)) {
+ args = ['--help'];
+ }
+
+ return executable(bin)
+ .then(works => {
+ if (!works) {
+ throw new Error(`Couldn't execute the \`${bin}\` binary. Make sure it has the right permissions.`);
+ }
+
+ return execa(bin, args);
+ })
+ .then(res => res.code === 0);
+};
+
+module.exports.sync = (bin, args) => {
+ if (!Array.isArray(args)) {
+ args = ['--help'];
+ }
+
+ if (!executable.sync(bin)) {
+ throw new Error(`Couldn't execute the \`${bin}\` binary. Make sure it has the right permissions.`);
+ }
+
+ return execa.sync(bin, args).status === 0;
+};
diff --git a/node_modules/bin-check/license b/node_modules/bin-check/license
new file mode 100644
index 00000000..e0e91582
--- /dev/null
+++ b/node_modules/bin-check/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Kevin Mårtensson (github.com/kevva)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/bin-check/package.json b/node_modules/bin-check/package.json
new file mode 100644
index 00000000..279b342b
--- /dev/null
+++ b/node_modules/bin-check/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "bin-check",
+ "version": "4.1.0",
+ "description": "Check if a binary is working",
+ "license": "MIT",
+ "repository": "kevva/bin-check",
+ "author": {
+ "name": "Kevin Mårtensson",
+ "email": "kevinmartensson@gmail.com",
+ "url": "https://github.com/kevva"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "binary",
+ "check",
+ "executable",
+ "test"
+ ],
+ "dependencies": {
+ "execa": "^0.7.0",
+ "executable": "^4.1.0"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-check/readme.md b/node_modules/bin-check/readme.md
new file mode 100644
index 00000000..111f1f2e
--- /dev/null
+++ b/node_modules/bin-check/readme.md
@@ -0,0 +1,51 @@
+# bin-check [](https://travis-ci.org/kevva/bin-check)
+
+> Check if a binary is working by checking its exit code
+
+
+## Install
+
+```
+$ npm install bin-check
+```
+
+
+## Usage
+
+```js
+const binCheck = require('bin-check');
+
+binCheck('/bin/sh', ['--version']).then(works => {
+ console.log(works);
+ //=> true
+});
+```
+
+
+## API
+
+### binCheck(binary, [arguments])
+
+Returns a `Promise` for a `boolean`.
+
+### binCheck.sync(binary, [arguments])
+
+Returns a `boolean`.
+
+#### binary
+
+Type: `string`
+
+Path to the binary.
+
+#### arguments
+
+Type: `Array`
+Default: `['--help']`
+
+Arguments to run the binary with.
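+
+A matching synchronous sketch (same `/bin/sh` example as above; note that `binCheck.sync` throws if the file isn't executable):
+
+```js
+const binCheck = require('bin-check');
+
+const works = binCheck.sync('/bin/sh', ['--version']);
+console.log(works);
+//=> true
+```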
+
+
+## License
+
+MIT © [Kevin Mårtensson](https://github.com/kevva)
diff --git a/node_modules/bin-version-check/index.js b/node_modules/bin-version-check/index.js
new file mode 100644
index 00000000..5d3ae825
--- /dev/null
+++ b/node_modules/bin-version-check/index.js
@@ -0,0 +1,22 @@
+'use strict';
+const semver = require('semver');
+const binVersion = require('bin-version');
+const semverTruncate = require('semver-truncate');
+
+module.exports = (binary, semverRange, options) => {
+ if (typeof binary !== 'string' || typeof semverRange !== 'string') {
+ return Promise.reject(new Error('`binary` and `semverRange` arguments required'));
+ }
+
+ if (!semver.validRange(semverRange)) {
+ return Promise.reject(new Error('Invalid version range'));
+ }
+
+ return binVersion(binary, options).then(binaryVersion => {
+ if (!semver.satisfies(semverTruncate(binaryVersion, 'patch'), semverRange)) {
+ const error = new Error(`${binary} ${binaryVersion} doesn't satisfy the version requirement of ${semverRange}`);
+ error.name = 'InvalidBinaryVersion';
+ throw error;
+ }
+ });
+};
diff --git a/node_modules/bin-version-check/license b/node_modules/bin-version-check/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-version-check/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-version-check/package.json b/node_modules/bin-version-check/package.json
new file mode 100644
index 00000000..18367986
--- /dev/null
+++ b/node_modules/bin-version-check/package.json
@@ -0,0 +1,43 @@
+{
+ "name": "bin-version-check",
+ "version": "4.0.0",
+ "description": "Check whether a binary version satisfies a semver range",
+ "license": "MIT",
+ "repository": "sindresorhus/bin-version-check",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "cli",
+ "bin",
+ "binary",
+ "executable",
+ "version",
+ "semver",
+ "semantic",
+ "range",
+ "satisfy",
+ "check",
+ "validate"
+ ],
+ "dependencies": {
+ "bin-version": "^3.0.0",
+ "semver": "^5.6.0",
+ "semver-truncate": "^1.1.2"
+ },
+ "devDependencies": {
+ "ava": "^1.0.0-rc.1",
+ "xo": "^0.23.0"
+ }
+}
diff --git a/node_modules/bin-version-check/readme.md b/node_modules/bin-version-check/readme.md
new file mode 100644
index 00000000..6c48c928
--- /dev/null
+++ b/node_modules/bin-version-check/readme.md
@@ -0,0 +1,71 @@
+# bin-version-check [](https://travis-ci.org/sindresorhus/bin-version-check)
+
+> Check whether a binary version satisfies a [semver range](https://github.com/npm/node-semver#ranges)
+
+Useful when you have a thing that only works with specific versions of a binary.
+
+
+## Install
+
+```
+$ npm install bin-version-check
+```
+
+
+## Usage
+
+```
+$ curl --version
+curl 7.30.0 (x86_64-apple-darwin13.0)
+```
+
+```js
+const binVersionCheck = require('bin-version-check');
+
+(async () => {
+ try {
+ await binVersionCheck('curl', '>=8');
+ } catch (error) {
+ console.log(error);
+    //=> 'InvalidBinaryVersion: curl 7.30.0 doesn't satisfy the version requirement of >=8'
+ }
+})();
+```
+
+
+## API
+
+### binVersionCheck(binary, semverRange, [options])
+
+#### binary
+
+Type: `string`
+
+Name or path of the binary to check.
+
+#### semverRange
+
+Type: `string`
+
+[Semver range](https://github.com/npm/node-semver#ranges) to check against.
+
+#### options
+
+Type: `Object`
+
+##### args
+
+Type: `string[]`
+Default: `['--version']`
+
+CLI arguments used to get the binary version.
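+
+For example, a sketch for a binary that prints its version with a non-default flag (the `openssl` case from the `bin-version` docs):
+
+```js
+const binVersionCheck = require('bin-version-check');
+
+(async () => {
+  // `openssl version` prints something like `OpenSSL 1.0.2d 9 Jul 2015`
+  await binVersionCheck('openssl', '>=1.0.0', {args: ['version']});
+})();
+```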
+
+
+## Related
+
+- [bin-version-check-cli](https://github.com/sindresorhus/bin-version-check-cli) - CLI for this module
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-version/index.d.ts b/node_modules/bin-version/index.d.ts
new file mode 100644
index 00000000..00287aec
--- /dev/null
+++ b/node_modules/bin-version/index.d.ts
@@ -0,0 +1,43 @@
+declare namespace binVersion {
+ interface Options {
+ /**
+ The arguments to pass to `binary` so that it will print its version.
+
+ @default ['--version']
+ */
+ args?: string[];
+ }
+}
+
+/**
+Get the version of a binary in [semver](https://github.com/npm/node-semver) format.
+
+@param binary - The name of or path to the binary to get the version from.
+@returns The version of the `binary`.
+
+@example
+```
+import binVersion = require('bin-version');
+
+(async () => {
+ // $ curl --version
+ // curl 7.30.0 (x86_64-apple-darwin13.0)
+
+ console.log(await binVersion('curl'));
+ //=> '7.30.0'
+
+
+ // $ openssl version
+ // OpenSSL 1.0.2d 9 Jul 2015
+
+ console.log(await binVersion('openssl', {args: ['version']}));
+ //=> '1.0.2'
+})();
+```
+*/
+declare function binVersion(
+ binary: string,
+ options?: binVersion.Options
+): Promise;
+
+export = binVersion;
diff --git a/node_modules/bin-version/index.js b/node_modules/bin-version/index.js
new file mode 100644
index 00000000..303e996f
--- /dev/null
+++ b/node_modules/bin-version/index.js
@@ -0,0 +1,15 @@
+'use strict';
+const execa = require('execa');
+const findVersions = require('find-versions');
+
+module.exports = (binary, options = {}) => {
+ return execa(binary, options.args || ['--version'])
+ .then(result => findVersions(result.stdout || result.stderr, {loose: true})[0])
+ .catch(error => {
+ if (error.code === 'ENOENT') {
+ error.message = `Couldn't find the \`${binary}\` binary. Make sure it's installed and in your $PATH.`;
+ }
+
+ throw error;
+ });
+};
diff --git a/node_modules/bin-version/license b/node_modules/bin-version/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-version/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-version/node_modules/cross-spawn/CHANGELOG.md b/node_modules/bin-version/node_modules/cross-spawn/CHANGELOG.md
new file mode 100644
index 00000000..ded9620b
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/CHANGELOG.md
@@ -0,0 +1,100 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+
+## [6.0.5](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.4...v6.0.5) (2018-03-02)
+
+
+### Bug Fixes
+
+* avoid using deprecated Buffer constructor ([#94](https://github.com/moxystudio/node-cross-spawn/issues/94)) ([d5770df](https://github.com/moxystudio/node-cross-spawn/commit/d5770df)), closes [/nodejs.org/api/deprecations.html#deprecations_dep0005](https://github.com//nodejs.org/api/deprecations.html/issues/deprecations_dep0005)
+
+
+
+
+## [6.0.4](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.3...v6.0.4) (2018-01-31)
+
+
+### Bug Fixes
+
+* fix paths being incorrectly normalized on unix ([06ee3c6](https://github.com/moxystudio/node-cross-spawn/commit/06ee3c6)), closes [#90](https://github.com/moxystudio/node-cross-spawn/issues/90)
+
+
+
+
+## [6.0.3](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.2...v6.0.3) (2018-01-23)
+
+
+
+
+## [6.0.2](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.1...v6.0.2) (2018-01-23)
+
+
+
+
+## [6.0.1](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.0...v6.0.1) (2018-01-23)
+
+
+
+
+# [6.0.0](https://github.com/moxystudio/node-cross-spawn/compare/5.1.0...6.0.0) (2018-01-23)
+
+
+### Bug Fixes
+
+* fix certain arguments not being correctly escaped or causing batch syntax error ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10)), closes [#82](https://github.com/moxystudio/node-cross-spawn/issues/82) [#51](https://github.com/moxystudio/node-cross-spawn/issues/51)
+* fix commands as posix relative paths not working correctly, e.g.: `./my-command` ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10))
+* fix `options` argument being mutated ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10))
+* fix commands resolution when PATH was actually Path ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10))
+
+
+### Features
+
+* improve compliance with node's ENOENT errors ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10))
+* improve detection of node's shell option support ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10))
+
+
+### Chores
+
+* upgrade tooling
+* upgrade project to es6 (node v4)
+
+
+### BREAKING CHANGES
+
+* remove support for older nodejs versions, only `node >= 4` is supported
+
+
+
+## [5.1.0](https://github.com/moxystudio/node-cross-spawn/compare/5.0.1...5.1.0) (2017-02-26)
+
+
+### Bug Fixes
+
+* fix `options.shell` support for NodeJS [v4.8](https://github.com/nodejs/node/blob/master/doc/changelogs/CHANGELOG_V4.md#4.8.0)
+
+
+
+## [5.0.1](https://github.com/moxystudio/node-cross-spawn/compare/5.0.0...5.0.1) (2016-11-04)
+
+
+### Bug Fixes
+
+* fix `options.shell` support for NodeJS v7
+
+
+
+# [5.0.0](https://github.com/moxystudio/node-cross-spawn/compare/4.0.2...5.0.0) (2016-10-30)
+
+
+## Features
+
+* add support for `options.shell`
+* improve parsing of shebangs by using [`shebang-command`](https://github.com/kevva/shebang-command) module
+
+
+## Chores
+
+* refactor some code to make it more clear
+* update README caveats
diff --git a/node_modules/bin-version/node_modules/cross-spawn/LICENSE b/node_modules/bin-version/node_modules/cross-spawn/LICENSE
new file mode 100644
index 00000000..8407b9a3
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 Made With MOXY Lda
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/bin-version/node_modules/cross-spawn/README.md b/node_modules/bin-version/node_modules/cross-spawn/README.md
new file mode 100644
index 00000000..e895cd7a
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/README.md
@@ -0,0 +1,94 @@
+# cross-spawn
+
+[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Build status][appveyor-image]][appveyor-url] [![Coverage Status][codecov-image]][codecov-url] [![Dependency status][david-dm-image]][david-dm-url] [![Dev Dependency status][david-dm-dev-image]][david-dm-dev-url] [![Greenkeeper badge][greenkeeper-image]][greenkeeper-url]
+
+[npm-url]:https://npmjs.org/package/cross-spawn
+[downloads-image]:http://img.shields.io/npm/dm/cross-spawn.svg
+[npm-image]:http://img.shields.io/npm/v/cross-spawn.svg
+[travis-url]:https://travis-ci.org/moxystudio/node-cross-spawn
+[travis-image]:http://img.shields.io/travis/moxystudio/node-cross-spawn/master.svg
+[appveyor-url]:https://ci.appveyor.com/project/satazor/node-cross-spawn
+[appveyor-image]:https://img.shields.io/appveyor/ci/satazor/node-cross-spawn/master.svg
+[codecov-url]:https://codecov.io/gh/moxystudio/node-cross-spawn
+[codecov-image]:https://img.shields.io/codecov/c/github/moxystudio/node-cross-spawn/master.svg
+[david-dm-url]:https://david-dm.org/moxystudio/node-cross-spawn
+[david-dm-image]:https://img.shields.io/david/moxystudio/node-cross-spawn.svg
+[david-dm-dev-url]:https://david-dm.org/moxystudio/node-cross-spawn?type=dev
+[david-dm-dev-image]:https://img.shields.io/david/dev/moxystudio/node-cross-spawn.svg
+[greenkeeper-image]:https://badges.greenkeeper.io/moxystudio/node-cross-spawn.svg
+[greenkeeper-url]:https://greenkeeper.io/
+
+A cross platform solution to node's spawn and spawnSync.
+
+
+## Installation
+
+`$ npm install cross-spawn`
+
+
+## Why
+
+Node has issues when using spawn on Windows:
+
+- It ignores [PATHEXT](https://github.com/joyent/node/issues/2318)
+- It does not support [shebangs](https://en.wikipedia.org/wiki/Shebang_(Unix))
+- Has problems running commands with [spaces](https://github.com/nodejs/node/issues/7367)
+- Has problems running commands with posix relative paths (e.g.: `./my-folder/my-executable`)
+- Has an [issue](https://github.com/moxystudio/node-cross-spawn/issues/82) with command shims (files in `node_modules/.bin/`), where arguments with quotes and parenthesis would result in [invalid syntax error](https://github.com/moxystudio/node-cross-spawn/blob/e77b8f22a416db46b6196767bcd35601d7e11d54/test/index.test.js#L149)
+- No `options.shell` support on node `<v4.8`
+
+
+## Caveats
+
+### Shebangs support
+
+While `cross-spawn` handles shebangs on Windows, its support is limited: it only supports `#!/usr/bin/env <program>` where `<program>` must not contain any arguments.
+If you would like to have the shebang support improved, feel free to contribute via a pull-request.
+
+Remember to always test your code on Windows!
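+
+A minimal usage sketch of the drop-in API exported by this package (the `npm` invocation is only an example):
+
+```js
+const spawn = require('cross-spawn');
+
+// Asynchronous, same signature as child_process.spawn
+const child = spawn('npm', ['list', '-g', '--depth', '0'], {stdio: 'inherit'});
+
+// Synchronous counterpart
+const result = spawn.sync('npm', ['list', '-g', '--depth', '0'], {stdio: 'inherit'});
+```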
+
+
+## Tests
+
+`$ npm test`
+`$ npm test -- --watch` during development
+
+## License
+
+Released under the [MIT License](http://www.opensource.org/licenses/mit-license.php).
diff --git a/node_modules/bin-version/node_modules/cross-spawn/index.js b/node_modules/bin-version/node_modules/cross-spawn/index.js
new file mode 100644
index 00000000..5509742c
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/index.js
@@ -0,0 +1,39 @@
+'use strict';
+
+const cp = require('child_process');
+const parse = require('./lib/parse');
+const enoent = require('./lib/enoent');
+
+function spawn(command, args, options) {
+ // Parse the arguments
+ const parsed = parse(command, args, options);
+
+ // Spawn the child process
+ const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
+
+ // Hook into child process "exit" event to emit an error if the command
+ // does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ enoent.hookChildProcess(spawned, parsed);
+
+ return spawned;
+}
+
+function spawnSync(command, args, options) {
+ // Parse the arguments
+ const parsed = parse(command, args, options);
+
+ // Spawn the child process
+ const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);
+
+ // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
+
+ return result;
+}
+
+module.exports = spawn;
+module.exports.spawn = spawn;
+module.exports.sync = spawnSync;
+
+module.exports._parse = parse;
+module.exports._enoent = enoent;
diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/enoent.js b/node_modules/bin-version/node_modules/cross-spawn/lib/enoent.js
new file mode 100644
index 00000000..14df9b62
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/lib/enoent.js
@@ -0,0 +1,59 @@
+'use strict';
+
+const isWin = process.platform === 'win32';
+
+function notFoundError(original, syscall) {
+ return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), {
+ code: 'ENOENT',
+ errno: 'ENOENT',
+ syscall: `${syscall} ${original.command}`,
+ path: original.command,
+ spawnargs: original.args,
+ });
+}
+
+function hookChildProcess(cp, parsed) {
+ if (!isWin) {
+ return;
+ }
+
+ const originalEmit = cp.emit;
+
+ cp.emit = function (name, arg1) {
+ // If emitting "exit" event and exit code is 1, we need to check if
+ // the command exists and emit an "error" instead
+ // See https://github.com/IndigoUnited/node-cross-spawn/issues/16
+ if (name === 'exit') {
+ const err = verifyENOENT(arg1, parsed, 'spawn');
+
+ if (err) {
+ return originalEmit.call(cp, 'error', err);
+ }
+ }
+
+ return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
+ };
+}
+
+function verifyENOENT(status, parsed) {
+ if (isWin && status === 1 && !parsed.file) {
+ return notFoundError(parsed.original, 'spawn');
+ }
+
+ return null;
+}
+
+function verifyENOENTSync(status, parsed) {
+ if (isWin && status === 1 && !parsed.file) {
+ return notFoundError(parsed.original, 'spawnSync');
+ }
+
+ return null;
+}
+
+module.exports = {
+ hookChildProcess,
+ verifyENOENT,
+ verifyENOENTSync,
+ notFoundError,
+};
diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/parse.js b/node_modules/bin-version/node_modules/cross-spawn/lib/parse.js
new file mode 100644
index 00000000..962827a9
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/lib/parse.js
@@ -0,0 +1,125 @@
+'use strict';
+
+const path = require('path');
+const niceTry = require('nice-try');
+const resolveCommand = require('./util/resolveCommand');
+const escape = require('./util/escape');
+const readShebang = require('./util/readShebang');
+const semver = require('semver');
+
+const isWin = process.platform === 'win32';
+const isExecutableRegExp = /\.(?:com|exe)$/i;
+const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;
+
+// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0
+const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false;
+
+function detectShebang(parsed) {
+ parsed.file = resolveCommand(parsed);
+
+ const shebang = parsed.file && readShebang(parsed.file);
+
+ if (shebang) {
+ parsed.args.unshift(parsed.file);
+ parsed.command = shebang;
+
+ return resolveCommand(parsed);
+ }
+
+ return parsed.file;
+}
+
+function parseNonShell(parsed) {
+ if (!isWin) {
+ return parsed;
+ }
+
+ // Detect & add support for shebangs
+ const commandFile = detectShebang(parsed);
+
+ // We don't need a shell if the command filename is an executable
+ const needsShell = !isExecutableRegExp.test(commandFile);
+
+ // If a shell is required, use cmd.exe and take care of escaping everything correctly
+ // Note that `forceShell` is a hidden option used only in tests
+ if (parsed.options.forceShell || needsShell) {
+ // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
+ // The cmd-shim simply executes the package bin file with NodeJS, proxying any arguments
+ // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
+ // we need to double escape them
+ const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);
+
+ // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
+ // This is necessary otherwise it will always fail with ENOENT in those cases
+ parsed.command = path.normalize(parsed.command);
+
+ // Escape command & arguments
+ parsed.command = escape.command(parsed.command);
+ parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));
+
+ const shellCommand = [parsed.command].concat(parsed.args).join(' ');
+
+ parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
+ parsed.command = process.env.comspec || 'cmd.exe';
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ }
+
+ return parsed;
+}
+
+function parseShell(parsed) {
+ // If node supports the shell option, there's no need to mimic its behavior
+ if (supportsShellOption) {
+ return parsed;
+ }
+
+ // Mimic node shell option
+ // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
+ const shellCommand = [parsed.command].concat(parsed.args).join(' ');
+
+ if (isWin) {
+ parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
+ parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ } else {
+ if (typeof parsed.options.shell === 'string') {
+ parsed.command = parsed.options.shell;
+ } else if (process.platform === 'android') {
+ parsed.command = '/system/bin/sh';
+ } else {
+ parsed.command = '/bin/sh';
+ }
+
+ parsed.args = ['-c', shellCommand];
+ }
+
+ return parsed;
+}
+
+function parse(command, args, options) {
+ // Normalize arguments, similar to nodejs
+ if (args && !Array.isArray(args)) {
+ options = args;
+ args = null;
+ }
+
+ args = args ? args.slice(0) : []; // Clone array to avoid changing the original
+ options = Object.assign({}, options); // Clone object to avoid changing the original
+
+ // Build our parsed object
+ const parsed = {
+ command,
+ args,
+ options,
+ file: undefined,
+ original: {
+ command,
+ args,
+ },
+ };
+
+ // Delegate further parsing to shell or non-shell
+ return options.shell ? parseShell(parsed) : parseNonShell(parsed);
+}
+
+module.exports = parse;
diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/util/escape.js b/node_modules/bin-version/node_modules/cross-spawn/lib/util/escape.js
new file mode 100644
index 00000000..b0bb84c3
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/lib/util/escape.js
@@ -0,0 +1,45 @@
+'use strict';
+
+// See http://www.robvanderwoude.com/escapechars.php
+const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;
+
+function escapeCommand(arg) {
+ // Escape meta chars
+ arg = arg.replace(metaCharsRegExp, '^$1');
+
+ return arg;
+}
+
+function escapeArgument(arg, doubleEscapeMetaChars) {
+ // Convert to string
+ arg = `${arg}`;
+
+ // Algorithm below is based on https://qntm.org/cmd
+
+ // Sequence of backslashes followed by a double quote:
+ // double up all the backslashes and escape the double quote
+ arg = arg.replace(/(\\*)"/g, '$1$1\\"');
+
+ // Sequence of backslashes followed by the end of the string
+ // (which will become a double quote later):
+ // double up all the backslashes
+ arg = arg.replace(/(\\*)$/, '$1$1');
+
+ // All other backslashes occur literally
+
+ // Quote the whole thing:
+ arg = `"${arg}"`;
+
+ // Escape meta chars
+ arg = arg.replace(metaCharsRegExp, '^$1');
+
+ // Double escape meta chars if necessary
+ if (doubleEscapeMetaChars) {
+ arg = arg.replace(metaCharsRegExp, '^$1');
+ }
+
+ return arg;
+}
+
+module.exports.command = escapeCommand;
+module.exports.argument = escapeArgument;
diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/util/readShebang.js b/node_modules/bin-version/node_modules/cross-spawn/lib/util/readShebang.js
new file mode 100644
index 00000000..bd4f1280
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/lib/util/readShebang.js
@@ -0,0 +1,32 @@
+'use strict';
+
+const fs = require('fs');
+const shebangCommand = require('shebang-command');
+
+function readShebang(command) {
+ // Read the first 150 bytes from the file
+ const size = 150;
+ let buffer;
+
+ if (Buffer.alloc) {
+ // Node.js v4.5+ / v5.10+
+ buffer = Buffer.alloc(size);
+ } else {
+ // Old Node.js API
+ buffer = new Buffer(size);
+ buffer.fill(0); // zero-fill
+ }
+
+ let fd;
+
+ try {
+ fd = fs.openSync(command, 'r');
+ fs.readSync(fd, buffer, 0, size, 0);
+ fs.closeSync(fd);
+ } catch (e) { /* Empty */ }
+
+ // Attempt to extract shebang (null is returned if not a shebang)
+ return shebangCommand(buffer.toString());
+}
+
+module.exports = readShebang;
diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/util/resolveCommand.js b/node_modules/bin-version/node_modules/cross-spawn/lib/util/resolveCommand.js
new file mode 100644
index 00000000..2fd5ad27
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/lib/util/resolveCommand.js
@@ -0,0 +1,47 @@
+'use strict';
+
+const path = require('path');
+const which = require('which');
+const pathKey = require('path-key')();
+
+function resolveCommandAttempt(parsed, withoutPathExt) {
+ const cwd = process.cwd();
+ const hasCustomCwd = parsed.options.cwd != null;
+
+ // If a custom `cwd` was specified, we need to change the process cwd
+ // because `which` will do stat calls but does not support a custom cwd
+ if (hasCustomCwd) {
+ try {
+ process.chdir(parsed.options.cwd);
+ } catch (err) {
+ /* Empty */
+ }
+ }
+
+ let resolved;
+
+ try {
+ resolved = which.sync(parsed.command, {
+ path: (parsed.options.env || process.env)[pathKey],
+ pathExt: withoutPathExt ? path.delimiter : undefined,
+ });
+ } catch (e) {
+ /* Empty */
+ } finally {
+ process.chdir(cwd);
+ }
+
+ // If we successfully resolved, ensure that an absolute path is returned
+ // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
+ if (resolved) {
+ resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
+ }
+
+ return resolved;
+}
+
+function resolveCommand(parsed) {
+ return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
+}
+
+module.exports = resolveCommand;
diff --git a/node_modules/bin-version/node_modules/cross-spawn/package.json b/node_modules/bin-version/node_modules/cross-spawn/package.json
new file mode 100644
index 00000000..1a69be8c
--- /dev/null
+++ b/node_modules/bin-version/node_modules/cross-spawn/package.json
@@ -0,0 +1,76 @@
+{
+ "name": "cross-spawn",
+ "version": "6.0.5",
+ "description": "Cross platform child_process#spawn and child_process#spawnSync",
+ "keywords": [
+ "spawn",
+ "spawnSync",
+ "windows",
+ "cross-platform",
+ "path-ext",
+ "shebang",
+ "cmd",
+ "execute"
+ ],
+ "author": "André Cruz ",
+ "homepage": "https://github.com/moxystudio/node-cross-spawn",
+ "repository": {
+ "type": "git",
+ "url": "git@github.com:moxystudio/node-cross-spawn.git"
+ },
+ "license": "MIT",
+ "main": "index.js",
+ "files": [
+ "lib"
+ ],
+ "scripts": {
+ "lint": "eslint .",
+ "test": "jest --env node --coverage",
+ "prerelease": "npm t && npm run lint",
+ "release": "standard-version",
+ "precommit": "lint-staged",
+ "commitmsg": "commitlint -e $GIT_PARAMS"
+ },
+ "standard-version": {
+ "scripts": {
+ "posttag": "git push --follow-tags origin master && npm publish"
+ }
+ },
+ "lint-staged": {
+ "*.js": [
+ "eslint --fix",
+ "git add"
+ ]
+ },
+ "commitlint": {
+ "extends": [
+ "@commitlint/config-conventional"
+ ]
+ },
+ "dependencies": {
+ "nice-try": "^1.0.4",
+ "path-key": "^2.0.1",
+ "semver": "^5.5.0",
+ "shebang-command": "^1.2.0",
+ "which": "^1.2.9"
+ },
+ "devDependencies": {
+ "@commitlint/cli": "^6.0.0",
+ "@commitlint/config-conventional": "^6.0.2",
+ "babel-core": "^6.26.0",
+ "babel-jest": "^22.1.0",
+ "babel-preset-moxy": "^2.2.1",
+ "eslint": "^4.3.0",
+ "eslint-config-moxy": "^5.0.0",
+ "husky": "^0.14.3",
+ "jest": "^22.0.0",
+ "lint-staged": "^7.0.0",
+ "mkdirp": "^0.5.1",
+ "regenerator-runtime": "^0.11.1",
+ "rimraf": "^2.6.2",
+ "standard-version": "^4.2.0"
+ },
+ "engines": {
+ "node": ">=4.8"
+ }
+}
diff --git a/node_modules/bin-version/node_modules/execa/index.js b/node_modules/bin-version/node_modules/execa/index.js
new file mode 100644
index 00000000..aad9ac88
--- /dev/null
+++ b/node_modules/bin-version/node_modules/execa/index.js
@@ -0,0 +1,361 @@
+'use strict';
+const path = require('path');
+const childProcess = require('child_process');
+const crossSpawn = require('cross-spawn');
+const stripEof = require('strip-eof');
+const npmRunPath = require('npm-run-path');
+const isStream = require('is-stream');
+const _getStream = require('get-stream');
+const pFinally = require('p-finally');
+const onExit = require('signal-exit');
+const errname = require('./lib/errname');
+const stdio = require('./lib/stdio');
+
+const TEN_MEGABYTES = 1000 * 1000 * 10;
+
+function handleArgs(cmd, args, opts) {
+ let parsed;
+
+ opts = Object.assign({
+ extendEnv: true,
+ env: {}
+ }, opts);
+
+ if (opts.extendEnv) {
+ opts.env = Object.assign({}, process.env, opts.env);
+ }
+
+ if (opts.__winShell === true) {
+ delete opts.__winShell;
+ parsed = {
+ command: cmd,
+ args,
+ options: opts,
+ file: cmd,
+ original: {
+ cmd,
+ args
+ }
+ };
+ } else {
+ parsed = crossSpawn._parse(cmd, args, opts);
+ }
+
+ opts = Object.assign({
+ maxBuffer: TEN_MEGABYTES,
+ buffer: true,
+ stripEof: true,
+ preferLocal: true,
+ localDir: parsed.options.cwd || process.cwd(),
+ encoding: 'utf8',
+ reject: true,
+ cleanup: true
+ }, parsed.options);
+
+ opts.stdio = stdio(opts);
+
+ if (opts.preferLocal) {
+ opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
+ }
+
+ if (opts.detached) {
+ // #115
+ opts.cleanup = false;
+ }
+
+ if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') {
+ // #116
+ parsed.args.unshift('/q');
+ }
+
+ return {
+ cmd: parsed.command,
+ args: parsed.args,
+ opts,
+ parsed
+ };
+}
+
+function handleInput(spawned, input) {
+ if (input === null || input === undefined) {
+ return;
+ }
+
+ if (isStream(input)) {
+ input.pipe(spawned.stdin);
+ } else {
+ spawned.stdin.end(input);
+ }
+}
+
+function handleOutput(opts, val) {
+ if (val && opts.stripEof) {
+ val = stripEof(val);
+ }
+
+ return val;
+}
+
+function handleShell(fn, cmd, opts) {
+ let file = '/bin/sh';
+ let args = ['-c', cmd];
+
+ opts = Object.assign({}, opts);
+
+ if (process.platform === 'win32') {
+ opts.__winShell = true;
+ file = process.env.comspec || 'cmd.exe';
+ args = ['/s', '/c', `"${cmd}"`];
+ opts.windowsVerbatimArguments = true;
+ }
+
+ if (opts.shell) {
+ file = opts.shell;
+ delete opts.shell;
+ }
+
+ return fn(file, args, opts);
+}
+
+function getStream(process, stream, {encoding, buffer, maxBuffer}) {
+ if (!process[stream]) {
+ return null;
+ }
+
+ let ret;
+
+ if (!buffer) {
+ // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10
+ ret = new Promise((resolve, reject) => {
+ process[stream]
+ .once('end', resolve)
+ .once('error', reject);
+ });
+ } else if (encoding) {
+ ret = _getStream(process[stream], {
+ encoding,
+ maxBuffer
+ });
+ } else {
+ ret = _getStream.buffer(process[stream], {maxBuffer});
+ }
+
+ return ret.catch(err => {
+ err.stream = stream;
+ err.message = `${stream} ${err.message}`;
+ throw err;
+ });
+}
+
+function makeError(result, options) {
+ const {stdout, stderr} = result;
+
+ let err = result.error;
+ const {code, signal} = result;
+
+ const {parsed, joinedCmd} = options;
+ const timedOut = options.timedOut || false;
+
+ if (!err) {
+ let output = '';
+
+ if (Array.isArray(parsed.opts.stdio)) {
+ if (parsed.opts.stdio[2] !== 'inherit') {
+ output += output.length > 0 ? stderr : `\n${stderr}`;
+ }
+
+ if (parsed.opts.stdio[1] !== 'inherit') {
+ output += `\n${stdout}`;
+ }
+ } else if (parsed.opts.stdio !== 'inherit') {
+ output = `\n${stderr}${stdout}`;
+ }
+
+ err = new Error(`Command failed: ${joinedCmd}${output}`);
+ err.code = code < 0 ? errname(code) : code;
+ }
+
+ err.stdout = stdout;
+ err.stderr = stderr;
+ err.failed = true;
+ err.signal = signal || null;
+ err.cmd = joinedCmd;
+ err.timedOut = timedOut;
+
+ return err;
+}
+
+function joinCmd(cmd, args) {
+ let joinedCmd = cmd;
+
+ if (Array.isArray(args) && args.length > 0) {
+ joinedCmd += ' ' + args.join(' ');
+ }
+
+ return joinedCmd;
+}
+
+module.exports = (cmd, args, opts) => {
+ const parsed = handleArgs(cmd, args, opts);
+ const {encoding, buffer, maxBuffer} = parsed.opts;
+ const joinedCmd = joinCmd(cmd, args);
+
+ let spawned;
+ try {
+ spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts);
+ } catch (err) {
+ return Promise.reject(err);
+ }
+
+ let removeExitHandler;
+ if (parsed.opts.cleanup) {
+ removeExitHandler = onExit(() => {
+ spawned.kill();
+ });
+ }
+
+ let timeoutId = null;
+ let timedOut = false;
+
+ const cleanup = () => {
+ if (timeoutId) {
+ clearTimeout(timeoutId);
+ timeoutId = null;
+ }
+
+ if (removeExitHandler) {
+ removeExitHandler();
+ }
+ };
+
+ if (parsed.opts.timeout > 0) {
+ timeoutId = setTimeout(() => {
+ timeoutId = null;
+ timedOut = true;
+ spawned.kill(parsed.opts.killSignal);
+ }, parsed.opts.timeout);
+ }
+
+ const processDone = new Promise(resolve => {
+ spawned.on('exit', (code, signal) => {
+ cleanup();
+ resolve({code, signal});
+ });
+
+ spawned.on('error', err => {
+ cleanup();
+ resolve({error: err});
+ });
+
+ if (spawned.stdin) {
+ spawned.stdin.on('error', err => {
+ cleanup();
+ resolve({error: err});
+ });
+ }
+ });
+
+ function destroy() {
+ if (spawned.stdout) {
+ spawned.stdout.destroy();
+ }
+
+ if (spawned.stderr) {
+ spawned.stderr.destroy();
+ }
+ }
+
+ const handlePromise = () => pFinally(Promise.all([
+ processDone,
+ getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}),
+ getStream(spawned, 'stderr', {encoding, buffer, maxBuffer})
+ ]).then(arr => {
+ const result = arr[0];
+ result.stdout = arr[1];
+ result.stderr = arr[2];
+
+ if (result.error || result.code !== 0 || result.signal !== null) {
+ const err = makeError(result, {
+ joinedCmd,
+ parsed,
+ timedOut
+ });
+
+ // TODO: missing some timeout logic for killed
+ // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203
+ // err.killed = spawned.killed || killed;
+ err.killed = err.killed || spawned.killed;
+
+ if (!parsed.opts.reject) {
+ return err;
+ }
+
+ throw err;
+ }
+
+ return {
+ stdout: handleOutput(parsed.opts, result.stdout),
+ stderr: handleOutput(parsed.opts, result.stderr),
+ code: 0,
+ failed: false,
+ killed: false,
+ signal: null,
+ cmd: joinedCmd,
+ timedOut: false
+ };
+ }), destroy);
+
+ crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);
+
+ handleInput(spawned, parsed.opts.input);
+
+ spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
+ spawned.catch = onrejected => handlePromise().catch(onrejected);
+
+ return spawned;
+};
+
+// TODO: set `stderr: 'ignore'` when that option is implemented
+module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout);
+
+// TODO: set `stdout: 'ignore'` when that option is implemented
+module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr);
+
+module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts);
+
+module.exports.sync = (cmd, args, opts) => {
+ const parsed = handleArgs(cmd, args, opts);
+ const joinedCmd = joinCmd(cmd, args);
+
+ if (isStream(parsed.opts.input)) {
+ throw new TypeError('The `input` option cannot be a stream in sync mode');
+ }
+
+ const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
+ result.code = result.status;
+
+ if (result.error || result.status !== 0 || result.signal !== null) {
+ const err = makeError(result, {
+ joinedCmd,
+ parsed
+ });
+
+ if (!parsed.opts.reject) {
+ return err;
+ }
+
+ throw err;
+ }
+
+ return {
+ stdout: handleOutput(parsed.opts, result.stdout),
+ stderr: handleOutput(parsed.opts, result.stderr),
+ code: 0,
+ failed: false,
+ signal: null,
+ cmd: joinedCmd,
+ timedOut: false
+ };
+};
+
+module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts);
diff --git a/node_modules/bin-version/node_modules/execa/lib/errname.js b/node_modules/bin-version/node_modules/execa/lib/errname.js
new file mode 100644
index 00000000..e367837b
--- /dev/null
+++ b/node_modules/bin-version/node_modules/execa/lib/errname.js
@@ -0,0 +1,39 @@
+'use strict';
+// Older versions of Node.js might not have `util.getSystemErrorName()`.
+// In that case, fall back to a deprecated internal.
+const util = require('util');
+
+let uv;
+
+if (typeof util.getSystemErrorName === 'function') {
+ module.exports = util.getSystemErrorName;
+} else {
+ try {
+ uv = process.binding('uv');
+
+ if (typeof uv.errname !== 'function') {
+ throw new TypeError('uv.errname is not a function');
+ }
+ } catch (err) {
+ console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
+ uv = null;
+ }
+
+ module.exports = code => errname(uv, code);
+}
+
+// Used for testing the fallback behavior
+module.exports.__test__ = errname;
+
+function errname(uv, code) {
+ if (uv) {
+ return uv.errname(code);
+ }
+
+ if (!(code < 0)) {
+ throw new Error('err >= 0');
+ }
+
+ return `Unknown system error ${code}`;
+}
+
diff --git a/node_modules/bin-version/node_modules/execa/lib/stdio.js b/node_modules/bin-version/node_modules/execa/lib/stdio.js
new file mode 100644
index 00000000..a82d4683
--- /dev/null
+++ b/node_modules/bin-version/node_modules/execa/lib/stdio.js
@@ -0,0 +1,41 @@
+'use strict';
+const alias = ['stdin', 'stdout', 'stderr'];
+
+const hasAlias = opts => alias.some(x => Boolean(opts[x]));
+
+module.exports = opts => {
+ if (!opts) {
+ return null;
+ }
+
+ if (opts.stdio && hasAlias(opts)) {
+ throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`);
+ }
+
+ if (typeof opts.stdio === 'string') {
+ return opts.stdio;
+ }
+
+ const stdio = opts.stdio || [];
+
+ if (!Array.isArray(stdio)) {
+ throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
+ }
+
+ const result = [];
+ const len = Math.max(stdio.length, alias.length);
+
+ for (let i = 0; i < len; i++) {
+ let value = null;
+
+ if (stdio[i] !== undefined) {
+ value = stdio[i];
+ } else if (opts[alias[i]] !== undefined) {
+ value = opts[alias[i]];
+ }
+
+ result[i] = value;
+ }
+
+ return result;
+};
diff --git a/node_modules/bin-version/node_modules/execa/license b/node_modules/bin-version/node_modules/execa/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-version/node_modules/execa/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-version/node_modules/execa/package.json b/node_modules/bin-version/node_modules/execa/package.json
new file mode 100644
index 00000000..ad98225c
--- /dev/null
+++ b/node_modules/bin-version/node_modules/execa/package.json
@@ -0,0 +1,69 @@
+{
+ "name": "execa",
+ "version": "1.0.0",
+ "description": "A better `child_process`",
+ "license": "MIT",
+ "repository": "sindresorhus/execa",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && nyc ava"
+ },
+ "files": [
+ "index.js",
+ "lib"
+ ],
+ "keywords": [
+ "exec",
+ "child",
+ "process",
+ "execute",
+ "fork",
+ "execfile",
+ "spawn",
+ "file",
+ "shell",
+ "bin",
+ "binary",
+ "binaries",
+ "npm",
+ "path",
+ "local"
+ ],
+ "dependencies": {
+ "cross-spawn": "^6.0.0",
+ "get-stream": "^4.0.0",
+ "is-stream": "^1.1.0",
+ "npm-run-path": "^2.0.0",
+ "p-finally": "^1.0.0",
+ "signal-exit": "^3.0.0",
+ "strip-eof": "^1.0.0"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "cat-names": "^1.0.2",
+ "coveralls": "^3.0.1",
+ "delay": "^3.0.0",
+ "is-running": "^2.0.0",
+ "nyc": "^13.0.1",
+ "tempfile": "^2.0.0",
+ "xo": "*"
+ },
+ "nyc": {
+ "reporter": [
+ "text",
+ "lcov"
+ ],
+ "exclude": [
+ "**/fixtures/**",
+ "**/test.js",
+ "**/test/**"
+ ]
+ }
+}
diff --git a/node_modules/bin-version/node_modules/execa/readme.md b/node_modules/bin-version/node_modules/execa/readme.md
new file mode 100644
index 00000000..f3f533d9
--- /dev/null
+++ b/node_modules/bin-version/node_modules/execa/readme.md
@@ -0,0 +1,327 @@
+# execa [](https://travis-ci.org/sindresorhus/execa) [](https://ci.appveyor.com/project/sindresorhus/execa/branch/master) [](https://coveralls.io/github/sindresorhus/execa?branch=master)
+
+> A better [`child_process`](https://nodejs.org/api/child_process.html)
+
+
+## Why
+
+- Promise interface.
+- [Strips EOF](https://github.com/sindresorhus/strip-eof) from the output so you don't have to `stdout.trim()`.
+- Supports [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries cross-platform.
+- [Improved Windows support.](https://github.com/IndigoUnited/node-cross-spawn#why)
+- Higher max buffer. 10 MB instead of 200 KB.
+- [Executes locally installed binaries by name.](#preferlocal)
+- [Cleans up spawned processes when the parent process dies.](#cleanup)
+
+
+## Install
+
+```
+$ npm install execa
+```
+
+
+## Usage
+
+```js
+const execa = require('execa');
+
+(async () => {
+ const {stdout} = await execa('echo', ['unicorns']);
+ console.log(stdout);
+ //=> 'unicorns'
+})();
+```
+
+Additional examples:
+
+```js
+const execa = require('execa');
+
+(async () => {
+ // Pipe the child process stdout to the current stdout
+ execa('echo', ['unicorns']).stdout.pipe(process.stdout);
+
+
+ // Run a shell command
+ const {stdout} = await execa.shell('echo unicorns');
+ //=> 'unicorns'
+
+
+ // Catching an error
+ try {
+ await execa.shell('exit 3');
+ } catch (error) {
+ console.log(error);
+ /*
+ {
+ message: 'Command failed: /bin/sh -c exit 3'
+ killed: false,
+ code: 3,
+ signal: null,
+ cmd: '/bin/sh -c exit 3',
+ stdout: '',
+ stderr: '',
+ timedOut: false
+ }
+ */
+ }
+})();
+
+// Catching an error with a sync method
+try {
+ execa.shellSync('exit 3');
+} catch (error) {
+ console.log(error);
+ /*
+ {
+ message: 'Command failed: /bin/sh -c exit 3'
+ code: 3,
+ signal: null,
+ cmd: '/bin/sh -c exit 3',
+ stdout: '',
+ stderr: '',
+ timedOut: false
+ }
+ */
+}
+```
+
+
+## API
+
+### execa(file, [arguments], [options])
+
+Execute a file.
+
+Think of this as a mix of `child_process.execFile` and `child_process.spawn`.
+
+Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties.
+
+### execa.stdout(file, [arguments], [options])
+
+Same as `execa()`, but returns only `stdout`.
+
+### execa.stderr(file, [arguments], [options])
+
+Same as `execa()`, but returns only `stderr`.
+
+### execa.shell(command, [options])
+
+Execute a command through the system shell. Prefer `execa()` whenever possible, as it's both faster and safer.
+
+Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess).
+
+The `child_process` instance is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties.
+
+### execa.sync(file, [arguments], [options])
+
+Execute a file synchronously.
+
+Returns the same result object as [`child_process.spawnSync`](https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options).
+
+This method throws an `Error` if the command fails.
+
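+A minimal sketch of the synchronous form:
+
+```js
+const execa = require('execa');
+
+const {stdout} = execa.sync('echo', ['unicorns']);
+console.log(stdout);
+//=> 'unicorns'
+```
+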
+### execa.shellSync(file, [options])
+
+Execute a command synchronously through the system shell.
+
+Returns the same result object as [`child_process.spawnSync`](https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options).
+
+### options
+
+Type: `Object`
+
+#### cwd
+
+Type: `string`
+Default: `process.cwd()`
+
+Current working directory of the child process.
+
+#### env
+
+Type: `Object`
+Default: `process.env`
+
+Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this.
+
+#### extendEnv
+
+Type: `boolean`
+Default: `true`
+
+Set to `false` if you don't want to extend the environment variables when providing the `env` property.
+
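+For example, a minimal sketch (assuming `node` is on your `PATH`; `FOO` is merged on top of `process.env` because `extendEnv` defaults to `true`):
+
+```js
+const execa = require('execa');
+
+(async () => {
+	const {stdout} = await execa('node', ['-p', 'process.env.FOO'], {env: {FOO: 'bar'}});
+	console.log(stdout);
+	//=> 'bar'
+})();
+```
+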
+#### argv0
+
+Type: `string`
+
+Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified.
+
+#### stdio
+
+Type: `string[]` `string`
+Default: `pipe`
+
+Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration.
+
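+For instance, a small sketch that lets the child write straight to the parent's terminal instead of piping:
+
+```js
+const execa = require('execa');
+
+// The child's output goes directly to the parent's stdio instead of being buffered
+execa('echo', ['unicorns'], {stdio: 'inherit'});
+```
+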
+#### detached
+
+Type: `boolean`
+
+Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached).
+
+#### uid
+
+Type: `number`
+
+Sets the user identity of the process.
+
+#### gid
+
+Type: `number`
+
+Sets the group identity of the process.
+
+#### shell
+
+Type: `boolean` `string`
+Default: `false`
+
+If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows.
+
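+A quick sketch (the string is interpreted by `/bin/sh` here, so shell syntax such as `&&` works):
+
+```js
+const execa = require('execa');
+
+(async () => {
+	const {stdout} = await execa('echo unicorns && echo rainbows', {shell: true});
+	console.log(stdout);
+	//=> 'unicorns\nrainbows'
+})();
+```
+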
+#### stripEof
+
+Type: `boolean`
+Default: `true`
+
+[Strip EOF](https://github.com/sindresorhus/strip-eof) (last newline) from the output.
+
+#### preferLocal
+
+Type: `boolean`
+Default: `true`
+
+Prefer locally installed binaries when looking for a binary to execute.
+If you `$ npm install foo`, you can then `execa('foo')`.
+
+#### localDir
+
+Type: `string`
+Default: `process.cwd()`
+
+Preferred path to find locally installed binaries in (use with `preferLocal`).
+
+#### input
+
+Type: `string` `Buffer` `stream.Readable`
+
+Write some input to the `stdin` of your binary.
+Streams are not allowed when using the synchronous methods.
+
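+For example (a sketch assuming the Unix `cat` binary is available):
+
+```js
+const execa = require('execa');
+
+(async () => {
+	const {stdout} = await execa('cat', {input: 'unicorns'});
+	console.log(stdout);
+	//=> 'unicorns'
+})();
+```
+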
+#### reject
+
+Type: `boolean`
+Default: `true`
+
+Setting this to `false` resolves the promise with the error instead of rejecting it.
+
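+A small sketch of inspecting a failure without `try`/`catch`:
+
+```js
+const execa = require('execa');
+
+(async () => {
+	// Resolves with the error object instead of throwing
+	const error = await execa.shell('exit 2', {reject: false});
+	console.log(error.code);
+	//=> 2
+})();
+```
+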
+#### cleanup
+
+Type: `boolean`
+Default: `true`
+
+Keep track of the spawned process and `kill` it when the parent process exits.
+
+#### encoding
+
+Type: `string`
+Default: `utf8`
+
+Specify the character encoding used to decode the `stdout` and `stderr` output.
+
+#### timeout
+
+Type: `number`
+Default: `0`
+
+If timeout is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than timeout milliseconds.
+
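+A sketch combining `timeout` with `killSignal` (assumes the Unix `sleep` binary):
+
+```js
+const execa = require('execa');
+
+(async () => {
+	try {
+		await execa('sleep', ['5'], {timeout: 1000, killSignal: 'SIGKILL'});
+	} catch (error) {
+		console.log(error.timedOut);
+		//=> true
+	}
+})();
+```
+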
+#### buffer
+
+Type: `boolean`
+Default: `true`
+
+Buffer the output from the spawned process. When buffering is disabled you must consume the output of the `stdout` and `stderr` streams because the promise will not be resolved/rejected until they have completed.
+
+#### maxBuffer
+
+Type: `number`
+Default: `10000000` (10MB)
+
+Largest amount of data in bytes allowed on `stdout` or `stderr`.
+
+#### killSignal
+
+Type: `string` `number`
+Default: `SIGTERM`
+
+Signal value to be used when the spawned process is killed.
+
+#### stdin
+
+Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe`
+
+Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
+
+#### stdout
+
+Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe`
+
+Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
+
+#### stderr
+
+Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe`
+
+Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
+
+#### windowsVerbatimArguments
+
+Type: `boolean`
+Default: `false`
+
+If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`.
+
+
+## Tips
+
+### Save and pipe output from a child process
+
+Let's say you want to show the output of a child process in real-time while also saving it to a variable.
+
+```js
+const execa = require('execa');
+const getStream = require('get-stream');
+
+const stream = execa('echo', ['foo']).stdout;
+
+stream.pipe(process.stdout);
+
+getStream(stream).then(value => {
+ console.log('child output:', value);
+});
+```
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-version/node_modules/get-stream/buffer-stream.js b/node_modules/bin-version/node_modules/get-stream/buffer-stream.js
new file mode 100644
index 00000000..4121c8e5
--- /dev/null
+++ b/node_modules/bin-version/node_modules/get-stream/buffer-stream.js
@@ -0,0 +1,51 @@
+'use strict';
+const {PassThrough} = require('stream');
+
+module.exports = options => {
+ options = Object.assign({}, options);
+
+ const {array} = options;
+ let {encoding} = options;
+ const buffer = encoding === 'buffer';
+ let objectMode = false;
+
+ if (array) {
+ objectMode = !(encoding || buffer);
+ } else {
+ encoding = encoding || 'utf8';
+ }
+
+ if (buffer) {
+ encoding = null;
+ }
+
+ let len = 0;
+ const ret = [];
+ const stream = new PassThrough({objectMode});
+
+ if (encoding) {
+ stream.setEncoding(encoding);
+ }
+
+ stream.on('data', chunk => {
+ ret.push(chunk);
+
+ if (objectMode) {
+ len = ret.length;
+ } else {
+ len += chunk.length;
+ }
+ });
+
+ stream.getBufferedValue = () => {
+ if (array) {
+ return ret;
+ }
+
+ return buffer ? Buffer.concat(ret, len) : ret.join('');
+ };
+
+ stream.getBufferedLength = () => len;
+
+ return stream;
+};
diff --git a/node_modules/bin-version/node_modules/get-stream/index.js b/node_modules/bin-version/node_modules/get-stream/index.js
new file mode 100644
index 00000000..7e5584a6
--- /dev/null
+++ b/node_modules/bin-version/node_modules/get-stream/index.js
@@ -0,0 +1,50 @@
+'use strict';
+const pump = require('pump');
+const bufferStream = require('./buffer-stream');
+
+class MaxBufferError extends Error {
+ constructor() {
+ super('maxBuffer exceeded');
+ this.name = 'MaxBufferError';
+ }
+}
+
+function getStream(inputStream, options) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
+
+ options = Object.assign({maxBuffer: Infinity}, options);
+
+ const {maxBuffer} = options;
+
+ let stream;
+ return new Promise((resolve, reject) => {
+ const rejectPromise = error => {
+ if (error) { // A null check
+ error.bufferedData = stream.getBufferedValue();
+ }
+ reject(error);
+ };
+
+ stream = pump(inputStream, bufferStream(options), error => {
+ if (error) {
+ rejectPromise(error);
+ return;
+ }
+
+ resolve();
+ });
+
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ rejectPromise(new MaxBufferError());
+ }
+ });
+ }).then(() => stream.getBufferedValue());
+}
+
+module.exports = getStream;
+module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
+module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
+module.exports.MaxBufferError = MaxBufferError;
diff --git a/node_modules/bin-version/node_modules/get-stream/license b/node_modules/bin-version/node_modules/get-stream/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-version/node_modules/get-stream/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-version/node_modules/get-stream/package.json b/node_modules/bin-version/node_modules/get-stream/package.json
new file mode 100644
index 00000000..619651cd
--- /dev/null
+++ b/node_modules/bin-version/node_modules/get-stream/package.json
@@ -0,0 +1,46 @@
+{
+ "name": "get-stream",
+ "version": "4.1.0",
+ "description": "Get a stream as a string, buffer, or array",
+ "license": "MIT",
+ "repository": "sindresorhus/get-stream",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js",
+ "buffer-stream.js"
+ ],
+ "keywords": [
+ "get",
+ "stream",
+ "promise",
+ "concat",
+ "string",
+ "text",
+ "buffer",
+ "read",
+ "data",
+ "consume",
+ "readable",
+ "readablestream",
+ "array",
+ "object"
+ ],
+ "dependencies": {
+ "pump": "^3.0.0"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "into-stream": "^3.0.0",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-version/node_modules/get-stream/readme.md b/node_modules/bin-version/node_modules/get-stream/readme.md
new file mode 100644
index 00000000..b87a4d37
--- /dev/null
+++ b/node_modules/bin-version/node_modules/get-stream/readme.md
@@ -0,0 +1,123 @@
+# get-stream
+
+> Get a stream as a string, buffer, or array
+
+
+## Install
+
+```
+$ npm install get-stream
+```
+
+
+## Usage
+
+```js
+const fs = require('fs');
+const getStream = require('get-stream');
+
+(async () => {
+ const stream = fs.createReadStream('unicorn.txt');
+
+ console.log(await getStream(stream));
+ /*
+ ,,))))))));,
+ __)))))))))))))),
+ \|/ -\(((((''''((((((((.
+ -*-==//////(('' . `)))))),
+ /|\ ))| o ;-. '((((( ,(,
+ ( `| / ) ;))))' ,_))^;(~
+ | | | ,))((((_ _____------~~~-. %,;(;(>';'~
+ o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~
+ ; ''''```` `: `:::|\,__,%% );`'; ~
+ | _ ) / `:|`----' `-'
+ ______/\/~ | / /
+ /~;;.____/;;' / ___--,-( `;;;/
+ / // _;______;'------~~~~~ /;;/\ /
+ // | | / ; \;;,\
+ (<_ | ; /',/-----' _>
+ \_| ||_ //~;~~~~~~~~~
+ `\_| (,~~
+ \~\
+ ~~
+ */
+})();
+```
+
+
+## API
+
+The methods return a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode.
+
+### getStream(stream, [options])
+
+Get the `stream` as a string.
+
+#### options
+
+Type: `Object`
+
+##### encoding
+
+Type: `string`
+Default: `utf8`
+
+[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream.
+
+##### maxBuffer
+
+Type: `number`
+Default: `Infinity`
+
+Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error.
+
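+For example, a small sketch reusing the `unicorn.txt` stream from above:
+
+```js
+const fs = require('fs');
+const getStream = require('get-stream');
+
+(async () => {
+	try {
+		await getStream(fs.createReadStream('unicorn.txt'), {maxBuffer: 100});
+	} catch (error) {
+		if (error instanceof getStream.MaxBufferError) {
+			console.log('The stream was longer than 100 characters');
+		}
+	}
+})();
+```
+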
+### getStream.buffer(stream, [options])
+
+Get the `stream` as a buffer.
+
+It honors the `maxBuffer` option as above, but it refers to byte length rather than string length.
+
+### getStream.array(stream, [options])
+
+Get the `stream` as an array of values.
+
+It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen:
+
+- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes).
+
+- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array.
+
+- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array.
+
+
+## Errors
+
+If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error.
+
+```js
+(async () => {
+ try {
+ await getStream(streamThatErrorsAtTheEnd('unicorn'));
+ } catch (error) {
+ console.log(error.bufferedData);
+ //=> 'unicorn'
+ }
+})()
+```
+
+
+## FAQ
+
+### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)?
+
+This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package.
+
+
+## Related
+
+- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-version/package.json b/node_modules/bin-version/package.json
new file mode 100644
index 00000000..b3b6d09c
--- /dev/null
+++ b/node_modules/bin-version/package.json
@@ -0,0 +1,40 @@
+{
+ "name": "bin-version",
+ "version": "3.1.0",
+ "description": "Get the version of a binary in semver format",
+ "license": "MIT",
+ "repository": "sindresorhus/bin-version",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "keywords": [
+ "bin",
+ "binary",
+ "executable",
+ "version",
+ "semver",
+ "semantic",
+ "cli"
+ ],
+ "dependencies": {
+ "execa": "^1.0.0",
+ "find-versions": "^3.0.0"
+ },
+ "devDependencies": {
+ "ava": "^1.4.1",
+ "tsd": "^0.7.2",
+ "xo": "^0.24.0"
+ }
+}
diff --git a/node_modules/bin-version/readme.md b/node_modules/bin-version/readme.md
new file mode 100644
index 00000000..4152faab
--- /dev/null
+++ b/node_modules/bin-version/readme.md
@@ -0,0 +1,72 @@
+# bin-version
+
+> Get the version of a binary in [semver](https://github.com/npm/node-semver) format
+
+
+## Install
+
+```
+$ npm install bin-version
+```
+
+
+## Usage
+
+```
+$ curl --version
+curl 7.30.0 (x86_64-apple-darwin13.0)
+```
+
+```js
+const binVersion = require('bin-version');
+
+(async () => {
+ console.log(await binVersion('curl'));
+ //=> '7.30.0'
+})();
+```
+
+```
+$ openssl version
+OpenSSL 1.0.2d 9 Jul 2015
+```
+
+```js
+(async () => {
+ console.log(await binVersion('openssl', {args: ['version']}));
+ //=> '1.0.2'
+})();
+```
+
+## API
+
+### binVersion(binary, [options])
+
+Returns a `Promise` with the version of the `binary`.
+
+#### binary
+
+Type: `string`
+
+The name of or path to the binary to get the version from.
+
+#### options
+
+Type: `object`
+
+##### args
+
+Type: `string[]`
+Default: `['--version']`
+
+The arguments to pass to `binary` so that it will print its version.
+
+## Related
+
+- [bin-version-cli](https://github.com/sindresorhus/bin-version-cli) - CLI for this module
+- [find-versions](https://github.com/sindresorhus/find-versions) - Find semver versions in a string
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/index.js b/node_modules/bin-wrapper/index.js
new file mode 100644
index 00000000..1aba001f
--- /dev/null
+++ b/node_modules/bin-wrapper/index.js
@@ -0,0 +1,208 @@
+'use strict';
+const fs = require('fs');
+const path = require('path');
+const url = require('url');
+const pify = require('pify');
+const importLazy = require('import-lazy')(require);
+
+const binCheck = importLazy('bin-check');
+const binVersionCheck = importLazy('bin-version-check');
+const download = importLazy('download');
+const osFilterObj = importLazy('os-filter-obj');
+
+const statAsync = pify(fs.stat);
+const chmodAsync = pify(fs.chmod);
+
+/**
+ * Initialize a new `BinWrapper`
+ *
+ * @param {Object} options
+ * @api public
+ */
+module.exports = class BinWrapper {
+ constructor(options = {}) {
+ this.options = options;
+
+ if (this.options.strip <= 0) {
+ this.options.strip = 0;
+ } else if (!this.options.strip) {
+ this.options.strip = 1;
+ }
+ }
+
+ /**
+ * Get or set files to download
+ *
+ * @param {String} src
+ * @param {String} os
+ * @param {String} arch
+ * @api public
+ */
+ src(src, os, arch) {
+ if (arguments.length === 0) {
+ return this._src;
+ }
+
+ this._src = this._src || [];
+ this._src.push({
+ url: src,
+ os,
+ arch
+ });
+
+ return this;
+ }
+
+ /**
+ * Get or set the destination
+ *
+ * @param {String} dest
+ * @api public
+ */
+ dest(dest) {
+ if (arguments.length === 0) {
+ return this._dest;
+ }
+
+ this._dest = dest;
+ return this;
+ }
+
+ /**
+ * Get or set the binary
+ *
+ * @param {String} bin
+ * @api public
+ */
+ use(bin) {
+ if (arguments.length === 0) {
+ return this._use;
+ }
+
+ this._use = bin;
+ return this;
+ }
+
+ /**
+ * Get or set a semver range to test the binary against
+ *
+ * @param {String} range
+ * @api public
+ */
+ version(range) {
+ if (arguments.length === 0) {
+ return this._version;
+ }
+
+ this._version = range;
+ return this;
+ }
+
+ /**
+ * Get path to the binary
+ *
+ * @api public
+ */
+ path() {
+ return path.join(this.dest(), this.use());
+ }
+
+ /**
+ * Run
+ *
+ * @param {Array} cmd
+ * @api public
+ */
+ run(cmd = ['--version']) {
+ return this.findExisting().then(() => {
+ if (this.options.skipCheck) {
+ return;
+ }
+
+ return this.runCheck(cmd);
+ });
+ }
+
+ /**
+ * Run binary check
+ *
+ * @param {Array} cmd
+ * @api private
+ */
+ runCheck(cmd) {
+ return binCheck(this.path(), cmd).then(works => {
+ if (!works) {
+ throw new Error(`The \`${this.path()}\` binary doesn't seem to work correctly`);
+ }
+
+ if (this.version()) {
+ return binVersionCheck(this.path(), this.version());
+ }
+
+ return Promise.resolve();
+ });
+ }
+
+ /**
+ * Find existing files
+ *
+ * @api private
+ */
+ findExisting() {
+ return statAsync(this.path()).catch(error => {
+ if (error && error.code === 'ENOENT') {
+ return this.download();
+ }
+
+ return Promise.reject(error);
+ });
+ }
+
+ /**
+ * Download files
+ *
+ * @api private
+ */
+ download() {
+ const files = osFilterObj(this.src() || []);
+ const urls = [];
+
+ if (files.length === 0) {
+ return Promise.reject(new Error('No binary found matching your system. It\'s probably not supported.'));
+ }
+
+ files.forEach(file => urls.push(file.url));
+
+ return Promise.all(urls.map(url => download(url, this.dest(), {
+ extract: true,
+ strip: this.options.strip
+ }))).then(result => {
+ const resultingFiles = flatten(result.map((item, index) => {
+ if (Array.isArray(item)) {
+ return item.map(file => file.path);
+ }
+
+ const parsedUrl = url.parse(files[index].url);
+ const parsedPath = path.parse(parsedUrl.pathname);
+
+ return parsedPath.base;
+ }));
+
+ return Promise.all(resultingFiles.map(fileName => {
+ return chmodAsync(path.join(this.dest(), fileName), 0o755);
+ }));
+ });
+ }
+};
+
+function flatten(arr) {
+ return arr.reduce((acc, elem) => {
+ if (Array.isArray(elem)) {
+ acc.push(...elem);
+ } else {
+ acc.push(elem);
+ }
+
+ return acc;
+ }, []);
+}
diff --git a/node_modules/bin-wrapper/license b/node_modules/bin-wrapper/license
new file mode 100644
index 00000000..1c7c9750
--- /dev/null
+++ b/node_modules/bin-wrapper/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Kevin Mårtensson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/download/index.js b/node_modules/bin-wrapper/node_modules/download/index.js
new file mode 100644
index 00000000..342ff476
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/index.js
@@ -0,0 +1,119 @@
+'use strict';
+const fs = require('fs');
+const path = require('path');
+const url = require('url');
+const caw = require('caw');
+const contentDisposition = require('content-disposition');
+const archiveType = require('archive-type');
+const decompress = require('decompress');
+const filenamify = require('filenamify');
+const getStream = require('get-stream');
+const got = require('got');
+const makeDir = require('make-dir');
+const pify = require('pify');
+const pEvent = require('p-event');
+const fileType = require('file-type');
+const extName = require('ext-name');
+
+const fsP = pify(fs);
+const filenameFromPath = res => path.basename(url.parse(res.requestUrl).pathname);
+
+const getExtFromMime = res => {
+ const header = res.headers['content-type'];
+
+ if (!header) {
+ return null;
+ }
+
+ const exts = extName.mime(header);
+
+ if (exts.length !== 1) {
+ return null;
+ }
+
+ return exts[0].ext;
+};
+
+const getFilename = (res, data) => {
+ const header = res.headers['content-disposition'];
+
+ if (header) {
+ const parsed = contentDisposition.parse(header);
+
+ if (parsed.parameters && parsed.parameters.filename) {
+ return parsed.parameters.filename;
+ }
+ }
+
+ let filename = filenameFromPath(res);
+
+ if (!path.extname(filename)) {
+ const ext = (fileType(data) || {}).ext || getExtFromMime(res);
+
+ if (ext) {
+ filename = `${filename}.${ext}`;
+ }
+ }
+
+ return filename;
+};
+
+const getProtocolFromUri = uri => {
+ let {protocol} = url.parse(uri);
+
+ if (protocol) {
+ protocol = protocol.slice(0, -1);
+ }
+
+ return protocol;
+};
+
+module.exports = (uri, output, opts) => {
+ if (typeof output === 'object') {
+ opts = output;
+ output = null;
+ }
+
+ const protocol = getProtocolFromUri(uri);
+
+ opts = Object.assign({
+ encoding: null,
+ rejectUnauthorized: process.env.npm_config_strict_ssl !== 'false'
+ }, opts);
+
+ const agent = caw(opts.proxy, {protocol});
+ const stream = got.stream(uri, Object.assign({agent}, opts))
+ .on('redirect', (response, nextOptions) => {
+ const redirectProtocol = getProtocolFromUri(nextOptions.href);
+ if (redirectProtocol && redirectProtocol !== protocol) {
+ nextOptions.agent = caw(opts.proxy, {protocol: redirectProtocol});
+ }
+ });
+
+ const promise = pEvent(stream, 'response').then(res => {
+ const encoding = opts.encoding === null ? 'buffer' : opts.encoding;
+ return Promise.all([getStream(stream, {encoding}), res]);
+ }).then(result => {
+ const [data, res] = result;
+
+ if (!output) {
+ return opts.extract && archiveType(data) ? decompress(data, opts) : data;
+ }
+
+ const filename = opts.filename || filenamify(getFilename(res, data));
+ const outputFilepath = path.join(output, filename);
+
+ if (opts.extract && archiveType(data)) {
+ return decompress(data, path.dirname(outputFilepath), opts);
+ }
+
+ return makeDir(path.dirname(outputFilepath))
+ .then(() => fsP.writeFile(outputFilepath, data))
+ .then(() => data);
+ });
+
+ stream.then = promise.then.bind(promise);
+ stream.catch = promise.catch.bind(promise);
+
+ return stream;
+};
diff --git a/node_modules/bin-wrapper/node_modules/download/license b/node_modules/bin-wrapper/node_modules/download/license
new file mode 100644
index 00000000..db6bc32c
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Kevin Mårtensson (github.com/kevva)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/index.js b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/index.js
new file mode 100644
index 00000000..1dee43ad
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/index.js
@@ -0,0 +1,84 @@
+'use strict';
+
+const processFn = (fn, opts) => function () {
+ const P = opts.promiseModule;
+ const args = new Array(arguments.length);
+
+ for (let i = 0; i < arguments.length; i++) {
+ args[i] = arguments[i];
+ }
+
+ return new P((resolve, reject) => {
+ if (opts.errorFirst) {
+ args.push(function (err, result) {
+ if (opts.multiArgs) {
+ const results = new Array(arguments.length - 1);
+
+ for (let i = 1; i < arguments.length; i++) {
+ results[i - 1] = arguments[i];
+ }
+
+ if (err) {
+ results.unshift(err);
+ reject(results);
+ } else {
+ resolve(results);
+ }
+ } else if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ });
+ } else {
+ args.push(function (result) {
+ if (opts.multiArgs) {
+ const results = new Array(arguments.length - 1);
+
+ for (let i = 0; i < arguments.length; i++) {
+ results[i] = arguments[i];
+ }
+
+ resolve(results);
+ } else {
+ resolve(result);
+ }
+ });
+ }
+
+ fn.apply(this, args);
+ });
+};
+
+module.exports = (obj, opts) => {
+ opts = Object.assign({
+ exclude: [/.+(Sync|Stream)$/],
+ errorFirst: true,
+ promiseModule: Promise
+ }, opts);
+
+ const filter = key => {
+ const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key);
+ return opts.include ? opts.include.some(match) : !opts.exclude.some(match);
+ };
+
+ let ret;
+ if (typeof obj === 'function') {
+ ret = function () {
+ if (opts.excludeMain) {
+ return obj.apply(this, arguments);
+ }
+
+ return processFn(obj, opts).apply(this, arguments);
+ };
+ } else {
+ ret = Object.create(Object.getPrototypeOf(obj));
+ }
+
+ for (const key in obj) { // eslint-disable-line guard-for-in
+ const x = obj[key];
+ ret[key] = typeof x === 'function' && filter(key) ? processFn(x, opts) : x;
+ }
+
+ return ret;
+};
diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/license b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/package.json b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/package.json
new file mode 100644
index 00000000..468d8576
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/package.json
@@ -0,0 +1,51 @@
+{
+ "name": "pify",
+ "version": "3.0.0",
+ "description": "Promisify a callback-style function",
+ "license": "MIT",
+ "repository": "sindresorhus/pify",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava && npm run optimization-test",
+ "optimization-test": "node --allow-natives-syntax optimization-test.js"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "promise",
+ "promises",
+ "promisify",
+ "all",
+ "denodify",
+ "denodeify",
+ "callback",
+ "cb",
+ "node",
+ "then",
+ "thenify",
+ "convert",
+ "transform",
+ "wrap",
+ "wrapper",
+ "bind",
+ "to",
+ "async",
+ "await",
+ "es2015",
+ "bluebird"
+ ],
+ "devDependencies": {
+ "ava": "*",
+ "pinkie-promise": "^2.0.0",
+ "v8-natives": "^1.0.0",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/readme.md b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/readme.md
new file mode 100644
index 00000000..376ca4e5
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/readme.md
@@ -0,0 +1,131 @@
+# pify
+
+> Promisify a callback-style function
+
+
+## Install
+
+```
+$ npm install --save pify
+```
+
+
+## Usage
+
+```js
+const fs = require('fs');
+const pify = require('pify');
+
+// Promisify a single function
+pify(fs.readFile)('package.json', 'utf8').then(data => {
+ console.log(JSON.parse(data).name);
+ //=> 'pify'
+});
+
+// Promisify all methods in a module
+pify(fs).readFile('package.json', 'utf8').then(data => {
+ console.log(JSON.parse(data).name);
+ //=> 'pify'
+});
+```
+
+
+## API
+
+### pify(input, [options])
+
+Returns a `Promise` wrapped version of the supplied function or module.
+
+#### input
+
+Type: `Function` `Object`
+
+Callback-style function or module whose methods you want to promisify.
+
+#### options
+
+##### multiArgs
+
+Type: `boolean`
+Default: `false`
+
+By default, the promisified function will only return the second argument from the callback, which works fine for most APIs. This option can be useful for modules like `request` that return multiple arguments. Turning this on will make it return an array of all arguments from the callback, excluding the error argument, instead of just the second argument. This also applies to rejections, where it returns an array of all the callback arguments, including the error.
+
+```js
+const request = require('request');
+const pify = require('pify');
+
+pify(request, {multiArgs: true})('https://sindresorhus.com').then(result => {
+ const [httpResponse, body] = result;
+});
+```
+
+##### include
+
+Type: `string[]` `RegExp[]`
+
+Methods in a module to promisify. Remaining methods will be left untouched.
+
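+A small sketch that promisifies only selected methods:
+
+```js
+const fs = require('fs');
+const pify = require('pify');
+
+// Only `readFile` is promisified; every other method is passed through untouched
+const promisified = pify(fs, {include: ['readFile']});
+
+promisified.readFile('package.json', 'utf8').then(data => {
+	console.log(JSON.parse(data).name);
+});
+```
+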
+##### exclude
+
+Type: `string[]` `RegExp[]`
+Default: `[/.+(Sync|Stream)$/]`
+
+Methods in a module **not** to promisify. Methods with names ending in `'Sync'` or `'Stream'` are excluded by default.
+
+##### excludeMain
+
+Type: `boolean`
+Default: `false`
+
+If the given module is a function itself, it will be promisified. Turn this option on if you want to promisify only methods of the module.
+
+```js
+const pify = require('pify');
+
+function fn() {
+ return true;
+}
+
+fn.method = (data, callback) => {
+ setImmediate(() => {
+ callback(null, data);
+ });
+};
+
+// Promisify methods but not `fn()`
+const promiseFn = pify(fn, {excludeMain: true});
+
+if (promiseFn()) {
+ promiseFn.method('hi').then(data => {
+ console.log(data);
+ });
+}
+```
+
+##### errorFirst
+
+Type: `boolean`
+Default: `true`
+
+Whether the callback has an error as the first argument. You'll want to set this to `false` if you're dealing with an API that doesn't have an error as the first argument, like `fs.exists()`, some browser APIs, Chrome Extension APIs, etc.
+
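+For example, with the error-less callback of `fs.exists()` mentioned above (a sketch assuming a `package.json` in the current directory):
+
+```js
+const fs = require('fs');
+const pify = require('pify');
+
+// `fs.exists()` calls back with a single boolean and no error argument
+pify(fs.exists, {errorFirst: false})('package.json').then(exists => {
+	console.log(exists);
+	//=> true
+});
+```
+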
+##### promiseModule
+
+Type: `Function`
+
+Custom promise module to use instead of the native one.
+
+Check out [`pinkie-promise`](https://github.com/floatdrop/pinkie-promise) if you need a tiny promise polyfill.
+
+
+## Related
+
+- [p-event](https://github.com/sindresorhus/p-event) - Promisify an event by waiting for it to be emitted
+- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently
+- [More…](https://github.com/sindresorhus/promise-fun)
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/node_modules/download/package.json b/node_modules/bin-wrapper/node_modules/download/package.json
new file mode 100644
index 00000000..0b45cf6e
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/package.json
@@ -0,0 +1,51 @@
+{
+ "name": "download",
+ "version": "7.1.0",
+ "description": "Download and extract files",
+ "license": "MIT",
+ "repository": "kevva/download",
+ "author": {
+ "email": "kevinmartensson@gmail.com",
+ "name": "Kevin Mårtensson",
+ "url": "github.com/kevva"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "download",
+ "extract",
+ "http",
+ "request",
+ "url"
+ ],
+ "dependencies": {
+ "archive-type": "^4.0.0",
+ "caw": "^2.0.1",
+ "content-disposition": "^0.5.2",
+ "decompress": "^4.2.0",
+ "ext-name": "^5.0.0",
+ "file-type": "^8.1.0",
+ "filenamify": "^2.0.0",
+ "get-stream": "^3.0.0",
+ "got": "^8.3.1",
+ "make-dir": "^1.2.0",
+ "p-event": "^2.1.0",
+ "pify": "^3.0.0"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "is-zip": "^1.0.0",
+ "nock": "^9.2.5",
+ "path-exists": "^3.0.0",
+ "random-buffer": "^0.1.0",
+ "rimraf": "^2.6.2",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/download/readme.md b/node_modules/bin-wrapper/node_modules/download/readme.md
new file mode 100644
index 00000000..dfb3fcf2
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/download/readme.md
@@ -0,0 +1,86 @@
+# download
+
+> Download and extract files
+
+*See [download-cli](https://github.com/kevva/download-cli) for the command-line version.*
+
+
+## Install
+
+```
+$ npm install download
+```
+
+
+## Usage
+
+```js
+const fs = require('fs');
+const download = require('download');
+
+download('http://unicorn.com/foo.jpg', 'dist').then(() => {
+ console.log('done!');
+});
+
+download('http://unicorn.com/foo.jpg').then(data => {
+ fs.writeFileSync('dist/foo.jpg', data);
+});
+
+download('unicorn.com/foo.jpg').pipe(fs.createWriteStream('dist/foo.jpg'));
+
+Promise.all([
+ 'unicorn.com/foo.jpg',
+ 'cats.com/dancing.gif'
+].map(x => download(x, 'dist'))).then(() => {
+ console.log('files downloaded!');
+});
+```
+
+
+## API
+
+### download(url, [destination], [options])
+
+Returns both a `Promise` and a [Duplex stream](https://nodejs.org/api/stream.html#stream_class_stream_duplex) with [additional events](https://github.com/sindresorhus/got#streams-1).
+
+#### url
+
+Type: `string`
+
+URL to download.
+
+#### destination
+
+Type: `string`
+
+Path to where your file will be written.
+
+#### options
+
+Type: `Object`
+
+Same options as [`got`](https://github.com/sindresorhus/got#options) and [`decompress`](https://github.com/kevva/decompress#options) in addition to the ones below.
+
+##### extract
+
+Type: `boolean`
+Default: `false`
+
+If set to `true`, try extracting the file using [`decompress`](https://github.com/kevva/decompress).
+
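+A sketch using the same placeholder URL style as above (the archive URL is hypothetical):
+
+```js
+const download = require('download');
+
+// Options are also passed through to `decompress`, so e.g. `strip` works here too
+download('http://unicorn.com/foo.zip', 'dist', {extract: true}).then(() => {
+	console.log('extracted into dist!');
+});
+```
+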
+##### filename
+
+Type: `string`
+
+Name of the saved file.
+
+##### proxy
+
+Type: `string`
+
+Proxy endpoint.
+
+
+## License
+
+MIT © [Kevin Mårtensson](https://github.com/kevva)
diff --git a/node_modules/bin-wrapper/node_modules/file-type/index.js b/node_modules/bin-wrapper/node_modules/file-type/index.js
new file mode 100644
index 00000000..669fb558
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/file-type/index.js
@@ -0,0 +1,809 @@
+'use strict';
+const toBytes = s => [...s].map(c => c.charCodeAt(0));
+const xpiZipFilename = toBytes('META-INF/mozilla.rsa');
+const oxmlContentTypes = toBytes('[Content_Types].xml');
+const oxmlRels = toBytes('_rels/.rels');
+
+module.exports = input => {
+ const buf = input instanceof Uint8Array ? input : new Uint8Array(input);
+
+ if (!(buf && buf.length > 1)) {
+ return null;
+ }
+
+ const check = (header, options) => {
+ options = Object.assign({
+ offset: 0
+ }, options);
+
+ for (let i = 0; i < header.length; i++) {
+ // If a bitmask is set
+ if (options.mask) {
+ // If header doesn't equal `buf` with bits masked off
+ if (header[i] !== (options.mask[i] & buf[i + options.offset])) {
+ return false;
+ }
+ } else if (header[i] !== buf[i + options.offset]) {
+ return false;
+ }
+ }
+
+ return true;
+ };
+
+ const checkString = (header, options) => check(toBytes(header), options);
+
+ if (check([0xFF, 0xD8, 0xFF])) {
+ return {
+ ext: 'jpg',
+ mime: 'image/jpeg'
+ };
+ }
+
+ if (check([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A])) {
+ return {
+ ext: 'png',
+ mime: 'image/png'
+ };
+ }
+
+ if (check([0x47, 0x49, 0x46])) {
+ return {
+ ext: 'gif',
+ mime: 'image/gif'
+ };
+ }
+
+ if (check([0x57, 0x45, 0x42, 0x50], {offset: 8})) {
+ return {
+ ext: 'webp',
+ mime: 'image/webp'
+ };
+ }
+
+ if (check([0x46, 0x4C, 0x49, 0x46])) {
+ return {
+ ext: 'flif',
+ mime: 'image/flif'
+ };
+ }
+
+ // Needs to be before `tif` check
+ if (
+ (check([0x49, 0x49, 0x2A, 0x0]) || check([0x4D, 0x4D, 0x0, 0x2A])) &&
+ check([0x43, 0x52], {offset: 8})
+ ) {
+ return {
+ ext: 'cr2',
+ mime: 'image/x-canon-cr2'
+ };
+ }
+
+ if (
+ check([0x49, 0x49, 0x2A, 0x0]) ||
+ check([0x4D, 0x4D, 0x0, 0x2A])
+ ) {
+ return {
+ ext: 'tif',
+ mime: 'image/tiff'
+ };
+ }
+
+ if (check([0x42, 0x4D])) {
+ return {
+ ext: 'bmp',
+ mime: 'image/bmp'
+ };
+ }
+
+ if (check([0x49, 0x49, 0xBC])) {
+ return {
+ ext: 'jxr',
+ mime: 'image/vnd.ms-photo'
+ };
+ }
+
+ if (check([0x38, 0x42, 0x50, 0x53])) {
+ return {
+ ext: 'psd',
+ mime: 'image/vnd.adobe.photoshop'
+ };
+ }
+
+ // Zip-based file formats
+ // Need to be before the `zip` check
+ if (check([0x50, 0x4B, 0x3, 0x4])) {
+ if (
+ check([0x6D, 0x69, 0x6D, 0x65, 0x74, 0x79, 0x70, 0x65, 0x61, 0x70, 0x70, 0x6C, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6F, 0x6E, 0x2F, 0x65, 0x70, 0x75, 0x62, 0x2B, 0x7A, 0x69, 0x70], {offset: 30})
+ ) {
+ return {
+ ext: 'epub',
+ mime: 'application/epub+zip'
+ };
+ }
+
+ // Assumes signed `.xpi` from addons.mozilla.org
+ if (check(xpiZipFilename, {offset: 30})) {
+ return {
+ ext: 'xpi',
+ mime: 'application/x-xpinstall'
+ };
+ }
+
+ if (checkString('mimetypeapplication/vnd.oasis.opendocument.text', {offset: 30})) {
+ return {
+ ext: 'odt',
+ mime: 'application/vnd.oasis.opendocument.text'
+ };
+ }
+
+ if (checkString('mimetypeapplication/vnd.oasis.opendocument.spreadsheet', {offset: 30})) {
+ return {
+ ext: 'ods',
+ mime: 'application/vnd.oasis.opendocument.spreadsheet'
+ };
+ }
+
+ if (checkString('mimetypeapplication/vnd.oasis.opendocument.presentation', {offset: 30})) {
+ return {
+ ext: 'odp',
+ mime: 'application/vnd.oasis.opendocument.presentation'
+ };
+ }
+
+ // https://github.com/file/file/blob/master/magic/Magdir/msooxml
+ if (check(oxmlContentTypes, {offset: 30}) || check(oxmlRels, {offset: 30})) {
+ const sliced = buf.subarray(4, 4 + 2000);
+ const nextZipHeaderIndex = arr => arr.findIndex((el, i, arr) => arr[i] === 0x50 && arr[i + 1] === 0x4B && arr[i + 2] === 0x3 && arr[i + 3] === 0x4);
+ const header2Pos = nextZipHeaderIndex(sliced);
+
+ if (header2Pos !== -1) {
+ const slicedAgain = buf.subarray(header2Pos + 8, header2Pos + 8 + 1000);
+ const header3Pos = nextZipHeaderIndex(slicedAgain);
+
+ if (header3Pos !== -1) {
+ const offset = 8 + header2Pos + header3Pos + 30;
+
+ if (checkString('word/', {offset})) {
+ return {
+ ext: 'docx',
+ mime: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
+ };
+ }
+
+ if (checkString('ppt/', {offset})) {
+ return {
+ ext: 'pptx',
+ mime: 'application/vnd.openxmlformats-officedocument.presentationml.presentation'
+ };
+ }
+
+ if (checkString('xl/', {offset})) {
+ return {
+ ext: 'xlsx',
+ mime: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
+ };
+ }
+ }
+ }
+ }
+ }
+
+ if (
+ check([0x50, 0x4B]) &&
+ (buf[2] === 0x3 || buf[2] === 0x5 || buf[2] === 0x7) &&
+ (buf[3] === 0x4 || buf[3] === 0x6 || buf[3] === 0x8)
+ ) {
+ return {
+ ext: 'zip',
+ mime: 'application/zip'
+ };
+ }
+
+ if (check([0x75, 0x73, 0x74, 0x61, 0x72], {offset: 257})) {
+ return {
+ ext: 'tar',
+ mime: 'application/x-tar'
+ };
+ }
+
+ if (
+ check([0x52, 0x61, 0x72, 0x21, 0x1A, 0x7]) &&
+ (buf[6] === 0x0 || buf[6] === 0x1)
+ ) {
+ return {
+ ext: 'rar',
+ mime: 'application/x-rar-compressed'
+ };
+ }
+
+ if (check([0x1F, 0x8B, 0x8])) {
+ return {
+ ext: 'gz',
+ mime: 'application/gzip'
+ };
+ }
+
+ if (check([0x42, 0x5A, 0x68])) {
+ return {
+ ext: 'bz2',
+ mime: 'application/x-bzip2'
+ };
+ }
+
+ if (check([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])) {
+ return {
+ ext: '7z',
+ mime: 'application/x-7z-compressed'
+ };
+ }
+
+ if (check([0x78, 0x01])) {
+ return {
+ ext: 'dmg',
+ mime: 'application/x-apple-diskimage'
+ };
+ }
+
+ if (check([0x33, 0x67, 0x70, 0x35]) || // 3gp5
+ (
+ check([0x0, 0x0, 0x0]) && check([0x66, 0x74, 0x79, 0x70], {offset: 4}) &&
+ (
+ check([0x6D, 0x70, 0x34, 0x31], {offset: 8}) || // MP41
+ check([0x6D, 0x70, 0x34, 0x32], {offset: 8}) || // MP42
+ check([0x69, 0x73, 0x6F, 0x6D], {offset: 8}) || // ISOM
+ check([0x69, 0x73, 0x6F, 0x32], {offset: 8}) || // ISO2
+ check([0x6D, 0x6D, 0x70, 0x34], {offset: 8}) || // MMP4
+ check([0x4D, 0x34, 0x56], {offset: 8}) || // M4V
+ check([0x64, 0x61, 0x73, 0x68], {offset: 8}) // DASH
+ )
+ )) {
+ return {
+ ext: 'mp4',
+ mime: 'video/mp4'
+ };
+ }
+
+ if (check([0x4D, 0x54, 0x68, 0x64])) {
+ return {
+ ext: 'mid',
+ mime: 'audio/midi'
+ };
+ }
+
+ // https://github.com/threatstack/libmagic/blob/master/magic/Magdir/matroska
+ if (check([0x1A, 0x45, 0xDF, 0xA3])) {
+ const sliced = buf.subarray(4, 4 + 4096);
+ const idPos = sliced.findIndex((el, i, arr) => arr[i] === 0x42 && arr[i + 1] === 0x82);
+
+ if (idPos !== -1) {
+ const docTypePos = idPos + 3;
+ const findDocType = type => [...type].every((c, i) => sliced[docTypePos + i] === c.charCodeAt(0));
+
+ if (findDocType('matroska')) {
+ return {
+ ext: 'mkv',
+ mime: 'video/x-matroska'
+ };
+ }
+
+ if (findDocType('webm')) {
+ return {
+ ext: 'webm',
+ mime: 'video/webm'
+ };
+ }
+ }
+ }
+
+ if (check([0x0, 0x0, 0x0, 0x14, 0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20]) ||
+ check([0x66, 0x72, 0x65, 0x65], {offset: 4}) ||
+ check([0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20], {offset: 4}) ||
+ check([0x6D, 0x64, 0x61, 0x74], {offset: 4}) || // MJPEG
+ check([0x77, 0x69, 0x64, 0x65], {offset: 4})) {
+ return {
+ ext: 'mov',
+ mime: 'video/quicktime'
+ };
+ }
+
+ // RIFF file format which might be AVI, WAV, QCP, etc
+ if (check([0x52, 0x49, 0x46, 0x46])) {
+ if (check([0x41, 0x56, 0x49], {offset: 8})) {
+ return {
+ ext: 'avi',
+ mime: 'video/x-msvideo'
+ };
+ }
+ if (check([0x57, 0x41, 0x56, 0x45], {offset: 8})) {
+ return {
+ ext: 'wav',
+ mime: 'audio/x-wav'
+ };
+ }
+ // QLCM, QCP file
+ if (check([0x51, 0x4C, 0x43, 0x4D], {offset: 8})) {
+ return {
+ ext: 'qcp',
+ mime: 'audio/qcelp'
+ };
+ }
+ }
+
+ if (check([0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9])) {
+ return {
+ ext: 'wmv',
+ mime: 'video/x-ms-wmv'
+ };
+ }
+
+ if (
+ check([0x0, 0x0, 0x1, 0xBA]) ||
+ check([0x0, 0x0, 0x1, 0xB3])
+ ) {
+ return {
+ ext: 'mpg',
+ mime: 'video/mpeg'
+ };
+ }
+
+ if (check([0x66, 0x74, 0x79, 0x70, 0x33, 0x67], {offset: 4})) {
+ return {
+ ext: '3gp',
+ mime: 'video/3gpp'
+ };
+ }
+
+ // Check for MPEG header at different starting offsets
+ for (let start = 0; start < 2 && start < (buf.length - 16); start++) {
+ if (
+ check([0x49, 0x44, 0x33], {offset: start}) || // ID3 header
+ check([0xFF, 0xE2], {offset: start, mask: [0xFF, 0xE2]}) // MPEG 1 or 2 Layer 3 header
+ ) {
+ return {
+ ext: 'mp3',
+ mime: 'audio/mpeg'
+ };
+ }
+
+ if (
+ check([0xFF, 0xE4], {offset: start, mask: [0xFF, 0xE4]}) // MPEG 1 or 2 Layer 2 header
+ ) {
+ return {
+ ext: 'mp2',
+ mime: 'audio/mpeg'
+ };
+ }
+
+ if (
+ check([0xFF, 0xF8], {offset: start, mask: [0xFF, 0xFC]}) // MPEG 2 layer 0 using ADTS
+ ) {
+ return {
+ ext: 'mp2',
+ mime: 'audio/mpeg'
+ };
+ }
+
+ if (
+ check([0xFF, 0xF0], {offset: start, mask: [0xFF, 0xFC]}) // MPEG 4 layer 0 using ADTS
+ ) {
+ return {
+ ext: 'mp4',
+ mime: 'audio/mpeg'
+ };
+ }
+ }
+
+ if (
+ check([0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41], {offset: 4}) ||
+ check([0x4D, 0x34, 0x41, 0x20])
+ ) {
+ return {
+ ext: 'm4a',
+ mime: 'audio/m4a'
+ };
+ }
+
+ // Needs to be before `ogg` check
+ if (check([0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64], {offset: 28})) {
+ return {
+ ext: 'opus',
+ mime: 'audio/opus'
+ };
+ }
+
+ // If 'OggS' in first bytes, then OGG container
+ if (check([0x4F, 0x67, 0x67, 0x53])) {
+ // This is a OGG container
+
+ // If ' theora' in header.
+ if (check([0x80, 0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {offset: 28})) {
+ return {
+ ext: 'ogv',
+ mime: 'video/ogg'
+ };
+ }
+ // If '\x01video' in header.
+ if (check([0x01, 0x76, 0x69, 0x64, 0x65, 0x6F, 0x00], {offset: 28})) {
+ return {
+ ext: 'ogm',
+ mime: 'video/ogg'
+ };
+ }
+ // If ' FLAC' in header https://xiph.org/flac/faq.html
+ if (check([0x7F, 0x46, 0x4C, 0x41, 0x43], {offset: 28})) {
+ return {
+ ext: 'oga',
+ mime: 'audio/ogg'
+ };
+ }
+
+ // 'Speex ' in header https://en.wikipedia.org/wiki/Speex
+ if (check([0x53, 0x70, 0x65, 0x65, 0x78, 0x20, 0x20], {offset: 28})) {
+ return {
+ ext: 'spx',
+ mime: 'audio/ogg'
+ };
+ }
+
+ // If '\x01vorbis' in header
+ if (check([0x01, 0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {offset: 28})) {
+ return {
+ ext: 'ogg',
+ mime: 'audio/ogg'
+ };
+ }
+
+ // Default OGG container https://www.iana.org/assignments/media-types/application/ogg
+ return {
+ ext: 'ogx',
+ mime: 'application/ogg'
+ };
+ }
+
+ if (check([0x66, 0x4C, 0x61, 0x43])) {
+ return {
+ ext: 'flac',
+ mime: 'audio/x-flac'
+ };
+ }
+
+ if (check([0x4D, 0x41, 0x43, 0x20])) {
+ return {
+ ext: 'ape',
+ mime: 'audio/ape'
+ };
+ }
+
+ if (check([0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A])) {
+ return {
+ ext: 'amr',
+ mime: 'audio/amr'
+ };
+ }
+
+ if (check([0x25, 0x50, 0x44, 0x46])) {
+ return {
+ ext: 'pdf',
+ mime: 'application/pdf'
+ };
+ }
+
+ if (check([0x4D, 0x5A])) {
+ return {
+ ext: 'exe',
+ mime: 'application/x-msdownload'
+ };
+ }
+
+ if (
+ (buf[0] === 0x43 || buf[0] === 0x46) &&
+ check([0x57, 0x53], {offset: 1})
+ ) {
+ return {
+ ext: 'swf',
+ mime: 'application/x-shockwave-flash'
+ };
+ }
+
+ if (check([0x7B, 0x5C, 0x72, 0x74, 0x66])) {
+ return {
+ ext: 'rtf',
+ mime: 'application/rtf'
+ };
+ }
+
+ if (check([0x00, 0x61, 0x73, 0x6D])) {
+ return {
+ ext: 'wasm',
+ mime: 'application/wasm'
+ };
+ }
+
+ if (
+ check([0x77, 0x4F, 0x46, 0x46]) &&
+ (
+ check([0x00, 0x01, 0x00, 0x00], {offset: 4}) ||
+ check([0x4F, 0x54, 0x54, 0x4F], {offset: 4})
+ )
+ ) {
+ return {
+ ext: 'woff',
+ mime: 'font/woff'
+ };
+ }
+
+ if (
+ check([0x77, 0x4F, 0x46, 0x32]) &&
+ (
+ check([0x00, 0x01, 0x00, 0x00], {offset: 4}) ||
+ check([0x4F, 0x54, 0x54, 0x4F], {offset: 4})
+ )
+ ) {
+ return {
+ ext: 'woff2',
+ mime: 'font/woff2'
+ };
+ }
+
+ if (
+ check([0x4C, 0x50], {offset: 34}) &&
+ (
+ check([0x00, 0x00, 0x01], {offset: 8}) ||
+ check([0x01, 0x00, 0x02], {offset: 8}) ||
+ check([0x02, 0x00, 0x02], {offset: 8})
+ )
+ ) {
+ return {
+ ext: 'eot',
+ mime: 'application/octet-stream'
+ };
+ }
+
+ if (check([0x00, 0x01, 0x00, 0x00, 0x00])) {
+ return {
+ ext: 'ttf',
+ mime: 'font/ttf'
+ };
+ }
+
+ if (check([0x4F, 0x54, 0x54, 0x4F, 0x00])) {
+ return {
+ ext: 'otf',
+ mime: 'font/otf'
+ };
+ }
+
+ if (check([0x00, 0x00, 0x01, 0x00])) {
+ return {
+ ext: 'ico',
+ mime: 'image/x-icon'
+ };
+ }
+
+ if (check([0x00, 0x00, 0x02, 0x00])) {
+ return {
+ ext: 'cur',
+ mime: 'image/x-icon'
+ };
+ }
+
+ if (check([0x46, 0x4C, 0x56, 0x01])) {
+ return {
+ ext: 'flv',
+ mime: 'video/x-flv'
+ };
+ }
+
+ if (check([0x25, 0x21])) {
+ return {
+ ext: 'ps',
+ mime: 'application/postscript'
+ };
+ }
+
+ if (check([0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])) {
+ return {
+ ext: 'xz',
+ mime: 'application/x-xz'
+ };
+ }
+
+ if (check([0x53, 0x51, 0x4C, 0x69])) {
+ return {
+ ext: 'sqlite',
+ mime: 'application/x-sqlite3'
+ };
+ }
+
+ if (check([0x4E, 0x45, 0x53, 0x1A])) {
+ return {
+ ext: 'nes',
+ mime: 'application/x-nintendo-nes-rom'
+ };
+ }
+
+ if (check([0x43, 0x72, 0x32, 0x34])) {
+ return {
+ ext: 'crx',
+ mime: 'application/x-google-chrome-extension'
+ };
+ }
+
+ if (
+ check([0x4D, 0x53, 0x43, 0x46]) ||
+ check([0x49, 0x53, 0x63, 0x28])
+ ) {
+ return {
+ ext: 'cab',
+ mime: 'application/vnd.ms-cab-compressed'
+ };
+ }
+
+ // Needs to be before `ar` check
+ if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A, 0x64, 0x65, 0x62, 0x69, 0x61, 0x6E, 0x2D, 0x62, 0x69, 0x6E, 0x61, 0x72, 0x79])) {
+ return {
+ ext: 'deb',
+ mime: 'application/x-deb'
+ };
+ }
+
+ if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E])) {
+ return {
+ ext: 'ar',
+ mime: 'application/x-unix-archive'
+ };
+ }
+
+ if (check([0xED, 0xAB, 0xEE, 0xDB])) {
+ return {
+ ext: 'rpm',
+ mime: 'application/x-rpm'
+ };
+ }
+
+ if (
+ check([0x1F, 0xA0]) ||
+ check([0x1F, 0x9D])
+ ) {
+ return {
+ ext: 'Z',
+ mime: 'application/x-compress'
+ };
+ }
+
+ if (check([0x4C, 0x5A, 0x49, 0x50])) {
+ return {
+ ext: 'lz',
+ mime: 'application/x-lzip'
+ };
+ }
+
+ if (check([0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1])) {
+ return {
+ ext: 'msi',
+ mime: 'application/x-msi'
+ };
+ }
+
+ if (check([0x06, 0x0E, 0x2B, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0D, 0x01, 0x02, 0x01, 0x01, 0x02])) {
+ return {
+ ext: 'mxf',
+ mime: 'application/mxf'
+ };
+ }
+
+ if (check([0x47], {offset: 4}) && (check([0x47], {offset: 192}) || check([0x47], {offset: 196}))) {
+ return {
+ ext: 'mts',
+ mime: 'video/mp2t'
+ };
+ }
+
+ if (check([0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52])) {
+ return {
+ ext: 'blend',
+ mime: 'application/x-blender'
+ };
+ }
+
+ if (check([0x42, 0x50, 0x47, 0xFB])) {
+ return {
+ ext: 'bpg',
+ mime: 'image/bpg'
+ };
+ }
+
+ if (check([0x00, 0x00, 0x00, 0x0C, 0x6A, 0x50, 0x20, 0x20, 0x0D, 0x0A, 0x87, 0x0A])) {
+ // JPEG-2000 family
+
+ if (check([0x6A, 0x70, 0x32, 0x20], {offset: 20})) {
+ return {
+ ext: 'jp2',
+ mime: 'image/jp2'
+ };
+ }
+
+ if (check([0x6A, 0x70, 0x78, 0x20], {offset: 20})) {
+ return {
+ ext: 'jpx',
+ mime: 'image/jpx'
+ };
+ }
+
+ if (check([0x6A, 0x70, 0x6D, 0x20], {offset: 20})) {
+ return {
+ ext: 'jpm',
+ mime: 'image/jpm'
+ };
+ }
+
+ if (check([0x6D, 0x6A, 0x70, 0x32], {offset: 20})) {
+ return {
+ ext: 'mj2',
+ mime: 'image/mj2'
+ };
+ }
+ }
+
+ if (check([0x46, 0x4F, 0x52, 0x4D, 0x00])) {
+ return {
+ ext: 'aif',
+ mime: 'audio/aiff'
+ };
+ }
+
+ if (checkString('
diff --git a/node_modules/bin-wrapper/node_modules/file-type/license b/node_modules/bin-wrapper/node_modules/file-type/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/file-type/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/file-type/package.json b/node_modules/bin-wrapper/node_modules/file-type/package.json
new file mode 100644
index 00000000..1aec8d42
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/file-type/package.json
@@ -0,0 +1,123 @@
+{
+ "name": "file-type",
+ "version": "8.1.0",
+ "description": "Detect the file type of a Buffer/Uint8Array",
+ "license": "MIT",
+ "repository": "sindresorhus/file-type",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "mime",
+ "file",
+ "type",
+ "archive",
+ "image",
+ "img",
+ "pic",
+ "picture",
+ "flash",
+ "photo",
+ "video",
+ "detect",
+ "check",
+ "is",
+ "exif",
+ "exe",
+ "binary",
+ "buffer",
+ "uint8array",
+ "jpg",
+ "png",
+ "gif",
+ "webp",
+ "flif",
+ "cr2",
+ "tif",
+ "bmp",
+ "jxr",
+ "psd",
+ "zip",
+ "tar",
+ "rar",
+ "gz",
+ "bz2",
+ "7z",
+ "dmg",
+ "mp4",
+ "m4v",
+ "mid",
+ "mkv",
+ "webm",
+ "mov",
+ "avi",
+ "mpg",
+ "mp2",
+ "mp3",
+ "m4a",
+ "ogg",
+ "opus",
+ "flac",
+ "wav",
+ "amr",
+ "pdf",
+ "epub",
+ "mobi",
+ "swf",
+ "rtf",
+ "woff",
+ "woff2",
+ "eot",
+ "ttf",
+ "otf",
+ "ico",
+ "flv",
+ "ps",
+ "xz",
+ "sqlite",
+ "xpi",
+ "cab",
+ "deb",
+ "ar",
+ "rpm",
+ "Z",
+ "lz",
+ "msi",
+ "mxf",
+ "mts",
+ "wasm",
+ "webassembly",
+ "blend",
+ "bpg",
+ "docx",
+ "pptx",
+ "xlsx",
+ "3gp",
+ "jp2",
+ "jpm",
+ "jpx",
+ "mj2",
+ "aif",
+ "odt",
+ "ods",
+ "odp",
+ "xml",
+ "heic"
+ ],
+ "devDependencies": {
+ "ava": "*",
+ "read-chunk": "^2.0.0",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/file-type/readme.md b/node_modules/bin-wrapper/node_modules/file-type/readme.md
new file mode 100644
index 00000000..bd04b8dc
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/file-type/readme.md
@@ -0,0 +1,186 @@
+# file-type [Build status](https://travis-ci.org/sindresorhus/file-type)
+
+> Detect the file type of a Buffer/Uint8Array
+
+The file type is detected by checking the [magic number](http://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer.
+
+
+## Install
+
+```
+$ npm install file-type
+```
+
+
+## Usage
+
+##### Node.js
+
+```js
+const readChunk = require('read-chunk');
+const fileType = require('file-type');
+const buffer = readChunk.sync('unicorn.png', 0, 4100);
+
+fileType(buffer);
+//=> {ext: 'png', mime: 'image/png'}
+```
+
+Or from a remote location:
+
+```js
+const http = require('http');
+const fileType = require('file-type');
+const url = 'http://assets-cdn.github.com/images/spinners/octocat-spinner-32.gif';
+
+http.get(url, res => {
+ res.once('data', chunk => {
+ res.destroy();
+ console.log(fileType(chunk));
+ //=> {ext: 'gif', mime: 'image/gif'}
+ });
+});
+```
+
+##### Browser
+
+```js
+const xhr = new XMLHttpRequest();
+xhr.open('GET', 'unicorn.png');
+xhr.responseType = 'arraybuffer';
+
+xhr.onload = () => {
+	fileType(new Uint8Array(xhr.response));
+ //=> {ext: 'png', mime: 'image/png'}
+};
+
+xhr.send();
+```
+
+
+## API
+
+### fileType(input)
+
+Returns an `Object` with:
+
+- `ext` - One of the [supported file types](#supported-file-types)
+- `mime` - The [MIME type](http://en.wikipedia.org/wiki/Internet_media_type)
+
+Or `null` when no match.
+
+#### input
+
+Type: `Buffer` `Uint8Array`
+
+It only needs the first 4100 bytes.
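+
+A minimal sketch of both outcomes (the bytes below are just the JPEG magic number, used for illustration):
+
+```js
+const fileType = require('file-type');
+
+// A buffer starting with the JPEG signature (FF D8 FF) is enough for a match
+fileType(Buffer.from([0xFF, 0xD8, 0xFF]));
+//=> {ext: 'jpg', mime: 'image/jpeg'}
+
+// Data that matches no known signature returns `null`
+fileType(Buffer.from('just some text'));
+//=> null
+```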
+
+
+## Supported file types
+
+- [`jpg`](https://en.wikipedia.org/wiki/JPEG)
+- [`png`](https://en.wikipedia.org/wiki/Portable_Network_Graphics)
+- [`gif`](https://en.wikipedia.org/wiki/GIF)
+- [`webp`](https://en.wikipedia.org/wiki/WebP)
+- [`flif`](https://en.wikipedia.org/wiki/Free_Lossless_Image_Format)
+- [`cr2`](http://fileinfo.com/extension/cr2)
+- [`tif`](https://en.wikipedia.org/wiki/Tagged_Image_File_Format)
+- [`bmp`](https://en.wikipedia.org/wiki/BMP_file_format)
+- [`jxr`](https://en.wikipedia.org/wiki/JPEG_XR)
+- [`psd`](https://en.wikipedia.org/wiki/Adobe_Photoshop#File_format)
+- [`zip`](https://en.wikipedia.org/wiki/Zip_(file_format))
+- [`tar`](https://en.wikipedia.org/wiki/Tar_(computing)#File_format)
+- [`rar`](https://en.wikipedia.org/wiki/RAR_(file_format))
+- [`gz`](https://en.wikipedia.org/wiki/Gzip)
+- [`bz2`](https://en.wikipedia.org/wiki/Bzip2)
+- [`7z`](https://en.wikipedia.org/wiki/7z)
+- [`dmg`](https://en.wikipedia.org/wiki/Apple_Disk_Image)
+- [`mp4`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#Filename_extensions)
+- [`m4v`](https://en.wikipedia.org/wiki/M4V)
+- [`mid`](https://en.wikipedia.org/wiki/MIDI)
+- [`mkv`](https://en.wikipedia.org/wiki/Matroska)
+- [`webm`](https://en.wikipedia.org/wiki/WebM)
+- [`mov`](https://en.wikipedia.org/wiki/QuickTime_File_Format)
+- [`avi`](https://en.wikipedia.org/wiki/Audio_Video_Interleave)
+- [`wmv`](https://en.wikipedia.org/wiki/Windows_Media_Video)
+- [`mpg`](https://en.wikipedia.org/wiki/MPEG-1)
+- [`mp2`](https://en.wikipedia.org/wiki/MPEG-1_Audio_Layer_II)
+- [`mp3`](https://en.wikipedia.org/wiki/MP3)
+- [`m4a`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#.MP4_versus_.M4A)
+- [`ogg`](https://en.wikipedia.org/wiki/Ogg)
+- [`opus`](https://en.wikipedia.org/wiki/Opus_(audio_format))
+- [`flac`](https://en.wikipedia.org/wiki/FLAC)
+- [`wav`](https://en.wikipedia.org/wiki/WAV)
+- [`qcp`](https://en.wikipedia.org/wiki/QCP)
+- [`amr`](https://en.wikipedia.org/wiki/Adaptive_Multi-Rate_audio_codec)
+- [`pdf`](https://en.wikipedia.org/wiki/Portable_Document_Format)
+- [`epub`](https://en.wikipedia.org/wiki/EPUB)
+- [`mobi`](https://en.wikipedia.org/wiki/Mobipocket) - Mobipocket
+- [`exe`](https://en.wikipedia.org/wiki/.exe)
+- [`swf`](https://en.wikipedia.org/wiki/SWF)
+- [`rtf`](https://en.wikipedia.org/wiki/Rich_Text_Format)
+- [`woff`](https://en.wikipedia.org/wiki/Web_Open_Font_Format)
+- [`woff2`](https://en.wikipedia.org/wiki/Web_Open_Font_Format)
+- [`eot`](https://en.wikipedia.org/wiki/Embedded_OpenType)
+- [`ttf`](https://en.wikipedia.org/wiki/TrueType)
+- [`otf`](https://en.wikipedia.org/wiki/OpenType)
+- [`ico`](https://en.wikipedia.org/wiki/ICO_(file_format))
+- [`flv`](https://en.wikipedia.org/wiki/Flash_Video)
+- [`ps`](https://en.wikipedia.org/wiki/Postscript)
+- [`xz`](https://en.wikipedia.org/wiki/Xz)
+- [`sqlite`](https://www.sqlite.org/fileformat2.html)
+- [`nes`](http://fileinfo.com/extension/nes)
+- [`crx`](https://developer.chrome.com/extensions/crx)
+- [`xpi`](https://en.wikipedia.org/wiki/XPInstall)
+- [`cab`](https://en.wikipedia.org/wiki/Cabinet_(file_format))
+- [`deb`](https://en.wikipedia.org/wiki/Deb_(file_format))
+- [`ar`](https://en.wikipedia.org/wiki/Ar_(Unix))
+- [`rpm`](http://fileinfo.com/extension/rpm)
+- [`Z`](http://fileinfo.com/extension/z)
+- [`lz`](https://en.wikipedia.org/wiki/Lzip)
+- [`msi`](https://en.wikipedia.org/wiki/Windows_Installer)
+- [`mxf`](https://en.wikipedia.org/wiki/Material_Exchange_Format)
+- [`mts`](https://en.wikipedia.org/wiki/.m2ts)
+- [`wasm`](https://en.wikipedia.org/wiki/WebAssembly)
+- [`blend`](https://wiki.blender.org/index.php/Dev:Source/Architecture/File_Format)
+- [`bpg`](https://bellard.org/bpg/)
+- [`docx`](https://en.wikipedia.org/wiki/Office_Open_XML)
+- [`pptx`](https://en.wikipedia.org/wiki/Office_Open_XML)
+- [`xlsx`](https://en.wikipedia.org/wiki/Office_Open_XML)
+- [`3gp`](https://en.wikipedia.org/wiki/3GP_and_3G2)
+- [`jp2`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
+- [`jpm`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
+- [`jpx`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000
+- [`mj2`](https://en.wikipedia.org/wiki/Motion_JPEG_2000) - Motion JPEG 2000
+- [`aif`](https://en.wikipedia.org/wiki/Audio_Interchange_File_Format)
+- [`odt`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for word processing
+- [`ods`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for spreadsheets
+- [`odp`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for presentations
+- [`xml`](https://en.wikipedia.org/wiki/XML)
+- [`heic`](http://nokiatech.github.io/heif/technical.html)
+- [`cur`](https://en.wikipedia.org/wiki/ICO_(file_format))
+- [`ktx`](https://www.khronos.org/opengles/sdk/tools/KTX/file_format_spec/)
+- [`ape`](https://en.wikipedia.org/wiki/Monkey%27s_Audio) - Monkey's Audio
+
+*SVG isn't included as it requires the whole file to be read, but you can get it [here](https://github.com/sindresorhus/is-svg).*
+
+*Pull requests are welcome for additional commonly used file types.*
+
+
+## Related
+
+- [file-type-cli](https://github.com/sindresorhus/file-type-cli) - CLI for this module
+
+
+## Created by
+
+- [Sindre Sorhus](https://github.com/sindresorhus)
+- [Mikael Finstad](https://github.com/mifi)
+
+
+## License
+
+MIT
diff --git a/node_modules/bin-wrapper/node_modules/get-stream/buffer-stream.js b/node_modules/bin-wrapper/node_modules/get-stream/buffer-stream.js
new file mode 100644
index 00000000..ae45d3d9
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/get-stream/buffer-stream.js
@@ -0,0 +1,51 @@
+'use strict';
+const PassThrough = require('stream').PassThrough;
+
+module.exports = opts => {
+ opts = Object.assign({}, opts);
+
+ const array = opts.array;
+ let encoding = opts.encoding;
+ const buffer = encoding === 'buffer';
+ let objectMode = false;
+
+ if (array) {
+ objectMode = !(encoding || buffer);
+ } else {
+ encoding = encoding || 'utf8';
+ }
+
+ if (buffer) {
+ encoding = null;
+ }
+
+ let len = 0;
+ const ret = [];
+ const stream = new PassThrough({objectMode});
+
+ if (encoding) {
+ stream.setEncoding(encoding);
+ }
+
+ stream.on('data', chunk => {
+ ret.push(chunk);
+
+ if (objectMode) {
+ len = ret.length;
+ } else {
+ len += chunk.length;
+ }
+ });
+
+ stream.getBufferedValue = () => {
+ if (array) {
+ return ret;
+ }
+
+ return buffer ? Buffer.concat(ret, len) : ret.join('');
+ };
+
+ stream.getBufferedLength = () => len;
+
+ return stream;
+};
diff --git a/node_modules/bin-wrapper/node_modules/get-stream/index.js b/node_modules/bin-wrapper/node_modules/get-stream/index.js
new file mode 100644
index 00000000..2dc5ee96
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/get-stream/index.js
@@ -0,0 +1,51 @@
+'use strict';
+const bufferStream = require('./buffer-stream');
+
+function getStream(inputStream, opts) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
+
+ opts = Object.assign({maxBuffer: Infinity}, opts);
+
+ const maxBuffer = opts.maxBuffer;
+ let stream;
+ let clean;
+
+ const p = new Promise((resolve, reject) => {
+ const error = err => {
+ if (err) { // null check
+ err.bufferedData = stream.getBufferedValue();
+ }
+
+ reject(err);
+ };
+
+ stream = bufferStream(opts);
+ inputStream.once('error', error);
+ inputStream.pipe(stream);
+
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ reject(new Error('maxBuffer exceeded'));
+ }
+ });
+ stream.once('error', error);
+ stream.on('end', resolve);
+
+ clean = () => {
+			// some streams don't implement the `stream.Readable` interface correctly
+ if (inputStream.unpipe) {
+ inputStream.unpipe(stream);
+ }
+ };
+ });
+
+ p.then(clean, clean);
+
+ return p.then(() => stream.getBufferedValue());
+}
+
+module.exports = getStream;
+module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'}));
+module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true}));
diff --git a/node_modules/bin-wrapper/node_modules/get-stream/license b/node_modules/bin-wrapper/node_modules/get-stream/license
new file mode 100644
index 00000000..654d0bfe
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/get-stream/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/get-stream/package.json b/node_modules/bin-wrapper/node_modules/get-stream/package.json
new file mode 100644
index 00000000..2f2adf0d
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/get-stream/package.json
@@ -0,0 +1,48 @@
+{
+ "name": "get-stream",
+ "version": "3.0.0",
+ "description": "Get a stream as a string, buffer, or array",
+ "license": "MIT",
+ "repository": "sindresorhus/get-stream",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js",
+ "buffer-stream.js"
+ ],
+ "keywords": [
+ "get",
+ "stream",
+ "promise",
+ "concat",
+ "string",
+ "str",
+ "text",
+ "buffer",
+ "read",
+ "data",
+ "consume",
+ "readable",
+ "readablestream",
+ "array",
+ "object",
+ "obj"
+ ],
+ "devDependencies": {
+ "ava": "*",
+ "into-stream": "^3.0.0",
+ "xo": "*"
+ },
+ "xo": {
+ "esnext": true
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/get-stream/readme.md b/node_modules/bin-wrapper/node_modules/get-stream/readme.md
new file mode 100644
index 00000000..73b188fb
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/get-stream/readme.md
@@ -0,0 +1,117 @@
+# get-stream [Build status](https://travis-ci.org/sindresorhus/get-stream)
+
+> Get a stream as a string, buffer, or array
+
+
+## Install
+
+```
+$ npm install --save get-stream
+```
+
+
+## Usage
+
+```js
+const fs = require('fs');
+const getStream = require('get-stream');
+const stream = fs.createReadStream('unicorn.txt');
+
+getStream(stream).then(str => {
+ console.log(str);
+ /*
+ ,,))))))));,
+ __)))))))))))))),
+ \|/ -\(((((''''((((((((.
+ -*-==//////(('' . `)))))),
+ /|\ ))| o ;-. '((((( ,(,
+ ( `| / ) ;))))' ,_))^;(~
+ | | | ,))((((_ _____------~~~-. %,;(;(>';'~
+ o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~
+ ; ''''```` `: `:::|\,__,%% );`'; ~
+ | _ ) / `:|`----' `-'
+ ______/\/~ | / /
+ /~;;.____/;;' / ___--,-( `;;;/
+ / // _;______;'------~~~~~ /;;/\ /
+ // | | / ; \;;,\
+ (<_ | ; /',/-----' _>
+ \_| ||_ //~;~~~~~~~~~
+ `\_| (,~~
+ \~\
+ ~~
+ */
+});
+```
+
+
+## API
+
+The methods return a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode.
+
+### getStream(stream, [options])
+
+Get the `stream` as a string.
+
+#### options
+
+##### encoding
+
+Type: `string`
+Default: `utf8`
+
+[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream.
+
+##### maxBuffer
+
+Type: `number`
+Default: `Infinity`
+
+Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected.
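+
+A minimal sketch of the rejection (`big-file.txt` is a placeholder assumed to be larger than 50 bytes):
+
+```js
+const fs = require('fs');
+const getStream = require('get-stream');
+
+getStream(fs.createReadStream('big-file.txt'), {maxBuffer: 50})
+	.catch(err => {
+		console.log(err.message);
+		//=> 'maxBuffer exceeded'
+	});
+```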
+
+### getStream.buffer(stream, [options])
+
+Get the `stream` as a buffer.
+
+It honors the `maxBuffer` option as above, but it refers to byte length rather than string length.
+
+### getStream.array(stream, [options])
+
+Get the `stream` as an array of values.
+
+It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen:
+
+- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes).
+
+- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array.
+
+- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array.
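+
+A minimal sketch of the object mode case, using [`into-stream`](https://github.com/sindresorhus/into-stream) (already a dev dependency here) to create the stream:
+
+```js
+const intoStream = require('into-stream');
+const getStream = require('get-stream');
+
+// No `encoding` set, so the object mode values are collected as-is
+getStream.array(intoStream.object([{foo: 1}, {bar: 2}]))
+	.then(array => {
+		console.log(array);
+		//=> [{foo: 1}, {bar: 2}]
+	});
+```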
+
+
+## Errors
+
+If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error.
+
+```js
+getStream(streamThatErrorsAtTheEnd('unicorn'))
+ .catch(err => {
+ console.log(err.bufferedData);
+ //=> 'unicorn'
+ });
+```
+
+
+## FAQ
+
+### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)?
+
+This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have fragile type inference; you explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package.
+
+
+## Related
+
+- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/node_modules/got/errors.js b/node_modules/bin-wrapper/node_modules/got/errors.js
new file mode 100644
index 00000000..ad833881
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/errors.js
@@ -0,0 +1,92 @@
+'use strict';
+const urlLib = require('url');
+const http = require('http');
+const PCancelable = require('p-cancelable');
+const is = require('@sindresorhus/is');
+
+class GotError extends Error {
+ constructor(message, error, opts) {
+ super(message);
+ Error.captureStackTrace(this, this.constructor);
+ this.name = 'GotError';
+
+ if (!is.undefined(error.code)) {
+ this.code = error.code;
+ }
+
+ Object.assign(this, {
+ host: opts.host,
+ hostname: opts.hostname,
+ method: opts.method,
+ path: opts.path,
+ protocol: opts.protocol,
+ url: opts.href
+ });
+ }
+}
+
+module.exports.GotError = GotError;
+
+module.exports.CacheError = class extends GotError {
+ constructor(error, opts) {
+ super(error.message, error, opts);
+ this.name = 'CacheError';
+ }
+};
+
+module.exports.RequestError = class extends GotError {
+ constructor(error, opts) {
+ super(error.message, error, opts);
+ this.name = 'RequestError';
+ }
+};
+
+module.exports.ReadError = class extends GotError {
+ constructor(error, opts) {
+ super(error.message, error, opts);
+ this.name = 'ReadError';
+ }
+};
+
+module.exports.ParseError = class extends GotError {
+ constructor(error, statusCode, opts, data) {
+ super(`${error.message} in "${urlLib.format(opts)}": \n${data.slice(0, 77)}...`, error, opts);
+ this.name = 'ParseError';
+ this.statusCode = statusCode;
+ this.statusMessage = http.STATUS_CODES[this.statusCode];
+ }
+};
+
+module.exports.HTTPError = class extends GotError {
+ constructor(statusCode, statusMessage, headers, opts) {
+ if (statusMessage) {
+ statusMessage = statusMessage.replace(/\r?\n/g, ' ').trim();
+ } else {
+ statusMessage = http.STATUS_CODES[statusCode];
+ }
+ super(`Response code ${statusCode} (${statusMessage})`, {}, opts);
+ this.name = 'HTTPError';
+ this.statusCode = statusCode;
+ this.statusMessage = statusMessage;
+ this.headers = headers;
+ }
+};
+
+module.exports.MaxRedirectsError = class extends GotError {
+ constructor(statusCode, redirectUrls, opts) {
+ super('Redirected 10 times. Aborting.', {}, opts);
+ this.name = 'MaxRedirectsError';
+ this.statusCode = statusCode;
+ this.statusMessage = http.STATUS_CODES[this.statusCode];
+ this.redirectUrls = redirectUrls;
+ }
+};
+
+module.exports.UnsupportedProtocolError = class extends GotError {
+ constructor(opts) {
+ super(`Unsupported protocol "${opts.protocol}"`, {}, opts);
+ this.name = 'UnsupportedProtocolError';
+ }
+};
+
+module.exports.CancelError = PCancelable.CancelError;
diff --git a/node_modules/bin-wrapper/node_modules/got/index.js b/node_modules/bin-wrapper/node_modules/got/index.js
new file mode 100644
index 00000000..9d83b771
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/index.js
@@ -0,0 +1,675 @@
+'use strict';
+const EventEmitter = require('events');
+const http = require('http');
+const https = require('https');
+const PassThrough = require('stream').PassThrough;
+const Transform = require('stream').Transform;
+const urlLib = require('url');
+const fs = require('fs');
+const querystring = require('querystring');
+const CacheableRequest = require('cacheable-request');
+const duplexer3 = require('duplexer3');
+const intoStream = require('into-stream');
+const is = require('@sindresorhus/is');
+const getStream = require('get-stream');
+const timedOut = require('timed-out');
+const urlParseLax = require('url-parse-lax');
+const urlToOptions = require('url-to-options');
+const lowercaseKeys = require('lowercase-keys');
+const decompressResponse = require('decompress-response');
+const mimicResponse = require('mimic-response');
+const isRetryAllowed = require('is-retry-allowed');
+const isURL = require('isurl');
+const PCancelable = require('p-cancelable');
+const pTimeout = require('p-timeout');
+const pify = require('pify');
+const Buffer = require('safe-buffer').Buffer;
+const pkg = require('./package.json');
+const errors = require('./errors');
+
+const getMethodRedirectCodes = new Set([300, 301, 302, 303, 304, 305, 307, 308]);
+const allMethodRedirectCodes = new Set([300, 303, 307, 308]);
+
+const isFormData = body => is.nodeStream(body) && is.function(body.getBoundary);
+
+const getBodySize = opts => {
+ const body = opts.body;
+
+ if (opts.headers['content-length']) {
+ return Number(opts.headers['content-length']);
+ }
+
+ if (!body && !opts.stream) {
+ return 0;
+ }
+
+ if (is.string(body)) {
+ return Buffer.byteLength(body);
+ }
+
+ if (isFormData(body)) {
+ return pify(body.getLength.bind(body))();
+ }
+
+ if (body instanceof fs.ReadStream) {
+ return pify(fs.stat)(body.path).then(stat => stat.size);
+ }
+
+ if (is.nodeStream(body) && is.buffer(body._buffer)) {
+ return body._buffer.length;
+ }
+
+ return null;
+};
+
+function requestAsEventEmitter(opts) {
+ opts = opts || {};
+
+ const ee = new EventEmitter();
+ const requestUrl = opts.href || urlLib.resolve(urlLib.format(opts), opts.path);
+ const redirects = [];
+ const agents = is.object(opts.agent) ? opts.agent : null;
+ let retryCount = 0;
+ let redirectUrl;
+ let uploadBodySize;
+ let uploaded = 0;
+
+ const get = opts => {
+ if (opts.protocol !== 'http:' && opts.protocol !== 'https:') {
+ ee.emit('error', new got.UnsupportedProtocolError(opts));
+ return;
+ }
+
+ let fn = opts.protocol === 'https:' ? https : http;
+
+ if (agents) {
+ const protocolName = opts.protocol === 'https:' ? 'https' : 'http';
+ opts.agent = agents[protocolName] || opts.agent;
+ }
+
+ if (opts.useElectronNet && process.versions.electron) {
+ const electron = require('electron');
+ fn = electron.net || electron.remote.net;
+ }
+
+ let progressInterval;
+
+ const cacheableRequest = new CacheableRequest(fn.request, opts.cache);
+ const cacheReq = cacheableRequest(opts, res => {
+ clearInterval(progressInterval);
+
+ ee.emit('uploadProgress', {
+ percent: 1,
+ transferred: uploaded,
+ total: uploadBodySize
+ });
+
+ const statusCode = res.statusCode;
+
+ res.url = redirectUrl || requestUrl;
+ res.requestUrl = requestUrl;
+
+ const followRedirect = opts.followRedirect && 'location' in res.headers;
+ const redirectGet = followRedirect && getMethodRedirectCodes.has(statusCode);
+ const redirectAll = followRedirect && allMethodRedirectCodes.has(statusCode);
+
+ if (redirectAll || (redirectGet && (opts.method === 'GET' || opts.method === 'HEAD'))) {
+ res.resume();
+
+ if (statusCode === 303) {
+ // Server responded with "see other", indicating that the resource exists at another location,
+ // and the client should request it from that location via GET or HEAD.
+ opts.method = 'GET';
+ }
+
+ if (redirects.length >= 10) {
+ ee.emit('error', new got.MaxRedirectsError(statusCode, redirects, opts), null, res);
+ return;
+ }
+
+ const bufferString = Buffer.from(res.headers.location, 'binary').toString();
+
+ redirectUrl = urlLib.resolve(urlLib.format(opts), bufferString);
+
+ redirects.push(redirectUrl);
+
+ const redirectOpts = Object.assign({}, opts, urlLib.parse(redirectUrl));
+
+ ee.emit('redirect', res, redirectOpts);
+
+ get(redirectOpts);
+
+ return;
+ }
+
+ setImmediate(() => {
+ try {
+ getResponse(res, opts, ee, redirects);
+ } catch (e) {
+ ee.emit('error', e);
+ }
+ });
+ });
+
+ cacheReq.on('error', err => {
+ if (err instanceof CacheableRequest.RequestError) {
+ ee.emit('error', new got.RequestError(err, opts));
+ } else {
+ ee.emit('error', new got.CacheError(err, opts));
+ }
+ });
+
+ cacheReq.once('request', req => {
+ let aborted = false;
+ req.once('abort', _ => {
+ aborted = true;
+ });
+
+ req.once('error', err => {
+ clearInterval(progressInterval);
+
+ if (aborted) {
+ return;
+ }
+
+ const backoff = opts.retries(++retryCount, err);
+
+ if (backoff) {
+ setTimeout(get, backoff, opts);
+ return;
+ }
+
+ ee.emit('error', new got.RequestError(err, opts));
+ });
+
+ ee.once('request', req => {
+ ee.emit('uploadProgress', {
+ percent: 0,
+ transferred: 0,
+ total: uploadBodySize
+ });
+
+ const socket = req.connection;
+ if (socket) {
+ // `._connecting` was the old property which was made public in node v6.1.0
+ const isConnecting = socket.connecting === undefined ? socket._connecting : socket.connecting;
+
+ const onSocketConnect = () => {
+ const uploadEventFrequency = 150;
+
+ progressInterval = setInterval(() => {
+ if (socket.destroyed) {
+ clearInterval(progressInterval);
+ return;
+ }
+
+ const lastUploaded = uploaded;
+ const headersSize = req._header ? Buffer.byteLength(req._header) : 0;
+ uploaded = socket.bytesWritten - headersSize;
+
+ // Prevent the known issue of `bytesWritten` being larger than body size
+ if (uploadBodySize && uploaded > uploadBodySize) {
+ uploaded = uploadBodySize;
+ }
+
+ // Don't emit events with unchanged progress and
+ // prevent last event from being emitted, because
+ // it's emitted when `response` is emitted
+ if (uploaded === lastUploaded || uploaded === uploadBodySize) {
+ return;
+ }
+
+ ee.emit('uploadProgress', {
+ percent: uploadBodySize ? uploaded / uploadBodySize : 0,
+ transferred: uploaded,
+ total: uploadBodySize
+ });
+ }, uploadEventFrequency);
+ };
+
+ // Only subscribe to 'connect' event if we're actually connecting a new
+ // socket, otherwise if we're already connected (because this is a
+ // keep-alive connection) do not bother. This is important since we won't
+ // get a 'connect' event for an already connected socket.
+ if (isConnecting) {
+ socket.once('connect', onSocketConnect);
+ } else {
+ onSocketConnect();
+ }
+ }
+ });
+
+ if (opts.gotTimeout) {
+ clearInterval(progressInterval);
+ timedOut(req, opts.gotTimeout);
+ }
+
+ setImmediate(() => {
+ ee.emit('request', req);
+ });
+ });
+ };
+
+ setImmediate(() => {
+ Promise.resolve(getBodySize(opts))
+ .then(size => {
+ uploadBodySize = size;
+
+ if (
+ is.undefined(opts.headers['content-length']) &&
+ is.undefined(opts.headers['transfer-encoding']) &&
+ isFormData(opts.body)
+ ) {
+ opts.headers['content-length'] = size;
+ }
+
+ get(opts);
+ })
+ .catch(err => {
+ ee.emit('error', err);
+ });
+ });
+
+ return ee;
+}
+
+function getResponse(res, opts, ee, redirects) {
+ const downloadBodySize = Number(res.headers['content-length']) || null;
+ let downloaded = 0;
+
+ const progressStream = new Transform({
+ transform(chunk, encoding, callback) {
+ downloaded += chunk.length;
+
+ const percent = downloadBodySize ? downloaded / downloadBodySize : 0;
+
+ // Let flush() be responsible for emitting the last event
+ if (percent < 1) {
+ ee.emit('downloadProgress', {
+ percent,
+ transferred: downloaded,
+ total: downloadBodySize
+ });
+ }
+
+ callback(null, chunk);
+ },
+
+ flush(callback) {
+ ee.emit('downloadProgress', {
+ percent: 1,
+ transferred: downloaded,
+ total: downloadBodySize
+ });
+
+ callback();
+ }
+ });
+
+ mimicResponse(res, progressStream);
+ progressStream.redirectUrls = redirects;
+
+ const response = opts.decompress === true &&
+ is.function(decompressResponse) &&
+ opts.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
+
+ if (!opts.decompress && ['gzip', 'deflate'].indexOf(res.headers['content-encoding']) !== -1) {
+ opts.encoding = null;
+ }
+
+ ee.emit('response', response);
+
+ ee.emit('downloadProgress', {
+ percent: 0,
+ transferred: 0,
+ total: downloadBodySize
+ });
+
+ res.pipe(progressStream);
+}
+
+function asPromise(opts) {
+ const timeoutFn = requestPromise => opts.gotTimeout && opts.gotTimeout.request ?
+ pTimeout(requestPromise, opts.gotTimeout.request, new got.RequestError({message: 'Request timed out', code: 'ETIMEDOUT'}, opts)) :
+ requestPromise;
+
+ const proxy = new EventEmitter();
+
+ const cancelable = new PCancelable((resolve, reject, onCancel) => {
+ const ee = requestAsEventEmitter(opts);
+ let cancelOnRequest = false;
+
+ onCancel(() => {
+ cancelOnRequest = true;
+ });
+
+ ee.on('request', req => {
+ if (cancelOnRequest) {
+ req.abort();
+ }
+
+ onCancel(() => {
+ req.abort();
+ });
+
+ if (is.nodeStream(opts.body)) {
+ opts.body.pipe(req);
+ opts.body = undefined;
+ return;
+ }
+
+ req.end(opts.body);
+ });
+
+ ee.on('response', res => {
+ const stream = is.null(opts.encoding) ? getStream.buffer(res) : getStream(res, opts);
+
+ stream
+ .catch(err => reject(new got.ReadError(err, opts)))
+ .then(data => {
+ const statusCode = res.statusCode;
+ const limitStatusCode = opts.followRedirect ? 299 : 399;
+
+ res.body = data;
+
+ if (opts.json && res.body) {
+ try {
+ res.body = JSON.parse(res.body);
+ } catch (err) {
+ if (statusCode >= 200 && statusCode < 300) {
+ throw new got.ParseError(err, statusCode, opts, data);
+ }
+ }
+ }
+
+ if (opts.throwHttpErrors && statusCode !== 304 && (statusCode < 200 || statusCode > limitStatusCode)) {
+ throw new got.HTTPError(statusCode, res.statusMessage, res.headers, opts);
+ }
+
+ resolve(res);
+ })
+ .catch(err => {
+ Object.defineProperty(err, 'response', {value: res});
+ reject(err);
+ });
+ });
+
+ ee.once('error', reject);
+ ee.on('redirect', proxy.emit.bind(proxy, 'redirect'));
+ ee.on('uploadProgress', proxy.emit.bind(proxy, 'uploadProgress'));
+ ee.on('downloadProgress', proxy.emit.bind(proxy, 'downloadProgress'));
+ });
+
+ // Preserve backwards-compatibility
+ // TODO: Remove this in the next major version
+ Object.defineProperty(cancelable, 'canceled', {
+ get() {
+ return cancelable.isCanceled;
+ }
+ });
+
+ const promise = timeoutFn(cancelable);
+
+ promise.cancel = cancelable.cancel.bind(cancelable);
+
+ promise.on = (name, fn) => {
+ proxy.on(name, fn);
+ return promise;
+ };
+
+ return promise;
+}
+
+function asStream(opts) {
+ opts.stream = true;
+
+ const input = new PassThrough();
+ const output = new PassThrough();
+ const proxy = duplexer3(input, output);
+ let timeout;
+
+ if (opts.gotTimeout && opts.gotTimeout.request) {
+ timeout = setTimeout(() => {
+ proxy.emit('error', new got.RequestError({message: 'Request timed out', code: 'ETIMEDOUT'}, opts));
+ }, opts.gotTimeout.request);
+ }
+
+ if (opts.json) {
+ throw new Error('Got can not be used as a stream when the `json` option is used');
+ }
+
+ if (opts.body) {
+ proxy.write = () => {
+ throw new Error('Got\'s stream is not writable when the `body` option is used');
+ };
+ }
+
+ const ee = requestAsEventEmitter(opts);
+
+ ee.on('request', req => {
+ proxy.emit('request', req);
+
+ if (is.nodeStream(opts.body)) {
+ opts.body.pipe(req);
+ return;
+ }
+
+ if (opts.body) {
+ req.end(opts.body);
+ return;
+ }
+
+ if (opts.method === 'POST' || opts.method === 'PUT' || opts.method === 'PATCH') {
+ input.pipe(req);
+ return;
+ }
+
+ req.end();
+ });
+
+ ee.on('response', res => {
+ clearTimeout(timeout);
+
+ const statusCode = res.statusCode;
+
+ res.on('error', err => {
+ proxy.emit('error', new got.ReadError(err, opts));
+ });
+
+ res.pipe(output);
+
+ if (opts.throwHttpErrors && statusCode !== 304 && (statusCode < 200 || statusCode > 299)) {
+ proxy.emit('error', new got.HTTPError(statusCode, res.statusMessage, res.headers, opts), null, res);
+ return;
+ }
+
+ proxy.emit('response', res);
+ });
+
+ ee.on('error', proxy.emit.bind(proxy, 'error'));
+ ee.on('redirect', proxy.emit.bind(proxy, 'redirect'));
+ ee.on('uploadProgress', proxy.emit.bind(proxy, 'uploadProgress'));
+ ee.on('downloadProgress', proxy.emit.bind(proxy, 'downloadProgress'));
+
+ return proxy;
+}
+
+function normalizeArguments(url, opts) {
+ if (!is.string(url) && !is.object(url)) {
+ throw new TypeError(`Parameter \`url\` must be a string or object, not ${is(url)}`);
+ } else if (is.string(url)) {
+ url = url.replace(/^unix:/, 'http://$&');
+
+ try {
+ decodeURI(url);
+ } catch (err) {
+ throw new Error('Parameter `url` must contain valid UTF-8 character sequences');
+ }
+
+ url = urlParseLax(url);
+ if (url.auth) {
+ throw new Error('Basic authentication must be done with the `auth` option');
+ }
+ } else if (isURL.lenient(url)) {
+ url = urlToOptions(url);
+ }
+
+ opts = Object.assign(
+ {
+ path: '',
+ retries: 2,
+ cache: false,
+ decompress: true,
+ useElectronNet: false,
+ throwHttpErrors: true
+ },
+ url,
+ {
+ protocol: url.protocol || 'http:' // Override both null/undefined with default protocol
+ },
+ opts
+ );
+
+ const headers = lowercaseKeys(opts.headers);
+ for (const key of Object.keys(headers)) {
+ if (is.nullOrUndefined(headers[key])) {
+ delete headers[key];
+ }
+ }
+
+ opts.headers = Object.assign({
+ 'user-agent': `${pkg.name}/${pkg.version} (https://github.com/sindresorhus/got)`
+ }, headers);
+
+ if (opts.decompress && is.undefined(opts.headers['accept-encoding'])) {
+ opts.headers['accept-encoding'] = 'gzip, deflate';
+ }
+
+ const query = opts.query;
+
+ if (query) {
+ if (!is.string(query)) {
+ opts.query = querystring.stringify(query);
+ }
+
+ opts.path = `${opts.path.split('?')[0]}?${opts.query}`;
+ delete opts.query;
+ }
+
+ if (opts.json && is.undefined(opts.headers.accept)) {
+ opts.headers.accept = 'application/json';
+ }
+
+ const body = opts.body;
+ if (is.nullOrUndefined(body)) {
+ opts.method = (opts.method || 'GET').toUpperCase();
+ } else {
+ const headers = opts.headers;
+ if (!is.nodeStream(body) && !is.string(body) && !is.buffer(body) && !(opts.form || opts.json)) {
+ throw new TypeError('The `body` option must be a stream.Readable, string, Buffer or plain Object');
+ }
+
+ const canBodyBeStringified = is.plainObject(body) || is.array(body);
+ if ((opts.form || opts.json) && !canBodyBeStringified) {
+ throw new TypeError('The `body` option must be a plain Object or Array when the `form` or `json` option is used');
+ }
+
+ if (isFormData(body)) {
+ // Special case for https://github.com/form-data/form-data
+ headers['content-type'] = headers['content-type'] || `multipart/form-data; boundary=${body.getBoundary()}`;
+ } else if (opts.form && canBodyBeStringified) {
+ headers['content-type'] = headers['content-type'] || 'application/x-www-form-urlencoded';
+ opts.body = querystring.stringify(body);
+ } else if (opts.json && canBodyBeStringified) {
+ headers['content-type'] = headers['content-type'] || 'application/json';
+ opts.body = JSON.stringify(body);
+ }
+
+ if (is.undefined(headers['content-length']) && is.undefined(headers['transfer-encoding']) && !is.nodeStream(body)) {
+ const length = is.string(opts.body) ? Buffer.byteLength(opts.body) : opts.body.length;
+ headers['content-length'] = length;
+ }
+
+ // Convert buffer to stream to receive upload progress events
+ // see https://github.com/sindresorhus/got/pull/322
+ if (is.buffer(body)) {
+ opts.body = intoStream(body);
+ opts.body._buffer = body;
+ }
+
+ opts.method = (opts.method || 'POST').toUpperCase();
+ }
+
+ if (opts.hostname === 'unix') {
+ const matches = /(.+?):(.+)/.exec(opts.path);
+
+ if (matches) {
+ opts.socketPath = matches[1];
+ opts.path = matches[2];
+ opts.host = null;
+ }
+ }
+
+ if (!is.function(opts.retries)) {
+ const retries = opts.retries;
+
+ opts.retries = (iter, err) => {
+ if (iter > retries || !isRetryAllowed(err)) {
+ return 0;
+ }
+
+ const noise = Math.random() * 100;
+
+ return ((1 << iter) * 1000) + noise;
+ };
+ }
+
+ if (is.undefined(opts.followRedirect)) {
+ opts.followRedirect = true;
+ }
+
+ if (opts.timeout) {
+ if (is.number(opts.timeout)) {
+ opts.gotTimeout = {request: opts.timeout};
+ } else {
+ opts.gotTimeout = opts.timeout;
+ }
+ delete opts.timeout;
+ }
+
+ return opts;
+}
+
+function got(url, opts) {
+ try {
+ const normalizedArgs = normalizeArguments(url, opts);
+
+ if (normalizedArgs.stream) {
+ return asStream(normalizedArgs);
+ }
+
+ return asPromise(normalizedArgs);
+ } catch (err) {
+ return Promise.reject(err);
+ }
+}
+
+got.stream = (url, opts) => asStream(normalizeArguments(url, opts));
+
+const methods = [
+ 'get',
+ 'post',
+ 'put',
+ 'patch',
+ 'head',
+ 'delete'
+];
+
+for (const method of methods) {
+ got[method] = (url, opts) => got(url, Object.assign({}, opts, {method}));
+ got.stream[method] = (url, opts) => got.stream(url, Object.assign({}, opts, {method}));
+}
+
+Object.assign(got, errors);
+
+module.exports = got;
diff --git a/node_modules/bin-wrapper/node_modules/got/license b/node_modules/bin-wrapper/node_modules/got/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/index.js b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/index.js
new file mode 100644
index 00000000..1dee43ad
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/index.js
@@ -0,0 +1,84 @@
+'use strict';
+
+const processFn = (fn, opts) => function () {
+ const P = opts.promiseModule;
+ const args = new Array(arguments.length);
+
+ for (let i = 0; i < arguments.length; i++) {
+ args[i] = arguments[i];
+ }
+
+ return new P((resolve, reject) => {
+ if (opts.errorFirst) {
+ args.push(function (err, result) {
+ if (opts.multiArgs) {
+ const results = new Array(arguments.length - 1);
+
+ for (let i = 1; i < arguments.length; i++) {
+ results[i - 1] = arguments[i];
+ }
+
+ if (err) {
+ results.unshift(err);
+ reject(results);
+ } else {
+ resolve(results);
+ }
+ } else if (err) {
+ reject(err);
+ } else {
+ resolve(result);
+ }
+ });
+ } else {
+ args.push(function (result) {
+ if (opts.multiArgs) {
+ const results = new Array(arguments.length - 1);
+
+ for (let i = 0; i < arguments.length; i++) {
+ results[i] = arguments[i];
+ }
+
+ resolve(results);
+ } else {
+ resolve(result);
+ }
+ });
+ }
+
+ fn.apply(this, args);
+ });
+};
+
+module.exports = (obj, opts) => {
+ opts = Object.assign({
+ exclude: [/.+(Sync|Stream)$/],
+ errorFirst: true,
+ promiseModule: Promise
+ }, opts);
+
+ const filter = key => {
+ const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key);
+ return opts.include ? opts.include.some(match) : !opts.exclude.some(match);
+ };
+
+ let ret;
+ if (typeof obj === 'function') {
+ ret = function () {
+ if (opts.excludeMain) {
+ return obj.apply(this, arguments);
+ }
+
+ return processFn(obj, opts).apply(this, arguments);
+ };
+ } else {
+ ret = Object.create(Object.getPrototypeOf(obj));
+ }
+
+ for (const key in obj) { // eslint-disable-line guard-for-in
+ const x = obj[key];
+ ret[key] = typeof x === 'function' && filter(key) ? processFn(x, opts) : x;
+ }
+
+ return ret;
+};
diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/license b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/package.json b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/package.json
new file mode 100644
index 00000000..468d8576
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/package.json
@@ -0,0 +1,51 @@
+{
+ "name": "pify",
+ "version": "3.0.0",
+ "description": "Promisify a callback-style function",
+ "license": "MIT",
+ "repository": "sindresorhus/pify",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava && npm run optimization-test",
+ "optimization-test": "node --allow-natives-syntax optimization-test.js"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "promise",
+ "promises",
+ "promisify",
+ "all",
+ "denodify",
+ "denodeify",
+ "callback",
+ "cb",
+ "node",
+ "then",
+ "thenify",
+ "convert",
+ "transform",
+ "wrap",
+ "wrapper",
+ "bind",
+ "to",
+ "async",
+ "await",
+ "es2015",
+ "bluebird"
+ ],
+ "devDependencies": {
+ "ava": "*",
+ "pinkie-promise": "^2.0.0",
+ "v8-natives": "^1.0.0",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/readme.md b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/readme.md
new file mode 100644
index 00000000..376ca4e5
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/readme.md
@@ -0,0 +1,131 @@
+# pify [Build status](https://travis-ci.org/sindresorhus/pify)
+
+> Promisify a callback-style function
+
+
+## Install
+
+```
+$ npm install --save pify
+```
+
+
+## Usage
+
+```js
+const fs = require('fs');
+const pify = require('pify');
+
+// Promisify a single function
+pify(fs.readFile)('package.json', 'utf8').then(data => {
+ console.log(JSON.parse(data).name);
+ //=> 'pify'
+});
+
+// Promisify all methods in a module
+pify(fs).readFile('package.json', 'utf8').then(data => {
+ console.log(JSON.parse(data).name);
+ //=> 'pify'
+});
+```
+
+
+## API
+
+### pify(input, [options])
+
+Returns a `Promise` wrapped version of the supplied function or module.
+
+#### input
+
+Type: `Function` `Object`
+
+Callback-style function or module whose methods you want to promisify.
+
+#### options
+
+##### multiArgs
+
+Type: `boolean`
+Default: `false`
+
+By default, the promisified function will only return the second argument from the callback, which works fine for most APIs. This option can be useful for modules like `request` that return multiple arguments. Turning this on will make it return an array of all arguments from the callback, excluding the error argument, instead of just the second argument. This also applies to rejections, where it returns an array of all the callback arguments, including the error.
+
+```js
+const request = require('request');
+const pify = require('pify');
+
+pify(request, {multiArgs: true})('https://sindresorhus.com').then(result => {
+ const [httpResponse, body] = result;
+});
+```
+
+##### include
+
+Type: `string[]` `RegExp[]`
+
+Methods in a module to promisify. Remaining methods will be left untouched.
+
+##### exclude
+
+Type: `string[]` `RegExp[]`
+Default: `[/.+(Sync|Stream)$/]`
+
+Methods in a module **not** to promisify. Methods with names ending in `'Sync'` or `'Stream'` are excluded by default.
+
+##### excludeMain
+
+Type: `boolean`
+Default: `false`
+
+If the given module is a function itself, it will be promisified. Turn this option on if you want to promisify only the methods of the module.
+
+```js
+const pify = require('pify');
+
+function fn() {
+ return true;
+}
+
+fn.method = (data, callback) => {
+ setImmediate(() => {
+ callback(null, data);
+ });
+};
+
+// Promisify methods but not `fn()`
+const promiseFn = pify(fn, {excludeMain: true});
+
+if (promiseFn()) {
+ promiseFn.method('hi').then(data => {
+ console.log(data);
+ });
+}
+```
+
+##### errorFirst
+
+Type: `boolean`
+Default: `true`
+
+Whether the callback has an error as the first argument. You'll want to set this to `false` if you're dealing with an API that doesn't have an error as the first argument, like `fs.exists()`, some browser APIs, Chrome Extension APIs, etc.
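+
+A minimal sketch using `fs.exists()`, whose callback receives a single boolean and no error argument:
+
+```js
+const fs = require('fs');
+const pify = require('pify');
+
+pify(fs.exists, {errorFirst: false})('package.json').then(exists => {
+	console.log(exists);
+	//=> true (when package.json exists)
+});
+```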
+
+##### promiseModule
+
+Type: `Function`
+
+Custom promise module to use instead of the native one.
+
+Check out [`pinkie-promise`](https://github.com/floatdrop/pinkie-promise) if you need a tiny promise polyfill.
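+
+A minimal sketch using `pinkie-promise` (a dev dependency of this package) as the promise implementation:
+
+```js
+const fs = require('fs');
+const pify = require('pify');
+const pinkiePromise = require('pinkie-promise');
+
+pify(fs.readFile, {promiseModule: pinkiePromise})('package.json', 'utf8').then(data => {
+	console.log(JSON.parse(data).name);
+	//=> 'pify'
+});
+```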
+
+
+## Related
+
+- [p-event](https://github.com/sindresorhus/p-event) - Promisify an event by waiting for it to be emitted
+- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently
+- [More…](https://github.com/sindresorhus/promise-fun)
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/node_modules/got/package.json b/node_modules/bin-wrapper/node_modules/got/package.json
new file mode 100644
index 00000000..78f94a8d
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/package.json
@@ -0,0 +1,95 @@
+{
+ "name": "got",
+ "version": "8.3.2",
+ "description": "Simplified HTTP requests",
+ "license": "MIT",
+ "repository": "sindresorhus/got",
+ "maintainers": [
+ {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ {
+ "name": "Vsevolod Strukchinsky",
+ "email": "floatdrop@gmail.com",
+ "url": "github.com/floatdrop"
+ },
+ {
+ "name": "Alexander Tesfamichael",
+ "email": "alex.tesfamichael@gmail.com",
+ "url": "alextes.me"
+ }
+ ],
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && nyc ava",
+ "coveralls": "nyc report --reporter=text-lcov | coveralls"
+ },
+ "files": [
+ "index.js",
+ "errors.js"
+ ],
+ "keywords": [
+ "http",
+ "https",
+ "get",
+ "got",
+ "url",
+ "uri",
+ "request",
+ "util",
+ "utility",
+ "simple",
+ "curl",
+ "wget",
+ "fetch",
+ "net",
+ "network",
+ "electron"
+ ],
+ "dependencies": {
+ "@sindresorhus/is": "^0.7.0",
+ "cacheable-request": "^2.1.1",
+ "decompress-response": "^3.3.0",
+ "duplexer3": "^0.1.4",
+ "get-stream": "^3.0.0",
+ "into-stream": "^3.1.0",
+ "is-retry-allowed": "^1.1.0",
+ "isurl": "^1.0.0-alpha5",
+ "lowercase-keys": "^1.0.0",
+ "mimic-response": "^1.0.0",
+ "p-cancelable": "^0.4.0",
+ "p-timeout": "^2.0.1",
+ "pify": "^3.0.0",
+ "safe-buffer": "^5.1.1",
+ "timed-out": "^4.0.1",
+ "url-parse-lax": "^3.0.0",
+ "url-to-options": "^1.0.1"
+ },
+ "devDependencies": {
+ "ava": "^0.25.0",
+ "coveralls": "^3.0.0",
+ "form-data": "^2.1.1",
+ "get-port": "^3.0.0",
+ "nyc": "^11.0.2",
+ "p-event": "^1.3.0",
+ "pem": "^1.4.4",
+ "proxyquire": "^1.8.0",
+ "sinon": "^4.0.0",
+ "slow-stream": "0.0.4",
+ "tempfile": "^2.0.0",
+ "tempy": "^0.2.1",
+ "universal-url": "1.0.0-alpha",
+ "xo": "^0.20.0"
+ },
+ "ava": {
+ "concurrency": 4
+ },
+ "browser": {
+ "decompress-response": false,
+ "electron": false
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/got/readme.md b/node_modules/bin-wrapper/node_modules/got/readme.md
new file mode 100644
index 00000000..2347077e
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/got/readme.md
@@ -0,0 +1,650 @@
+# got
+
+> Simplified HTTP requests
+
+[Build status](https://travis-ci.org/sindresorhus/got) [Coverage status](https://coveralls.io/github/sindresorhus/got?branch=master) [npm](https://npmjs.com/got)
+
+A nicer interface to the built-in [`http`](http://nodejs.org/api/http.html) module.
+
+Created because [`request`](https://github.com/request/request) is bloated *(several megabytes!)*.
+
+
+## Highlights
+
+- [Promise & stream API](#api)
+- [Request cancelation](#aborting-the-request)
+- [RFC compliant caching](#cache-adapters)
+- [Follows redirects](#followredirect)
+- [Retries on network failure](#retries)
+- [Progress events](#onuploadprogress-progress)
+- [Handles gzip/deflate](#decompress)
+- [Timeout handling](#timeout)
+- [Errors with metadata](#errors)
+- [JSON mode](#json)
+- [WHATWG URL support](#url)
+- [Electron support](#useelectronnet)
+
+
+## Install
+
+```
+$ npm install got
+```
+
+
+## Usage
+
+```js
+const got = require('got');
+
+(async () => {
+ try {
+ const response = await got('sindresorhus.com');
+ console.log(response.body);
+		//=> '<!doctype html> ...'
+ } catch (error) {
+ console.log(error.response.body);
+ //=> 'Internal server error ...'
+ }
+})();
+```
+
+###### Streams
+
+```js
+const fs = require('fs');
+const got = require('got');
+
+got.stream('sindresorhus.com').pipe(fs.createWriteStream('index.html'));
+
+// For POST, PUT, and PATCH methods `got.stream` returns a `stream.Writable`
+fs.createReadStream('index.html').pipe(got.stream.post('sindresorhus.com'));
+```
+
+
+### API
+
+It's a `GET` request by default, but can be changed by using different methods or in the `options`.
+
+#### got(url, [options])
+
+Returns a Promise for a `response` object with a `body` property, a `url` property with the request URL or the final URL after redirects, and a `requestUrl` property with the original request URL.
+
+The response object will normally be a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage), however if returned from the cache it will be a [responselike object](https://github.com/lukechilds/responselike) which behaves in the same way.
+
+The response will also have a `fromCache` property set with a boolean value.
+
+##### url
+
+Type: `string` `Object`
+
+The URL to request, as a simple string, an [`http.request` options](https://nodejs.org/api/http.html#http_http_request_options_callback) object, or a [WHATWG `URL`](https://nodejs.org/api/url.html#url_class_url).
+
+Properties from `options` will override properties in the parsed `url`.
+
+If no protocol is specified, it will default to `https`.
+
+##### options
+
+Type: `Object`
+
+Any of the [`http.request`](http://nodejs.org/api/http.html#http_http_request_options_callback) options.
+
+###### stream
+
+Type: `boolean`
+Default: `false`
+
+Returns a `Stream` instead of a `Promise`. This is equivalent to calling `got.stream(url, [options])`.
+
+###### body
+
+Type: `string` `Buffer` `stream.Readable`
+
+*This is mutually exclusive with stream mode.*
+
+Body that will be sent with a `POST` request.
+
+If present in `options` and `options.method` is not set, `options.method` will be set to `POST`.
+
+If `content-length` or `transfer-encoding` is not set in `options.headers` and `body` is a string or buffer, `content-length` will be set to the body length.
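+
+A minimal sketch (the endpoint is a placeholder; any server that accepts a POST body works):
+
+```js
+const got = require('got');
+
+(async () => {
+	// `method` is not set, so it defaults to POST because `body` is present
+	const response = await got('httpbin.org/anything', {body: 'unicorn'});
+	console.log(response.body);
+})();
+```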
+
+###### encoding
+
+Type: `string` `null`
+Default: `'utf8'`
+
+[Encoding](https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings) to be used on `setEncoding` of the response data. If `null`, the body is returned as a [`Buffer`](https://nodejs.org/api/buffer.html) (binary data).
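+
+A minimal sketch of binary mode:
+
+```js
+const got = require('got');
+
+(async () => {
+	const response = await got('sindresorhus.com', {encoding: null});
+	console.log(Buffer.isBuffer(response.body));
+	//=> true
+})();
+```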
+
+###### form
+
+Type: `boolean`
+Default: `false`
+
+*This is mutually exclusive with stream mode.*
+
+If set to `true` and `Content-Type` header is not set, it will be set to `application/x-www-form-urlencoded`.
+
+`body` must be a plain object or array and will be stringified.
+
+###### json
+
+Type: `boolean`
+Default: `false`
+
+*This is mutually exclusive with stream mode.*
+
+If set to `true` and `Content-Type` header is not set, it will be set to `application/json`.
+
+Parse response body with `JSON.parse` and set `accept` header to `application/json`. If used in conjunction with the `form` option, the `body` will be stringified as a querystring and the response parsed as JSON.
+
+`body` must be a plain object or array and will be stringified.
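+
+A minimal sketch (`httpbin.org/anything` is just an example endpoint that echoes the request back as JSON):
+
+```js
+const got = require('got');
+
+(async () => {
+	const response = await got.post('httpbin.org/anything', {
+		json: true,
+		body: {unicorn: 'rainbow'}
+	});
+	// The echoed request shows the body was sent as JSON
+	console.log(response.body.json);
+	//=> {unicorn: 'rainbow'}
+})();
+```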
+
+###### query
+
+Type: `string` `Object`
+
+Query string object that will be added to the request URL. This will override the query string in `url`.
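+
+A minimal sketch (same placeholder echo endpoint as above):
+
+```js
+const got = require('got');
+
+(async () => {
+	// Equivalent to requesting 'httpbin.org/anything?hello=world'
+	const response = await got('httpbin.org/anything', {query: {hello: 'world'}});
+	console.log(response.body);
+})();
+```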
+
+###### timeout
+
+Type: `number` `Object`
+
+Milliseconds to wait for the server to end the response before aborting the request with an `ETIMEDOUT` error.
+
+This also accepts an object with separate `connect`, `socket`, and `request` fields for connection, socket, and entire request timeouts.
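+
+A minimal sketch of the object form (the values are arbitrary):
+
+```js
+const got = require('got');
+
+got('sindresorhus.com', {
+	timeout: {
+		connect: 1000,   // time allowed to establish the connection
+		socket: 1000,    // inactivity timeout on the socket
+		request: 10000   // hard limit for the whole request
+	}
+}).catch(error => {
+	// Only reached if one of the limits above is exceeded
+	console.log(error.code);
+});
+```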
+
+###### retries
+
+Type: `number` `Function`
+Default: `2`
+
+Number of request retries when network errors happen. The delay between retries is calculated as `1000 * Math.pow(2, retry) + Math.random() * 100`, where `retry` is the retry attempt number (the first retry is `1`).
+
+The option also accepts a `function` with `retry` and `error` arguments. The function must return the delay in milliseconds (a return value of `0` cancels the retry).
+
+**Note:** if `retries` is a `number`, `ENOTFOUND` and `ENETUNREACH` errors will not be retried (see the full list in the [`is-retry-allowed`](https://github.com/floatdrop/is-retry-allowed/blob/master/index.js#L12) module).
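+
+A minimal sketch of the function form:
+
+```js
+const got = require('got');
+
+got('sindresorhus.com', {
+	// Retry twice with a fixed one second delay; returning 0 stops retrying.
+	// The error is also passed as a second argument if you need it.
+	retries: retry => retry > 2 ? 0 : 1000
+});
+```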
+
+###### followRedirect
+
+Type: `boolean`
+Default: `true`
+
+Defines if redirect responses should be followed automatically.
+
+Note that if a `303` is sent by the server in response to any request type (`POST`, `DELETE`, etc.), got will automatically
+request the resource pointed to in the location header via `GET`. This is in accordance with [the spec](https://tools.ietf.org/html/rfc7231#section-6.4.4).
+
+###### decompress
+
+Type: `boolean`
+Default: `true`
+
+Decompress the response automatically. This will set the `accept-encoding` header to `gzip, deflate` unless you set it yourself.
+
+If this is disabled, a compressed response is returned as a `Buffer`. This may be useful if you want to handle decompression yourself or stream the raw compressed data.
+
+###### cache
+
+Type: `Object`
+Default: `false`
+
+[Cache adapter instance](#cache-adapters) for storing cached data.
+
+###### useElectronNet
+
+Type: `boolean`
+Default: `false`
+
+When used in Electron, Got will use [`electron.net`](https://electronjs.org/docs/api/net/) instead of the Node.js `http` module. According to the Electron docs, it should be fully compatible, but it's not entirely. See [#315](https://github.com/sindresorhus/got/issues/315).
+
+###### throwHttpErrors
+
+Type: `boolean`
+Default: `true`
+
+Determines if a `got.HTTPError` is thrown for error responses (non-2xx status codes).
+
+If this is disabled, requests that encounter an error status code will be resolved with the `response` instead of throwing. This may be useful if you are checking for resource availability and are expecting error responses.
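+
+A minimal sketch (the path is assumed to return a 404):
+
+```js
+const got = require('got');
+
+(async () => {
+	const response = await got('sindresorhus.com/does-not-exist', {throwHttpErrors: false});
+	console.log(response.statusCode);
+	//=> 404
+})();
+```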
+
+#### Streams
+
+#### got.stream(url, [options])
+
+The `stream` method returns a Duplex stream with additional events:
+
+##### .on('request', request)
+
+`request` event to get the request object of the request.
+
+**Tip**: You can use the `request` event to abort the request:
+
+```js
+got.stream('github.com')
+ .on('request', req => setTimeout(() => req.abort(), 50));
+```
+
+##### .on('response', response)
+
+`response` event to get the response object of the final request.
+
+##### .on('redirect', response, nextOptions)
+
+`redirect` event to get the response object of a redirect. The second argument is options for the next request to the redirect location.
+
+##### .on('uploadProgress', progress)
+##### .on('downloadProgress', progress)
+
+Progress events for uploading (sending request) and downloading (receiving response). The `progress` argument is an object like:
+
+```js
+{
+ percent: 0.1,
+ transferred: 1024,
+ total: 10240
+}
+```
+
+If it's not possible to retrieve the body size (can happen when streaming), `total` will be `null`.
+
+**Note**: Progress events can also be used with promises.
+
+```js
+(async () => {
+ const response = await got('sindresorhus.com')
+ .on('downloadProgress', progress => {
+ // Report download progress
+ })
+ .on('uploadProgress', progress => {
+ // Report upload progress
+ });
+
+ console.log(response);
+})();
+```
+
+##### .on('error', error, body, response)
+
+The `error` event is emitted in case of a protocol error (like `ENOTFOUND`) or a status error (4xx or 5xx). The second argument is the body of the server response in case of a status error. The third argument is the response object.
+
+#### got.get(url, [options])
+#### got.post(url, [options])
+#### got.put(url, [options])
+#### got.patch(url, [options])
+#### got.head(url, [options])
+#### got.delete(url, [options])
+
+Sets `options.method` to the method name and makes a request.
+
+
+## Errors
+
+Each error contains (if available) `statusCode`, `statusMessage`, `host`, `hostname`, `method`, `path`, `protocol` and `url` properties to make debugging easier.
+
+In Promise mode, the `response` is attached to the error.
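+
+A minimal sketch of inspecting these properties, assuming the requested path returns a 404:
+
+```js
+const got = require('got');
+
+(async () => {
+	try {
+		await got('sindresorhus.com/this-page-does-not-exist');
+	} catch (error) {
+		// `error` is a `got.HTTPError` here
+		console.log(error.statusCode, error.statusMessage, error.url);
+	}
+})();
+```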
+
+#### got.CacheError
+
+When a cache method fails, for example if the database goes down, or there's a filesystem error.
+
+#### got.RequestError
+
+When a request fails. Contains a `code` property with the error class code, like `ECONNREFUSED`.
+
+#### got.ReadError
+
+When reading from response stream fails.
+
+#### got.ParseError
+
+When the `json` option is enabled, the server response code is 2xx, and `JSON.parse` fails.
+
+#### got.HTTPError
+
+When the server response code is not 2xx. Includes `statusCode`, `statusMessage`, and `redirectUrls` properties.
+
+#### got.MaxRedirectsError
+
+When the server redirects you more than 10 times. Includes a `redirectUrls` property, which is an array of the URLs Got was redirected to before giving up.
+
+#### got.UnsupportedProtocolError
+
+When given an unsupported protocol.
+
+#### got.CancelError
+
+When the request is aborted with `.cancel()`.
+
+
+## Aborting the request
+
+The promise returned by Got has a [`.cancel()`](https://github.com/sindresorhus/p-cancelable) method which, when called, aborts the request.
+
+```js
+(async () => {
+ const request = got(url, options);
+
+ …
+
+ // In another part of the code
+ if (something) {
+ request.cancel();
+ }
+
+ …
+
+ try {
+ await request;
+ } catch (error) {
+ if (request.isCanceled) { // Or `error instanceof got.CancelError`
+ // Handle cancelation
+ }
+
+ // Handle other errors
+ }
+})();
+```
+
+
+## Cache
+
+Got implements [RFC 7234](http://httpwg.org/specs/rfc7234.html) compliant HTTP caching which works out of the box in memory or is easily pluggable with a wide range of storage adapters. Fresh cache entries are served directly from cache and stale cache entries are revalidated with `If-None-Match`/`If-Modified-Since` headers. You can read more about the underlying cache behaviour in the `cacheable-request` [documentation](https://github.com/lukechilds/cacheable-request).
+
+You can use the JavaScript `Map` type as an in-memory cache:
+
+```js
+const got = require('got');
+const map = new Map();
+
+(async () => {
+ let response = await got('sindresorhus.com', {cache: map});
+ console.log(response.fromCache);
+ //=> false
+
+ response = await got('sindresorhus.com', {cache: map});
+ console.log(response.fromCache);
+ //=> true
+})();
+```
+
+Got uses [Keyv](https://github.com/lukechilds/keyv) internally to support a wide range of storage adapters. For something more scalable you could use an [official Keyv storage adapter](https://github.com/lukechilds/keyv#official-storage-adapters):
+
+```
+$ npm install @keyv/redis
+```
+
+```js
+const got = require('got');
+const KeyvRedis = require('@keyv/redis');
+
+const redis = new KeyvRedis('redis://user:pass@localhost:6379');
+
+got('sindresorhus.com', {cache: redis});
+```
+
+Got supports anything that follows the Map API, so it's easy to write your own storage adapter or use a third-party solution.
+
+For example, the following are all valid storage adapters:
+
+```js
+const storageAdapter = new Map();
+// or
+const storageAdapter = require('./my-storage-adapter');
+// or
+const QuickLRU = require('quick-lru');
+const storageAdapter = new QuickLRU({maxSize: 1000});
+
+got('sindresorhus.com', {cache: storageAdapter});
+```
+
+View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters.
+
+
+## Proxies
+
+You can use the [`tunnel`](https://github.com/koichik/node-tunnel) module with the `agent` option to work with proxies:
+
+```js
+const got = require('got');
+const tunnel = require('tunnel');
+
+got('sindresorhus.com', {
+ agent: tunnel.httpOverHttp({
+ proxy: {
+ host: 'localhost'
+ }
+ })
+});
+```
+
+If you require different agents for different protocols, you can pass a map of agents to the `agent` option. This is necessary because a request to one protocol might redirect to another. In such a scenario, `got` will switch over to the right protocol agent for you.
+
+```js
+const got = require('got');
+const HttpAgent = require('agentkeepalive');
+const HttpsAgent = HttpAgent.HttpsAgent;
+
+got('sindresorhus.com', {
+ agent: {
+ http: new HttpAgent(),
+ https: new HttpsAgent()
+ }
+});
+```
+
+
+## Cookies
+
+You can use the [`cookie`](https://github.com/jshttp/cookie) module to include cookies in a request:
+
+```js
+const got = require('got');
+const cookie = require('cookie');
+
+got('google.com', {
+ headers: {
+ cookie: cookie.serialize('foo', 'bar')
+ }
+});
+```
+
+
+## Form data
+
+You can use the [`form-data`](https://github.com/form-data/form-data) module to create a POST request with form data:
+
+```js
+const fs = require('fs');
+const got = require('got');
+const FormData = require('form-data');
+const form = new FormData();
+
+form.append('my_file', fs.createReadStream('/foo/bar.jpg'));
+
+got.post('google.com', {
+ body: form
+});
+```
+
+
+## OAuth
+
+You can use the [`oauth-1.0a`](https://github.com/ddo/oauth-1.0a) module to create a signed OAuth request:
+
+```js
+const got = require('got');
+const crypto = require('crypto');
+const OAuth = require('oauth-1.0a');
+
+const oauth = OAuth({
+ consumer: {
+ key: process.env.CONSUMER_KEY,
+ secret: process.env.CONSUMER_SECRET
+ },
+ signature_method: 'HMAC-SHA1',
+ hash_function: (baseString, key) => crypto.createHmac('sha1', key).update(baseString).digest('base64')
+});
+
+const token = {
+ key: process.env.ACCESS_TOKEN,
+ secret: process.env.ACCESS_TOKEN_SECRET
+};
+
+const url = 'https://api.twitter.com/1.1/statuses/home_timeline.json';
+
+got(url, {
+ headers: oauth.toHeader(oauth.authorize({url, method: 'GET'}, token)),
+ json: true
+});
+```
+
+
+## Unix Domain Sockets
+
+Requests can also be sent via [unix domain sockets](http://serverfault.com/questions/124517/whats-the-difference-between-unix-socket-and-tcp-ip-socket). Use the following URL scheme: `PROTOCOL://unix:SOCKET:PATH`.
+
+- `PROTOCOL` - `http` or `https` *(optional)*
+- `SOCKET` - absolute path to a unix domain socket, e.g. `/var/run/docker.sock`
+- `PATH` - request path, e.g. `/v2/keys`
+
+```js
+got('http://unix:/var/run/docker.sock:/containers/json');
+
+// or without protocol (http by default)
+got('unix:/var/run/docker.sock:/containers/json');
+```
+
+## AWS
+
+Requests to AWS services need to have their headers signed. This can be accomplished by using the [`aws4`](https://www.npmjs.com/package/aws4) package. This is an example for querying an ["Elasticsearch Service"](https://aws.amazon.com/elasticsearch-service/) host with a signed request.
+
+```js
+const url = require('url');
+const AWS = require('aws-sdk');
+const aws4 = require('aws4');
+const got = require('got');
+const config = require('./config');
+
+// Reads keys from the environment or `~/.aws/credentials`. Could be a plain object.
+const awsConfig = new AWS.Config({ region: config.region });
+
+function request(uri, options) {
+ const awsOpts = {
+ region: awsConfig.region,
+ headers: {
+ accept: 'application/json',
+ 'content-type': 'application/json'
+ },
+ method: 'GET',
+ json: true
+ };
+
+ // We need to parse the URL before passing it to `got` so `aws4` can sign the request
+ const opts = Object.assign(url.parse(uri), awsOpts, options);
+ aws4.sign(opts, awsConfig.credentials);
+
+ return got(opts);
+}
+
+request(`https://${config.host}/production/users/1`);
+
+request(`https://${config.host}/production/`, {
+ // All usual `got` options
+});
+```
+
+
+## Testing
+
+You can test your requests by using the [`nock`](https://github.com/node-nock/nock) module to mock an endpoint:
+
+```js
+const got = require('got');
+const nock = require('nock');
+
+nock('https://sindresorhus.com')
+ .get('/')
+ .reply(200, 'Hello world!');
+
+(async () => {
+ const response = await got('sindresorhus.com');
+ console.log(response.body);
+ //=> 'Hello world!'
+})();
+```
+
+If you need real integration tests you can use [`create-test-server`](https://github.com/lukechilds/create-test-server):
+
+```js
+const got = require('got');
+const createTestServer = require('create-test-server');
+
+(async () => {
+ const server = await createTestServer();
+ server.get('/', 'Hello world!');
+
+ const response = await got(server.url);
+ console.log(response.body);
+ //=> 'Hello world!'
+
+ await server.close();
+})();
+```
+
+
+## Tips
+
+### User Agent
+
+It's a good idea to set the `'user-agent'` header so the provider can more easily see how their resource is used. By default, it's the URL to this repo.
+
+```js
+const got = require('got');
+const pkg = require('./package.json');
+
+got('sindresorhus.com', {
+ headers: {
+ 'user-agent': `my-module/${pkg.version} (https://github.com/username/my-module)`
+ }
+});
+```
+
+### 304 Responses
+
+Bear in mind that if you send an `if-modified-since` header and receive a `304 Not Modified` response, the body will be empty. It's your responsibility to cache and retrieve the body contents.
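+
+A minimal sketch of reusing a previously cached body, where `lastModified` and `cachedBody` stand in for whatever you stored from an earlier response:
+
+```js
+const got = require('got');
+
+// Placeholders: in practice these come from a response you cached earlier
+const lastModified = 'Wed, 01 Jan 2020 00:00:00 GMT';
+const cachedBody = '<cached page contents>';
+
+(async () => {
+	const response = await got('sindresorhus.com', {
+		headers: {'if-modified-since': lastModified},
+		throwHttpErrors: false
+	});
+
+	const body = response.statusCode === 304 ? cachedBody : response.body;
+	console.log(body.length);
+})();
+```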
+
+
+## Related
+
+- [gh-got](https://github.com/sindresorhus/gh-got) - Got convenience wrapper to interact with the GitHub API
+- [gl-got](https://github.com/singapore/gl-got) - Got convenience wrapper to interact with the GitLab API
+- [travis-got](https://github.com/samverschueren/travis-got) - Got convenience wrapper to interact with the Travis API
+- [graphql-got](https://github.com/kevva/graphql-got) - Got convenience wrapper to interact with GraphQL
+- [GotQL](https://github.com/khaosdoctor/gotql) - Got convenience wrapper to interact with GraphQL using JSON-parsed queries instead of strings
+
+
+## Created by
+
+[Sindre Sorhus](https://sindresorhus.com) | [Vsevolod Strukchinsky](https://github.com/floatdrop) | [Alexander Tesfamichael](https://alextes.me) | [Luke Childs](https://github.com/lukechilds)
+
+
+## License
+
+MIT
diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/index.js b/node_modules/bin-wrapper/node_modules/p-cancelable/index.js
new file mode 100644
index 00000000..cdd0cfa8
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-cancelable/index.js
@@ -0,0 +1,88 @@
+'use strict';
+
+class CancelError extends Error {
+ constructor() {
+ super('Promise was canceled');
+ this.name = 'CancelError';
+ }
+
+ get isCanceled() {
+ return true;
+ }
+}
+
+class PCancelable {
+ static fn(userFn) {
+ return function () {
+ const args = [].slice.apply(arguments);
+ return new PCancelable((resolve, reject, onCancel) => {
+ args.push(onCancel);
+ userFn.apply(null, args).then(resolve, reject);
+ });
+ };
+ }
+
+ constructor(executor) {
+ this._cancelHandlers = [];
+ this._isPending = true;
+ this._isCanceled = false;
+
+ this._promise = new Promise((resolve, reject) => {
+ this._reject = reject;
+
+ return executor(
+ value => {
+ this._isPending = false;
+ resolve(value);
+ },
+ error => {
+ this._isPending = false;
+ reject(error);
+ },
+ handler => {
+ this._cancelHandlers.push(handler);
+ }
+ );
+ });
+ }
+
+ then(onFulfilled, onRejected) {
+ return this._promise.then(onFulfilled, onRejected);
+ }
+
+ catch(onRejected) {
+ return this._promise.catch(onRejected);
+ }
+
+ finally(onFinally) {
+ return this._promise.finally(onFinally);
+ }
+
+ cancel() {
+ if (!this._isPending || this._isCanceled) {
+ return;
+ }
+
+ if (this._cancelHandlers.length > 0) {
+ try {
+ for (const handler of this._cancelHandlers) {
+ handler();
+ }
+ } catch (err) {
+ this._reject(err);
+ }
+ }
+
+ this._isCanceled = true;
+ this._reject(new CancelError());
+ }
+
+ get isCanceled() {
+ return this._isCanceled;
+ }
+}
+
+Object.setPrototypeOf(PCancelable.prototype, Promise.prototype);
+
+module.exports = PCancelable;
+module.exports.CancelError = CancelError;
diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/license b/node_modules/bin-wrapper/node_modules/p-cancelable/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-cancelable/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/package.json b/node_modules/bin-wrapper/node_modules/p-cancelable/package.json
new file mode 100644
index 00000000..c17dff85
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-cancelable/package.json
@@ -0,0 +1,47 @@
+{
+ "name": "p-cancelable",
+ "version": "0.4.1",
+ "description": "Create a promise that can be canceled",
+ "license": "MIT",
+ "repository": "sindresorhus/p-cancelable",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "promise",
+ "cancelable",
+ "cancel",
+ "canceled",
+ "canceling",
+ "cancellable",
+ "cancellation",
+ "abort",
+ "abortable",
+ "aborting",
+ "cleanup",
+ "task",
+ "token",
+ "async",
+ "function",
+ "await",
+ "promises",
+ "bluebird"
+ ],
+ "devDependencies": {
+ "ava": "*",
+ "delay": "^2.0.0",
+ "promise.prototype.finally": "^3.1.0",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/readme.md b/node_modules/bin-wrapper/node_modules/p-cancelable/readme.md
new file mode 100644
index 00000000..62ec32ee
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-cancelable/readme.md
@@ -0,0 +1,135 @@
+# p-cancelable
+
+> Create a promise that can be canceled
+
+Useful for animation, loading resources, long-running async computations, async iteration, etc.
+
+
+## Install
+
+```
+$ npm install p-cancelable
+```
+
+
+## Usage
+
+```js
+const PCancelable = require('p-cancelable');
+
+const cancelablePromise = new PCancelable((resolve, reject, onCancel) => {
+ const worker = new SomeLongRunningOperation();
+
+ onCancel(() => {
+ worker.close();
+ });
+
+ worker.on('finish', resolve);
+ worker.on('error', reject);
+});
+
+cancelablePromise
+ .then(value => {
+ console.log('Operation finished successfully:', value);
+ })
+ .catch(error => {
+ if (cancelablePromise.isCanceled) {
+ // Handle the cancelation here
+ console.log('Operation was canceled');
+ return;
+ }
+
+ throw error;
+ });
+
+// Cancel the operation after 10 seconds
+setTimeout(() => {
+ cancelablePromise.cancel();
+}, 10000);
+```
+
+
+## API
+
+### new PCancelable(executor)
+
+Same as the [`Promise` constructor](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise), but with an appended `onCancel` parameter in `executor`.
+
+`PCancelable` is a subclass of `Promise`.
+
+#### onCancel(fn)
+
+Type: `Function`
+
+Accepts a function that is called when the promise is canceled.
+
+You're not required to call this function. You can call this function multiple times to add multiple cancel handlers.
+
+### PCancelable#cancel()
+
+Type: `Function`
+
+Cancel the promise.
+
+The cancellation is synchronous. Calling it after the promise has settled or multiple times does nothing.
+
+### PCancelable#isCanceled
+
+Type: `boolean`
+
+Whether the promise is canceled.
+
+### PCancelable.CancelError
+
+Type: `Error`
+
+Rejection reason when `.cancel()` is called.
+
+It includes a `.isCanceled` property for convenience.
+
+### PCancelable.fn(fn)
+
+Convenience method to make your promise-returning or async function cancelable.
+
+The function you specify will have `onCancel` appended to its parameters.
+
+```js
+const fn = PCancelable.fn((input, onCancel) => {
+ const job = new Job();
+
+ onCancel(() => {
+ job.cleanup();
+ });
+
+ return job.start(); //=> Promise
+});
+
+const promise = fn('input'); //=> PCancelable
+
+// …
+
+promise.cancel();
+```
+
+
+## FAQ
+
+### Cancelable vs. Cancellable
+
+[In American English, the verb cancel is usually inflected canceled and canceling—with one l.](http://grammarist.com/spelling/cancel/) Both a [browser API](https://developer.mozilla.org/en-US/docs/Web/API/Event/cancelable) and the [Cancelable Promises proposal](https://github.com/tc39/proposal-cancelable-promises) use this spelling.
+
+### What about the official [Cancelable Promises proposal](https://github.com/tc39/proposal-cancelable-promises)?
+
+~~It's still an early draft and I don't really like its current direction. It complicates everything and will require deep changes in the ecosystem to adapt to it. And the way you have to use cancel tokens is verbose and convoluted. I much prefer the more pragmatic and less invasive approach in this module.~~ The proposal was withdrawn.
+
+
+## Related
+
+- [p-progress](https://github.com/sindresorhus/p-progress) - Create a promise that reports progress
+- [p-lazy](https://github.com/sindresorhus/p-lazy) - Create a lazy promise that defers execution until `.then()` or `.catch()` is called
+- [More…](https://github.com/sindresorhus/promise-fun)
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/node_modules/p-event/index.js b/node_modules/bin-wrapper/node_modules/p-event/index.js
new file mode 100644
index 00000000..23ee4211
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-event/index.js
@@ -0,0 +1,272 @@
+'use strict';
+const pTimeout = require('p-timeout');
+
+const symbolAsyncIterator = Symbol.asyncIterator || '@@asyncIterator';
+
+const normalizeEmitter = emitter => {
+ const addListener = emitter.on || emitter.addListener || emitter.addEventListener;
+ const removeListener = emitter.off || emitter.removeListener || emitter.removeEventListener;
+
+ if (!addListener || !removeListener) {
+ throw new TypeError('Emitter is not compatible');
+ }
+
+ return {
+ addListener: addListener.bind(emitter),
+ removeListener: removeListener.bind(emitter)
+ };
+};
+
+const normalizeEvents = event => Array.isArray(event) ? event : [event];
+
+const multiple = (emitter, event, options) => {
+ let cancel;
+ const ret = new Promise((resolve, reject) => {
+ options = Object.assign({
+ rejectionEvents: ['error'],
+ multiArgs: false,
+ resolveImmediately: false
+ }, options);
+
+ if (!(options.count >= 0 && (options.count === Infinity || Number.isInteger(options.count)))) {
+ throw new TypeError('The `count` option should be at least 0 or more');
+ }
+
+ // Allow multiple events
+ const events = normalizeEvents(event);
+
+ const items = [];
+ const {addListener, removeListener} = normalizeEmitter(emitter);
+
+ const onItem = (...args) => {
+ const value = options.multiArgs ? args : args[0];
+
+ if (options.filter && !options.filter(value)) {
+ return;
+ }
+
+ items.push(value);
+
+ if (options.count === items.length) {
+ cancel();
+ resolve(items);
+ }
+ };
+
+ const rejectHandler = error => {
+ cancel();
+ reject(error);
+ };
+
+ cancel = () => {
+ for (const event of events) {
+ removeListener(event, onItem);
+ }
+
+ for (const rejectionEvent of options.rejectionEvents) {
+ removeListener(rejectionEvent, rejectHandler);
+ }
+ };
+
+ for (const event of events) {
+ addListener(event, onItem);
+ }
+
+ for (const rejectionEvent of options.rejectionEvents) {
+ addListener(rejectionEvent, rejectHandler);
+ }
+
+ if (options.resolveImmediately) {
+ resolve(items);
+ }
+ });
+
+ ret.cancel = cancel;
+
+ if (typeof options.timeout === 'number') {
+ const timeout = pTimeout(ret, options.timeout);
+ timeout.cancel = cancel;
+ return timeout;
+ }
+
+ return ret;
+};
+
+module.exports = (emitter, event, options) => {
+ if (typeof options === 'function') {
+ options = {filter: options};
+ }
+
+ options = Object.assign({}, options, {
+ count: 1,
+ resolveImmediately: false
+ });
+
+ const arrayPromise = multiple(emitter, event, options);
+
+ const promise = arrayPromise.then(array => array[0]);
+ promise.cancel = arrayPromise.cancel;
+
+ return promise;
+};
+
+module.exports.multiple = multiple;
+
+module.exports.iterator = (emitter, event, options) => {
+ if (typeof options === 'function') {
+ options = {filter: options};
+ }
+
+ // Allow multiple events
+ const events = normalizeEvents(event);
+
+ options = Object.assign({
+ rejectionEvents: ['error'],
+ resolutionEvents: [],
+ limit: Infinity,
+ multiArgs: false
+ }, options);
+
+ const {limit} = options;
+ const isValidLimit = limit >= 0 && (limit === Infinity || Number.isInteger(limit));
+ if (!isValidLimit) {
+ throw new TypeError('The `limit` option should be a non-negative integer or Infinity');
+ }
+
+ if (limit === 0) {
+ // Return an empty async iterator to avoid any further cost
+ return {
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ next() {
+ return Promise.resolve({done: true, value: undefined});
+ }
+ };
+ }
+
+ let isLimitReached = false;
+
+ const {addListener, removeListener} = normalizeEmitter(emitter);
+
+ let done = false;
+ let error;
+ let hasPendingError = false;
+ const nextQueue = [];
+ const valueQueue = [];
+ let eventCount = 0;
+
+ const valueHandler = (...args) => {
+ eventCount++;
+ isLimitReached = eventCount === limit;
+
+ const value = options.multiArgs ? args : args[0];
+
+ if (nextQueue.length > 0) {
+ const {resolve} = nextQueue.shift();
+
+ resolve({done: false, value});
+
+ if (isLimitReached) {
+ cancel();
+ }
+
+ return;
+ }
+
+ valueQueue.push(value);
+
+ if (isLimitReached) {
+ cancel();
+ }
+ };
+
+ const cancel = () => {
+ done = true;
+ for (const event of events) {
+ removeListener(event, valueHandler);
+ }
+
+ for (const rejectionEvent of options.rejectionEvents) {
+ removeListener(rejectionEvent, rejectHandler);
+ }
+
+ for (const resolutionEvent of options.resolutionEvents) {
+ removeListener(resolutionEvent, resolveHandler);
+ }
+
+ while (nextQueue.length > 0) {
+ const {resolve} = nextQueue.shift();
+ resolve({done: true, value: undefined});
+ }
+ };
+
+ const rejectHandler = (...args) => {
+ error = options.multiArgs ? args : args[0];
+
+ if (nextQueue.length > 0) {
+ const {reject} = nextQueue.shift();
+ reject(error);
+ } else {
+ hasPendingError = true;
+ }
+
+ cancel();
+ };
+
+ const resolveHandler = (...args) => {
+ const value = options.multiArgs ? args : args[0];
+
+ if (options.filter && !options.filter(value)) {
+ return;
+ }
+
+ if (nextQueue.length > 0) {
+ const {resolve} = nextQueue.shift();
+ resolve({done: true, value});
+ } else {
+ valueQueue.push(value);
+ }
+
+ cancel();
+ };
+
+ for (const event of events) {
+ addListener(event, valueHandler);
+ }
+
+ for (const rejectionEvent of options.rejectionEvents) {
+ addListener(rejectionEvent, rejectHandler);
+ }
+
+ for (const resolutionEvent of options.resolutionEvents) {
+ addListener(resolutionEvent, resolveHandler);
+ }
+
+ return {
+ [symbolAsyncIterator]() {
+ return this;
+ },
+ next() {
+ if (valueQueue.length > 0) {
+ const value = valueQueue.shift();
+ return Promise.resolve({done: done && valueQueue.length === 0 && !isLimitReached, value});
+ }
+
+ if (hasPendingError) {
+ hasPendingError = false;
+ return Promise.reject(error);
+ }
+
+ if (done) {
+ return Promise.resolve({done: true, value: undefined});
+ }
+
+ return new Promise((resolve, reject) => nextQueue.push({resolve, reject}));
+ },
+ return(value) {
+ cancel();
+ return Promise.resolve({done, value});
+ }
+ };
+};
diff --git a/node_modules/bin-wrapper/node_modules/p-event/license b/node_modules/bin-wrapper/node_modules/p-event/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-event/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/p-event/package.json b/node_modules/bin-wrapper/node_modules/p-event/package.json
new file mode 100644
index 00000000..00024aa0
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-event/package.json
@@ -0,0 +1,52 @@
+{
+ "name": "p-event",
+ "version": "2.3.1",
+ "description": "Promisify an event by waiting for it to be emitted",
+ "license": "MIT",
+ "repository": "sindresorhus/p-event",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "promise",
+ "events",
+ "event",
+ "emitter",
+ "eventemitter",
+ "event-emitter",
+ "emit",
+ "emits",
+ "listener",
+ "promisify",
+ "addlistener",
+ "addeventlistener",
+ "wait",
+ "waits",
+ "on",
+ "browser",
+ "dom",
+ "async",
+ "await",
+ "promises",
+ "bluebird"
+ ],
+ "dependencies": {
+ "p-timeout": "^2.0.1"
+ },
+ "devDependencies": {
+ "ava": "^1.2.1",
+ "delay": "^4.1.0",
+ "xo": "^0.24.0"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/p-event/readme.md b/node_modules/bin-wrapper/node_modules/p-event/readme.md
new file mode 100644
index 00000000..840b0e16
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-event/readme.md
@@ -0,0 +1,315 @@
+# p-event
+
+> Promisify an event by waiting for it to be emitted
+
+Useful when you need only one event emission and want to use it with promises or await it in an async function.
+
+It works with any event API in Node.js and the browser (using a bundler).
+
+If you want multiple individual events as they are emitted, you can use the `pEvent.iterator()` method. [Observables](https://medium.com/@benlesh/learning-observable-by-building-observable-d5da57405d87) can be useful too.
+
+
+## Install
+
+```
+$ npm install p-event
+```
+
+
+## Usage
+
+In Node.js:
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+(async () => {
+ try {
+ const result = await pEvent(emitter, 'finish');
+
+ // `emitter` emitted a `finish` event
+ console.log(result);
+ } catch (error) {
+ // `emitter` emitted an `error` event
+ console.error(error);
+ }
+})();
+```
+
+In the browser:
+
+```js
+const pEvent = require('p-event');
+
+(async () => {
+ await pEvent(document, 'DOMContentLoaded');
+ console.log('😎');
+})();
+```
+
+Async iteration:
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+(async () => {
+ const asyncIterator = pEvent.iterator(emitter, 'data', {
+ resolutionEvents: ['finish']
+ });
+
+ for await (const event of asyncIterator) {
+ console.log(event);
+ }
+})();
+```
+
+
+## API
+
+### pEvent(emitter, event, [options])
+### pEvent(emitter, event, filter)
+
+Returns a `Promise` that is fulfilled when `emitter` emits an event matching `event`, or rejects if `emitter` emits any of the events defined in the `rejectionEvents` option.
+
+**Note**: `event` is a string for a single event type, for example, `'data'`. To listen on multiple
+events, pass an array of strings, such as `['started', 'stopped']`.
+
+The returned promise has a `.cancel()` method which, when called, removes the event listeners and causes the promise to never settle.
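+
+A minimal sketch of `.cancel()`, reusing the placeholder emitter from the usage examples above:
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+const promise = pEvent(emitter, 'finish');
+
+// Later, if the result is no longer needed:
+promise.cancel();
+// The listeners are removed and `promise` will never settle
+```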
+
+#### emitter
+
+Type: `Object`
+
+Event emitter object.
+
+Should have either a `.on()`/`.addListener()`/`.addEventListener()` and `.off()`/`.removeListener()`/`.removeEventListener()` method, like the [Node.js `EventEmitter`](https://nodejs.org/api/events.html) and [DOM events](https://developer.mozilla.org/en-US/docs/Web/Events).
+
+#### event
+
+Type: `string | string[]`
+
+Name of the event or events to listen to.
+
+If the same event is defined both here and in `rejectionEvents`, this one takes priority.
+
+#### options
+
+Type: `Object`
+
+##### rejectionEvents
+
+Type: `string[]`
+Default: `['error']`
+
+Events that will reject the promise.
+
+##### multiArgs
+
+Type: `boolean`
+Default: `false`
+
+By default, the promisified function will only return the first argument from the event callback, which works fine for most APIs. This option can be useful for APIs that return multiple arguments in the callback. Turning this on will make it return an array of all arguments from the callback, instead of just the first argument. This also applies to rejections.
+
+Example:
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+(async () => {
+ const [foo, bar] = await pEvent(emitter, 'finish', {multiArgs: true});
+})();
+```
+
+##### timeout
+
+Type: `number`
+Default: `Infinity`
+
+Time in milliseconds before timing out.
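+
+For example, a sketch that gives up if the event is not emitted within one second:
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+(async () => {
+	try {
+		await pEvent(emitter, 'finish', {timeout: 1000});
+	} catch (error) {
+		// Rejects with a timeout error if `finish` was not emitted in time
+		console.error(error);
+	}
+})();
+```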
+
+##### filter
+
+Type: `Function`
+
+Filter function for accepting an event.
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+(async () => {
+ const result = await pEvent(emitter, '🦄', value => value > 3);
+ // Do something with first 🦄 event with a value greater than 3
+})();
+```
+
+### pEvent.multiple(emitter, event, options)
+
+Wait for multiple event emissions. Returns an array.
+
+This method has the same arguments and options as `pEvent()` with the addition of the following options:
+
+#### options
+
+Type: `Object`
+
+##### count
+
+*Required*
+Type: `number`
+
+The number of times the event needs to be emitted before the promise resolves.
+
+##### resolveImmediately
+
+Type: `boolean`
+Default: `false`
+
+Whether to resolve the promise immediately. Emitting one of the `rejectionEvents` won't throw an error.
+
+**Note**: The returned array will be mutated when an event is emitted.
+
+Example:
+
+```js
+const emitter = new EventEmitter();
+
+const promise = pEvent.multiple(emitter, 'hello', {
+ resolveImmediately: true,
+ count: Infinity
+});
+
+const result = await promise;
+console.log(result);
+//=> []
+
+emitter.emit('hello', 'Jack');
+console.log(result);
+//=> ['Jack']
+
+emitter.emit('hello', 'Mark');
+console.log(result);
+//=> ['Jack', 'Mark']
+
+// Stops listening
+emitter.emit('error', new Error('😿'));
+
+emitter.emit('hello', 'John');
+console.log(result);
+//=> ['Jack', 'Mark']
+```
+
+### pEvent.iterator(emitter, event, [options])
+### pEvent.iterator(emitter, event, filter)
+
+Returns an [async iterator](http://2ality.com/2016/10/asynchronous-iteration.html) that lets you asynchronously iterate over events of `event` emitted from `emitter`. The iterator ends when `emitter` emits an event matching any of the events defined in `resolutionEvents`, or rejects if `emitter` emits any of the events defined in the `rejectionEvents` option.
+
+This method has the same arguments and options as `pEvent()` with the addition of the following options:
+
+#### options
+
+Type: `Object`
+
+##### limit
+
+Type: `number` *(non-negative integer)*
+Default: `Infinity`
+
+Maximum number of events for the iterator before it ends. When the limit is reached, the iterator will be marked as `done`. This option is useful for paginating events, for example, fetching 10 events per page.
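+
+For example, a sketch that stops after one "page" of ten events:
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+(async () => {
+	const asyncIterator = pEvent.iterator(emitter, 'data', {limit: 10});
+
+	// Ends automatically once ten `data` events have been consumed
+	for await (const event of asyncIterator) {
+		console.log(event);
+	}
+})();
+```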
+
+##### resolutionEvents
+
+Type: `string[]`
+Default: `[]`
+
+Events that will end the iterator.
+
+
+## Before and after
+
+```js
+const fs = require('fs');
+
+function getOpenReadStream(file, callback) {
+ const stream = fs.createReadStream(file);
+
+ stream.on('open', () => {
+ callback(null, stream);
+ });
+
+ stream.on('error', error => {
+ callback(error);
+ });
+}
+
+getOpenReadStream('unicorn.txt', (error, stream) => {
+ if (error) {
+ console.error(error);
+ return;
+ }
+
+ console.log('File descriptor:', stream.fd);
+ stream.pipe(process.stdout);
+});
+```
+
+```js
+const fs = require('fs');
+const pEvent = require('p-event');
+
+async function getOpenReadStream(file) {
+ const stream = fs.createReadStream(file);
+ await pEvent(stream, 'open');
+ return stream;
+}
+
+(async () => {
+ const stream = await getOpenReadStream('unicorn.txt');
+ console.log('File descriptor:', stream.fd);
+ stream.pipe(process.stdout);
+})().catch(console.error);
+```
+
+
+## Tip
+
+### Dealing with calls that resolve with an error code
+
+Some functions might use a single event both for success and for certain errors. Promises make it easy to have a combined error handler for error events and for successes whose values represent errors.
+
+```js
+const pEvent = require('p-event');
+const emitter = require('./some-event-emitter');
+
+(async () => {
+ try {
+ const result = await pEvent(emitter, 'finish');
+
+ if (result === 'unwanted result') {
+ throw new Error('Emitter finished with an error');
+ }
+
+ // `emitter` emitted a `finish` event with an acceptable value
+ console.log(result);
+ } catch (error) {
+ // `emitter` emitted an `error` event or
+ // emitted a `finish` with 'unwanted result'
+ console.error(error);
+ }
+})();
+```
+
+
+## Related
+
+- [pify](https://github.com/sindresorhus/pify) - Promisify a callback-style function
+- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently
+- [More…](https://github.com/sindresorhus/promise-fun)
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/index.js b/node_modules/bin-wrapper/node_modules/p-timeout/index.js
new file mode 100644
index 00000000..8393646a
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-timeout/index.js
@@ -0,0 +1,44 @@
+'use strict';
+const pFinally = require('p-finally');
+
+class TimeoutError extends Error {
+ constructor(message) {
+ super(message);
+ this.name = 'TimeoutError';
+ }
+}
+
+module.exports = (promise, ms, fallback) => new Promise((resolve, reject) => {
+ if (typeof ms !== 'number' || ms < 0) {
+ throw new TypeError('Expected `ms` to be a positive number');
+ }
+
+ const timer = setTimeout(() => {
+ if (typeof fallback === 'function') {
+ try {
+ resolve(fallback());
+ } catch (err) {
+ reject(err);
+ }
+ return;
+ }
+
+ const message = typeof fallback === 'string' ? fallback : `Promise timed out after ${ms} milliseconds`;
+ const err = fallback instanceof Error ? fallback : new TimeoutError(message);
+
+ if (typeof promise.cancel === 'function') {
+ promise.cancel();
+ }
+
+ reject(err);
+ }, ms);
+
+ pFinally(
+ promise.then(resolve, reject),
+ () => {
+ clearTimeout(timer);
+ }
+ );
+});
+
+module.exports.TimeoutError = TimeoutError;
diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/license b/node_modules/bin-wrapper/node_modules/p-timeout/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-timeout/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/package.json b/node_modules/bin-wrapper/node_modules/p-timeout/package.json
new file mode 100644
index 00000000..48cb322d
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-timeout/package.json
@@ -0,0 +1,43 @@
+{
+ "name": "p-timeout",
+ "version": "2.0.1",
+ "description": "Timeout a promise after a specified amount of time",
+ "license": "MIT",
+ "repository": "sindresorhus/p-timeout",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "promise",
+ "timeout",
+ "error",
+ "invalidate",
+ "async",
+ "await",
+ "promises",
+ "time",
+ "out",
+ "cancel",
+ "bluebird"
+ ],
+ "dependencies": {
+ "p-finally": "^1.0.0"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "delay": "^2.0.0",
+ "p-cancelable": "^0.3.0",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/readme.md b/node_modules/bin-wrapper/node_modules/p-timeout/readme.md
new file mode 100644
index 00000000..94ff3e39
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/p-timeout/readme.md
@@ -0,0 +1,89 @@
+# p-timeout
+
+> Timeout a promise after a specified amount of time
+
+
+## Install
+
+```
+$ npm install p-timeout
+```
+
+
+## Usage
+
+```js
+const delay = require('delay');
+const pTimeout = require('p-timeout');
+
+const delayedPromise = delay(200);
+
+pTimeout(delayedPromise, 50).then(() => 'foo');
+//=> [TimeoutError: Promise timed out after 50 milliseconds]
+```
+
+
+## API
+
+### pTimeout(input, ms, [message | fallback])
+
+Returns a decorated `input` that times out after `ms` milliseconds.
+
+If you pass in a cancelable promise, specifically a promise with a `.cancel()` method, that method will be called when the `pTimeout` promise times out.
+
+#### input
+
+Type: `Promise`
+
+Promise to decorate.
+
+#### ms
+
+Type: `number`
+
+Milliseconds before timing out.
+
+#### message
+
+Type: `string` `Error`
+Default: `'Promise timed out after 50 milliseconds'`
+
+Specify a custom error message or error.
+
+If you supply a custom error, it's recommended to sub-class `pTimeout.TimeoutError`.
+
+#### fallback
+
+Type: `Function`
+
+Do something other than rejecting with an error on timeout.
+
+You could, for example, retry:
+
+```js
+const delay = require('delay');
+const pTimeout = require('p-timeout');
+
+const delayedPromise = () => delay(200);
+
+pTimeout(delayedPromise(), 50, () => {
+ return pTimeout(delayedPromise(), 300);
+});
+```
+
+### pTimeout.TimeoutError
+
+Exposed for instance checking and sub-classing.
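+
+A minimal sketch of instance checking, reusing `delay` from the usage example above:
+
+```js
+const delay = require('delay');
+const pTimeout = require('p-timeout');
+
+pTimeout(delay(200), 50).catch(error => {
+	if (error instanceof pTimeout.TimeoutError) {
+		console.log('The promise timed out');
+	}
+});
+```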
+
+
+## Related
+
+- [delay](https://github.com/sindresorhus/delay) - Delay a promise a specified amount of time
+- [p-min-delay](https://github.com/sindresorhus/p-min-delay) - Delay a promise a minimum amount of time
+- [p-retry](https://github.com/sindresorhus/p-retry) - Retry a promise-returning function
+- [More…](https://github.com/sindresorhus/promise-fun)
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/index.js b/node_modules/bin-wrapper/node_modules/prepend-http/index.js
new file mode 100644
index 00000000..82b3a6b2
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/prepend-http/index.js
@@ -0,0 +1,15 @@
+'use strict';
+module.exports = (url, opts) => {
+ if (typeof url !== 'string') {
+ throw new TypeError(`Expected \`url\` to be of type \`string\`, got \`${typeof url}\``);
+ }
+
+ url = url.trim();
+ opts = Object.assign({https: false}, opts);
+
+ if (/^\.*\/|^(?!localhost)\w+:/.test(url)) {
+ return url;
+ }
+
+ return url.replace(/^(?!(?:\w+:)?\/\/)/, opts.https ? 'https://' : 'http://');
+};
diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/license b/node_modules/bin-wrapper/node_modules/prepend-http/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/prepend-http/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/package.json b/node_modules/bin-wrapper/node_modules/prepend-http/package.json
new file mode 100644
index 00000000..cbfac022
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/prepend-http/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "prepend-http",
+ "version": "2.0.0",
+ "description": "Prepend `http://` to humanized URLs like todomvc.com and localhost",
+ "license": "MIT",
+ "repository": "sindresorhus/prepend-http",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "prepend",
+ "protocol",
+ "scheme",
+ "url",
+ "uri",
+ "http",
+ "https",
+ "humanized"
+ ],
+ "devDependencies": {
+ "ava": "*",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/readme.md b/node_modules/bin-wrapper/node_modules/prepend-http/readme.md
new file mode 100644
index 00000000..55d640df
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/prepend-http/readme.md
@@ -0,0 +1,56 @@
+# prepend-http
+
+> Prepend `http://` to humanized URLs like `todomvc.com` and `localhost`
+
+
+## Install
+
+```
+$ npm install prepend-http
+```
+
+
+## Usage
+
+```js
+const prependHttp = require('prepend-http');
+
+prependHttp('todomvc.com');
+//=> 'http://todomvc.com'
+
+prependHttp('localhost');
+//=> 'http://localhost'
+
+prependHttp('http://todomvc.com');
+//=> 'http://todomvc.com'
+
+prependHttp('todomvc.com', {https: true});
+//=> 'https://todomvc.com'
+```
+
+
+## API
+
+### prependHttp(url, [options])
+
+#### url
+
+Type: `string`
+
+URL to prepend `http://` on.
+
+#### options
+
+Type: `Object`
+
+##### https
+
+Type: `boolean`
+Default: `false`
+
+Prepend `https://` instead of `http://`.
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/index.js b/node_modules/bin-wrapper/node_modules/url-parse-lax/index.js
new file mode 100644
index 00000000..5c62a589
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/index.js
@@ -0,0 +1,12 @@
+'use strict';
+const url = require('url');
+const prependHttp = require('prepend-http');
+
+module.exports = (input, options) => {
+ if (typeof input !== 'string') {
+ throw new TypeError(`Expected \`url\` to be of type \`string\`, got \`${typeof input}\` instead.`);
+ }
+
+ const finalUrl = prependHttp(input, Object.assign({https: true}, options));
+ return url.parse(finalUrl);
+};
diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/license b/node_modules/bin-wrapper/node_modules/url-parse-lax/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/package.json b/node_modules/bin-wrapper/node_modules/url-parse-lax/package.json
new file mode 100644
index 00000000..b3c58f97
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/package.json
@@ -0,0 +1,42 @@
+{
+ "name": "url-parse-lax",
+ "version": "3.0.0",
+ "description": "Lax url.parse() with support for protocol-less URLs & IPs",
+ "license": "MIT",
+ "repository": "sindresorhus/url-parse-lax",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "url",
+ "uri",
+ "parse",
+ "parser",
+ "loose",
+ "lax",
+ "protocol",
+ "less",
+ "protocol-less",
+ "ip",
+ "ipv4",
+ "ipv6"
+ ],
+ "dependencies": {
+ "prepend-http": "^2.0.0"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/readme.md b/node_modules/bin-wrapper/node_modules/url-parse-lax/readme.md
new file mode 100644
index 00000000..be0d4371
--- /dev/null
+++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/readme.md
@@ -0,0 +1,127 @@
+# url-parse-lax
+
+> Lax [`url.parse()`](https://nodejs.org/docs/latest/api/url.html#url_url_parse_urlstr_parsequerystring_slashesdenotehost) with support for protocol-less URLs & IPs
+
+
+## Install
+
+```
+$ npm install url-parse-lax
+```
+
+
+## Usage
+
+```js
+const urlParseLax = require('url-parse-lax');
+
+urlParseLax('sindresorhus.com');
+/*
+{
+ protocol: 'https:',
+ slashes: true,
+ auth: null,
+ host: 'sindresorhus.com',
+ port: null,
+ hostname: 'sindresorhus.com',
+ hash: null,
+ search: null,
+ query: null,
+ pathname: '/',
+ path: '/',
+ href: 'https://sindresorhus.com/'
+}
+*/
+
+urlParseLax('[2001:db8::]:8000');
+/*
+{
+ protocol: null,
+ slashes: true,
+ auth: null,
+ host: '[2001:db8::]:8000',
+ port: '8000',
+ hostname: '2001:db8::',
+ hash: null,
+ search: null,
+ query: null,
+ pathname: '/',
+ path: '/',
+ href: 'http://[2001:db8::]:8000/'
+}
+*/
+```
+
+And with the built-in `url.parse()`:
+
+```js
+const url = require('url');
+
+url.parse('sindresorhus.com');
+/*
+{
+ protocol: null,
+ slashes: null,
+ auth: null,
+ host: null,
+ port: null,
+ hostname: null,
+ hash: null,
+ search: null,
+ query: null,
+ pathname: 'sindresorhus',
+ path: 'sindresorhus',
+ href: 'sindresorhus'
+}
+*/
+
+url.parse('[2001:db8::]:8000');
+/*
+{
+ protocol: null,
+ slashes: null,
+ auth: null,
+ host: null,
+ port: null,
+ hostname: null,
+ hash: null,
+ search: null,
+ query: null,
+ pathname: '[2001:db8::]:8000',
+ path: '[2001:db8::]:8000',
+ href: '[2001:db8::]:8000'
+}
+*/
+```
+
+
+## API
+
+### urlParseLax(url, [options])
+
+#### url
+
+Type: `string`
+
+URL to parse.
+
+#### options
+
+Type: `Object`
+
+##### https
+
+Type: `boolean`
+Default: `true`
+
+Prepend `https://` instead of `http://` to protocol-less URLs.
+
+
+## Related
+
+- [url-format-lax](https://github.com/sindresorhus/url-format-lax) - Lax `url.format()` that formats a hostname and port into IPv6-compatible socket form of `hostname:port`
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/bin-wrapper/package.json b/node_modules/bin-wrapper/package.json
new file mode 100644
index 00000000..e49e305d
--- /dev/null
+++ b/node_modules/bin-wrapper/package.json
@@ -0,0 +1,44 @@
+{
+ "name": "bin-wrapper",
+ "version": "4.1.0",
+ "description": "Binary wrapper that makes your programs seamlessly available as local dependencies",
+ "license": "MIT",
+ "repository": "kevva/bin-wrapper",
+ "author": {
+ "name": "Kevin Mårtensson",
+ "email": "kevinmartensson@gmail.com",
+ "url": "https://github.com/kevva"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "bin",
+ "check",
+ "local",
+ "wrapper"
+ ],
+ "dependencies": {
+ "bin-check": "^4.1.0",
+ "bin-version-check": "^4.0.0",
+ "download": "^7.1.0",
+ "import-lazy": "^3.1.0",
+ "os-filter-obj": "^2.0.0",
+ "pify": "^4.0.1"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "executable": "^4.1.1",
+ "nock": "^10.0.2",
+ "path-exists": "^3.0.0",
+ "rimraf": "^2.6.2",
+ "tempy": "^0.2.1",
+ "xo": "*"
+ }
+}
diff --git a/node_modules/bin-wrapper/readme.md b/node_modules/bin-wrapper/readme.md
new file mode 100644
index 00000000..e57de2a7
--- /dev/null
+++ b/node_modules/bin-wrapper/readme.md
@@ -0,0 +1,132 @@
+# bin-wrapper
+
+> Binary wrapper that makes your programs seamlessly available as local dependencies
+
+
+## Install
+
+```
+$ npm install bin-wrapper
+```
+
+
+## Usage
+
+```js
+const path = require('path');
+const BinWrapper = require('bin-wrapper');
+
+const base = 'https://github.com/imagemin/gifsicle-bin/raw/master/vendor';
+const bin = new BinWrapper()
+ .src(`${base}/macos/gifsicle`, 'darwin')
+ .src(`${base}/linux/x64/gifsicle`, 'linux', 'x64')
+ .src(`${base}/win/x64/gifsicle.exe`, 'win32', 'x64')
+ .dest(path.join('vendor'))
+ .use(process.platform === 'win32' ? 'gifsicle.exe' : 'gifsicle')
+ .version('>=1.71');
+
+(async () => {
+ await bin.run(['--version']);
+ console.log('gifsicle is working');
+})();
+```
+
+Get the path to your binary with `bin.path()`:
+
+```js
+console.log(bin.path());
+//=> 'path/to/vendor/gifsicle'
+```
+
+
+## API
+
+### `new BinWrapper(options)`
+
+Creates a new `BinWrapper` instance.
+
+#### options
+
+Type: `Object`
+
+##### skipCheck
+
+Type: `boolean`
+Default: `false`
+
+Whether to skip the binary check or not.
+
+##### strip
+
+Type: `number`
+Default: `1`
+
+Strip a number of leading path components from file names on extraction.
+
+### .src(url, [os], [arch])
+
+Adds a source to download.
+
+#### url
+
+Type: `string`
+
+Accepts a URL pointing to a file to download.
+
+#### os
+
+Type: `string`
+
+Tie the source to a specific OS.
+
+#### arch
+
+Type: `string`
+
+Tie the source to a specific arch.
+
+### .dest(destination)
+
+#### destination
+
+Type: `string`
+
+Accepts a path to which the files will be downloaded.
+
+### .use(binary)
+
+#### binary
+
+Type: `string`
+
+Define which file to use as the binary.
+
+### .path()
+
+Returns the full path to your binary.
+
+### .version(range)
+
+#### range
+
+Type: `string`
+
+Define a [semver range](https://github.com/isaacs/node-semver#ranges) to check
+the binary against.
+
+### .run([arguments])
+
+Runs the search for the binary. If no binary is found it will download the file
+using the URL provided in `.src()`.
+
+#### arguments
+
+Type: `Array`
+Default: `['--version']`
+
+Command to run the binary with. If it exits with code `0` it means that the
+binary is working.
+
+
+## License
+
+MIT © [Kevin Mårtensson](http://kevinmartensson.com)
diff --git a/node_modules/binary-extensions/binary-extensions.json b/node_modules/binary-extensions/binary-extensions.json
new file mode 100644
index 00000000..4aab3837
--- /dev/null
+++ b/node_modules/binary-extensions/binary-extensions.json
@@ -0,0 +1,260 @@
+[
+ "3dm",
+ "3ds",
+ "3g2",
+ "3gp",
+ "7z",
+ "a",
+ "aac",
+ "adp",
+ "ai",
+ "aif",
+ "aiff",
+ "alz",
+ "ape",
+ "apk",
+ "appimage",
+ "ar",
+ "arj",
+ "asf",
+ "au",
+ "avi",
+ "bak",
+ "baml",
+ "bh",
+ "bin",
+ "bk",
+ "bmp",
+ "btif",
+ "bz2",
+ "bzip2",
+ "cab",
+ "caf",
+ "cgm",
+ "class",
+ "cmx",
+ "cpio",
+ "cr2",
+ "cur",
+ "dat",
+ "dcm",
+ "deb",
+ "dex",
+ "djvu",
+ "dll",
+ "dmg",
+ "dng",
+ "doc",
+ "docm",
+ "docx",
+ "dot",
+ "dotm",
+ "dra",
+ "DS_Store",
+ "dsk",
+ "dts",
+ "dtshd",
+ "dvb",
+ "dwg",
+ "dxf",
+ "ecelp4800",
+ "ecelp7470",
+ "ecelp9600",
+ "egg",
+ "eol",
+ "eot",
+ "epub",
+ "exe",
+ "f4v",
+ "fbs",
+ "fh",
+ "fla",
+ "flac",
+ "flatpak",
+ "fli",
+ "flv",
+ "fpx",
+ "fst",
+ "fvt",
+ "g3",
+ "gh",
+ "gif",
+ "graffle",
+ "gz",
+ "gzip",
+ "h261",
+ "h263",
+ "h264",
+ "icns",
+ "ico",
+ "ief",
+ "img",
+ "ipa",
+ "iso",
+ "jar",
+ "jpeg",
+ "jpg",
+ "jpgv",
+ "jpm",
+ "jxr",
+ "key",
+ "ktx",
+ "lha",
+ "lib",
+ "lvp",
+ "lz",
+ "lzh",
+ "lzma",
+ "lzo",
+ "m3u",
+ "m4a",
+ "m4v",
+ "mar",
+ "mdi",
+ "mht",
+ "mid",
+ "midi",
+ "mj2",
+ "mka",
+ "mkv",
+ "mmr",
+ "mng",
+ "mobi",
+ "mov",
+ "movie",
+ "mp3",
+ "mp4",
+ "mp4a",
+ "mpeg",
+ "mpg",
+ "mpga",
+ "mxu",
+ "nef",
+ "npx",
+ "numbers",
+ "nupkg",
+ "o",
+ "odp",
+ "ods",
+ "odt",
+ "oga",
+ "ogg",
+ "ogv",
+ "otf",
+ "ott",
+ "pages",
+ "pbm",
+ "pcx",
+ "pdb",
+ "pdf",
+ "pea",
+ "pgm",
+ "pic",
+ "png",
+ "pnm",
+ "pot",
+ "potm",
+ "potx",
+ "ppa",
+ "ppam",
+ "ppm",
+ "pps",
+ "ppsm",
+ "ppsx",
+ "ppt",
+ "pptm",
+ "pptx",
+ "psd",
+ "pya",
+ "pyc",
+ "pyo",
+ "pyv",
+ "qt",
+ "rar",
+ "ras",
+ "raw",
+ "resources",
+ "rgb",
+ "rip",
+ "rlc",
+ "rmf",
+ "rmvb",
+ "rpm",
+ "rtf",
+ "rz",
+ "s3m",
+ "s7z",
+ "scpt",
+ "sgi",
+ "shar",
+ "snap",
+ "sil",
+ "sketch",
+ "slk",
+ "smv",
+ "snk",
+ "so",
+ "stl",
+ "suo",
+ "sub",
+ "swf",
+ "tar",
+ "tbz",
+ "tbz2",
+ "tga",
+ "tgz",
+ "thmx",
+ "tif",
+ "tiff",
+ "tlz",
+ "ttc",
+ "ttf",
+ "txz",
+ "udf",
+ "uvh",
+ "uvi",
+ "uvm",
+ "uvp",
+ "uvs",
+ "uvu",
+ "viv",
+ "vob",
+ "war",
+ "wav",
+ "wax",
+ "wbmp",
+ "wdp",
+ "weba",
+ "webm",
+ "webp",
+ "whl",
+ "wim",
+ "wm",
+ "wma",
+ "wmv",
+ "wmx",
+ "woff",
+ "woff2",
+ "wrm",
+ "wvx",
+ "xbm",
+ "xif",
+ "xla",
+ "xlam",
+ "xls",
+ "xlsb",
+ "xlsm",
+ "xlsx",
+ "xlt",
+ "xltm",
+ "xltx",
+ "xm",
+ "xmind",
+ "xpi",
+ "xpm",
+ "xwd",
+ "xz",
+ "z",
+ "zip",
+ "zipx"
+]
diff --git a/node_modules/binary-extensions/binary-extensions.json.d.ts b/node_modules/binary-extensions/binary-extensions.json.d.ts
new file mode 100644
index 00000000..94a248c2
--- /dev/null
+++ b/node_modules/binary-extensions/binary-extensions.json.d.ts
@@ -0,0 +1,3 @@
+declare const binaryExtensionsJson: readonly string[];
+
+export = binaryExtensionsJson;
diff --git a/node_modules/binary-extensions/index.d.ts b/node_modules/binary-extensions/index.d.ts
new file mode 100644
index 00000000..f469ac5f
--- /dev/null
+++ b/node_modules/binary-extensions/index.d.ts
@@ -0,0 +1,14 @@
+/**
+List of binary file extensions.
+
+@example
+```
+import binaryExtensions = require('binary-extensions');
+
+console.log(binaryExtensions);
+//=> ['3ds', '3g2', …]
+```
+*/
+declare const binaryExtensions: readonly string[];
+
+export = binaryExtensions;
diff --git a/node_modules/binary-extensions/index.js b/node_modules/binary-extensions/index.js
new file mode 100644
index 00000000..d46e4688
--- /dev/null
+++ b/node_modules/binary-extensions/index.js
@@ -0,0 +1 @@
+module.exports = require('./binary-extensions.json');
diff --git a/node_modules/binary-extensions/license b/node_modules/binary-extensions/license
new file mode 100644
index 00000000..401b1c73
--- /dev/null
+++ b/node_modules/binary-extensions/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) 2019 Sindre Sorhus (https://sindresorhus.com), Paul Miller (https://paulmillr.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/binary-extensions/package.json b/node_modules/binary-extensions/package.json
new file mode 100644
index 00000000..c4d36417
--- /dev/null
+++ b/node_modules/binary-extensions/package.json
@@ -0,0 +1,38 @@
+{
+ "name": "binary-extensions",
+ "version": "2.2.0",
+ "description": "List of binary file extensions",
+ "license": "MIT",
+ "repository": "sindresorhus/binary-extensions",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts",
+ "binary-extensions.json",
+ "binary-extensions.json.d.ts"
+ ],
+ "keywords": [
+ "binary",
+ "extensions",
+ "extension",
+ "file",
+ "json",
+ "list",
+ "array"
+ ],
+ "devDependencies": {
+ "ava": "^1.4.1",
+ "tsd": "^0.7.2",
+ "xo": "^0.24.0"
+ }
+}
diff --git a/node_modules/binary-extensions/readme.md b/node_modules/binary-extensions/readme.md
new file mode 100644
index 00000000..3e25dd83
--- /dev/null
+++ b/node_modules/binary-extensions/readme.md
@@ -0,0 +1,41 @@
+# binary-extensions
+
+> List of binary file extensions
+
+The list is just a [JSON file](binary-extensions.json) and can be used anywhere.
+
+
+## Install
+
+```
+$ npm install binary-extensions
+```
+
+
+## Usage
+
+```js
+const binaryExtensions = require('binary-extensions');
+
+console.log(binaryExtensions);
+//=> ['3ds', '3g2', …]
+```
+
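+A quick sketch of using the list to check a file's extension (the file name is made up):
+
+```js
+const path = require('path');
+const binaryExtensions = require('binary-extensions');
+
+const extension = path.extname('photo.PNG').slice(1).toLowerCase();
+
+console.log(binaryExtensions.includes(extension));
+//=> true
+```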
+
+## Related
+
+- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file
+- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions
+
+
diff --git a/node_modules/bl/.jshintrc b/node_modules/bl/.jshintrc
new file mode 100644
index 00000000..c8ef3ca4
--- /dev/null
+++ b/node_modules/bl/.jshintrc
@@ -0,0 +1,59 @@
+{
+ "predef": [ ]
+ , "bitwise": false
+ , "camelcase": false
+ , "curly": false
+ , "eqeqeq": false
+ , "forin": false
+ , "immed": false
+ , "latedef": false
+ , "noarg": true
+ , "noempty": true
+ , "nonew": true
+ , "plusplus": false
+ , "quotmark": true
+ , "regexp": false
+ , "undef": true
+ , "unused": true
+ , "strict": false
+ , "trailing": true
+ , "maxlen": 120
+ , "asi": true
+ , "boss": true
+ , "debug": true
+ , "eqnull": true
+ , "esnext": true
+ , "evil": true
+ , "expr": true
+ , "funcscope": false
+ , "globalstrict": false
+ , "iterator": false
+ , "lastsemic": true
+ , "laxbreak": true
+ , "laxcomma": true
+ , "loopfunc": true
+ , "multistr": false
+ , "onecase": false
+ , "proto": false
+ , "regexdash": false
+ , "scripturl": true
+ , "smarttabs": false
+ , "shadow": false
+ , "sub": true
+ , "supernew": false
+ , "validthis": true
+ , "browser": true
+ , "couch": false
+ , "devel": false
+ , "dojo": false
+ , "mootools": false
+ , "node": true
+ , "nonstandard": true
+ , "prototypejs": false
+ , "rhino": false
+ , "worker": true
+ , "wsh": false
+ , "nomen": false
+ , "onevar": false
+ , "passfail": false
+}
\ No newline at end of file
diff --git a/node_modules/bl/.travis.yml b/node_modules/bl/.travis.yml
new file mode 100644
index 00000000..a349506c
--- /dev/null
+++ b/node_modules/bl/.travis.yml
@@ -0,0 +1,13 @@
+sudo: false
+language: node_js
+node_js:
+ - '6'
+ - '8'
+ - '10'
+ - '12'
+ - '14'
+ - lts/*
+notifications:
+ email:
+ - rod@vagg.org
+ - matteo.collina@gmail.com
diff --git a/node_modules/bl/LICENSE.md b/node_modules/bl/LICENSE.md
new file mode 100644
index 00000000..ff35a347
--- /dev/null
+++ b/node_modules/bl/LICENSE.md
@@ -0,0 +1,13 @@
+The MIT License (MIT)
+=====================
+
+Copyright (c) 2013-2016 bl contributors
+----------------------------------
+
+*bl contributors listed at *
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bl/README.md b/node_modules/bl/README.md
new file mode 100644
index 00000000..9eebd88b
--- /dev/null
+++ b/node_modules/bl/README.md
@@ -0,0 +1,208 @@
+# bl *(BufferList)*
+
+
+**A Node.js Buffer list collector, reader and streamer thingy.**
+
+
+**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!
+
+The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.
+
+```js
+const BufferList = require('bl')
+
+var bl = new BufferList()
+bl.append(new Buffer('abcd'))
+bl.append(new Buffer('efg'))
+bl.append('hi') // bl will also accept & convert Strings
+bl.append(new Buffer('j'))
+bl.append(new Buffer([ 0x3, 0x4 ]))
+
+console.log(bl.length) // 12
+
+console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'
+console.log(bl.slice(3, 10).toString('ascii')) // 'defghij'
+console.log(bl.slice(3, 6).toString('ascii')) // 'def'
+console.log(bl.slice(3, 8).toString('ascii')) // 'defgh'
+console.log(bl.slice(5, 10).toString('ascii')) // 'fghij'
+
+// or just use toString!
+console.log(bl.toString()) // 'abcdefghij\u0003\u0004'
+console.log(bl.toString('ascii', 3, 8)) // 'defgh'
+console.log(bl.toString('ascii', 5, 10)) // 'fghij'
+
+// other standard Buffer readables
+console.log(bl.readUInt16BE(10)) // 0x0304
+console.log(bl.readUInt16LE(10)) // 0x0403
+```
+
+Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:
+
+```js
+const bl = require('bl')
+ , fs = require('fs')
+
+fs.createReadStream('README.md')
+ .pipe(bl(function (err, data) { // note 'new' isn't strictly required
+ // `data` is a complete Buffer object containing the full data
+ console.log(data.toString())
+ }))
+```
+
+Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.
+
+Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
+```js
+const hyperquest = require('hyperquest')
+ , bl = require('bl')
+ , url = 'https://raw.github.com/rvagg/bl/master/README.md'
+
+hyperquest(url).pipe(bl(function (err, data) {
+ console.log(data.toString())
+}))
+```
+
+Or, use it as a readable stream to recompose a list of Buffers to an output source:
+
+```js
+const BufferList = require('bl')
+ , fs = require('fs')
+
+var bl = new BufferList()
+bl.append(new Buffer('abcd'))
+bl.append(new Buffer('efg'))
+bl.append(new Buffer('hi'))
+bl.append(new Buffer('j'))
+
+bl.pipe(fs.createWriteStream('gibberish.txt'))
+```
+
+## API
+
+ * new BufferList([ callback ])
+ * bl.length
+ * bl.append(buffer)
+ * bl.get(index)
+ * bl.slice([ start[, end ] ])
+ * bl.shallowSlice([ start[, end ] ])
+ * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
+ * bl.duplicate()
+ * bl.consume(bytes)
+ * bl.toString([encoding, [ start, [ end ]]])
+ * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
+ * Streams
+
+--------------------------------------------------------
+
+### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
+The constructor takes an optional callback. If supplied, the callback is called with an error argument followed by a reference to the **bl** instance when `bl.end()` is called (i.e. from a piped stream). This is a convenient way to collect the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
+
+Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
+
+`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
+
+```js
+var bl = require('bl')
+var myinstance = bl()
+
+// equivalent to:
+
+var BufferList = require('bl')
+var myinstance = new BufferList()
+```
+
+--------------------------------------------------------
+
+### bl.length
+Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list.
+
+--------------------------------------------------------
+
+### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
+`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
+
+--------------------------------------------------------
+
+### bl.get(index)
+`get()` will return the byte at the specified index.
+
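+For example, a small sketch (not from the original docs) showing that indexing works across the internal buffer boundaries:
+
+```js
+const BufferList = require('bl')
+
+var bl = new BufferList([ Buffer.from('ab'), Buffer.from('cd') ])
+
+console.log(bl.get(2)) // 99, the character code for 'c'
+```
+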
+--------------------------------------------------------
+
+### bl.slice([ start, [ end ] ])
+`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
+
+If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
+
+--------------------------------------------------------
+
+### bl.shallowSlice([ start, [ end ] ])
+`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
+
+No copies will be performed. All buffers in the result share memory with the original list.
+
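+For example (a small sketch, not from the original docs):
+
+```js
+const BufferList = require('bl')
+
+var bl = new BufferList([ Buffer.from('hello '), Buffer.from('world') ])
+var view = bl.shallowSlice(3, 8)
+
+console.log(view instanceof BufferList) // true
+console.log(view.toString())            // 'lo wo'
+```
+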
+--------------------------------------------------------
+
+### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
+`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and covering the bytes within the range specified by `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
+
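+A short sketch of copying part of a list into an existing Buffer (the contents are made up):
+
+```js
+const BufferList = require('bl')
+
+var bl = new BufferList([ Buffer.from('hello '), Buffer.from('world') ])
+var dest = Buffer.alloc(5)
+
+bl.copy(dest, 0, 6, 11)
+
+console.log(dest.toString()) // 'world'
+```
+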
+--------------------------------------------------------
+
+### bl.duplicate()
+`duplicate()` performs a **shallow copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example:
+
+```js
+var bl = new BufferList()
+
+bl.append('hello')
+bl.append(' world')
+bl.append('\n')
+
+bl.duplicate().pipe(process.stdout, { end: false })
+
+console.log(bl.toString())
+```
+
+--------------------------------------------------------
+
+### bl.consume(bytes)
+`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers; initial offsets will be calculated accordingly in order to give you a consistent view of the data.
+
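+For example (a small sketch):
+
+```js
+const BufferList = require('bl')
+
+var bl = new BufferList([ Buffer.from('abc'), Buffer.from('def') ])
+bl.consume(4)
+
+console.log(bl.length)     // 2
+console.log(bl.toString()) // 'ef'
+```
+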
+--------------------------------------------------------
+
+### bl.toString([encoding, [ start, [ end ]]])
+`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
+
+--------------------------------------------------------
+
+### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
+
+All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
+
+See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
+
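+For example, a read that spans two internal buffers (a small sketch):
+
+```js
+const BufferList = require('bl')
+
+var bl = new BufferList([ Buffer.from([ 0x01 ]), Buffer.from([ 0x02 ]) ])
+
+console.log(bl.readUInt16BE(0)) // 258 (0x0102)
+```
+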
+--------------------------------------------------------
+
+### Streams
+**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.
+
+--------------------------------------------------------
+
+## Contributors
+
+**bl** is brought to you by the following hackers:
+
+ * [Rod Vagg](https://github.com/rvagg)
+ * [Matteo Collina](https://github.com/mcollina)
+ * [Jarett Cruger](https://github.com/jcrugzz)
+
+
+## License & copyright
+
+Copyright (c) 2013-2016 bl contributors (listed above).
+
+bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
diff --git a/node_modules/bl/bl.js b/node_modules/bl/bl.js
new file mode 100644
index 00000000..0c8de189
--- /dev/null
+++ b/node_modules/bl/bl.js
@@ -0,0 +1,290 @@
+var DuplexStream = require('readable-stream/duplex')
+ , util = require('util')
+ , Buffer = require('safe-buffer').Buffer
+
+
+function BufferList (callback) {
+ if (!(this instanceof BufferList))
+ return new BufferList(callback)
+
+ this._bufs = []
+ this.length = 0
+
+ if (typeof callback == 'function') {
+ this._callback = callback
+
+ var piper = function piper (err) {
+ if (this._callback) {
+ this._callback(err)
+ this._callback = null
+ }
+ }.bind(this)
+
+ this.on('pipe', function onPipe (src) {
+ src.on('error', piper)
+ })
+ this.on('unpipe', function onUnpipe (src) {
+ src.removeListener('error', piper)
+ })
+ } else {
+ this.append(callback)
+ }
+
+ DuplexStream.call(this)
+}
+
+
+util.inherits(BufferList, DuplexStream)
+
+
+BufferList.prototype._offset = function _offset (offset) {
+ var tot = 0, i = 0, _t
+ if (offset === 0) return [ 0, 0 ]
+ for (; i < this._bufs.length; i++) {
+ _t = tot + this._bufs[i].length
+ if (offset < _t || i == this._bufs.length - 1)
+ return [ i, offset - tot ]
+ tot = _t
+ }
+}
+
+
+BufferList.prototype.append = function append (buf) {
+ var i = 0
+
+ if (Buffer.isBuffer(buf)) {
+ this._appendBuffer(buf);
+ } else if (Array.isArray(buf)) {
+ for (; i < buf.length; i++)
+ this.append(buf[i])
+ } else if (buf instanceof BufferList) {
+ // unwrap argument into individual BufferLists
+ for (; i < buf._bufs.length; i++)
+ this.append(buf._bufs[i])
+ } else if (buf != null) {
+ // coerce number arguments to strings, since Buffer(number) does
+ // uninitialized memory allocation
+ if (typeof buf == 'number')
+ buf = buf.toString()
+
+ this._appendBuffer(Buffer.from(buf));
+ }
+
+ return this
+}
+
+
+BufferList.prototype._appendBuffer = function appendBuffer (buf) {
+ this._bufs.push(buf)
+ this.length += buf.length
+}
+
+
+BufferList.prototype._write = function _write (buf, encoding, callback) {
+ this._appendBuffer(buf)
+
+ if (typeof callback == 'function')
+ callback()
+}
+
+
+BufferList.prototype._read = function _read (size) {
+ if (!this.length)
+ return this.push(null)
+
+ size = Math.min(size, this.length)
+ this.push(this.slice(0, size))
+ this.consume(size)
+}
+
+
+BufferList.prototype.end = function end (chunk) {
+ DuplexStream.prototype.end.call(this, chunk)
+
+ if (this._callback) {
+ this._callback(null, this.slice())
+ this._callback = null
+ }
+}
+
+
+BufferList.prototype.get = function get (index) {
+ return this.slice(index, index + 1)[0]
+}
+
+
+BufferList.prototype.slice = function slice (start, end) {
+ if (typeof start == 'number' && start < 0)
+ start += this.length
+ if (typeof end == 'number' && end < 0)
+ end += this.length
+ return this.copy(null, 0, start, end)
+}
+
+
+BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
+ if (typeof srcStart != 'number' || srcStart < 0)
+ srcStart = 0
+ if (typeof srcEnd != 'number' || srcEnd > this.length)
+ srcEnd = this.length
+ if (srcStart >= this.length)
+ return dst || Buffer.alloc(0)
+ if (srcEnd <= 0)
+ return dst || Buffer.alloc(0)
+
+ var copy = !!dst
+ , off = this._offset(srcStart)
+ , len = srcEnd - srcStart
+ , bytes = len
+ , bufoff = (copy && dstStart) || 0
+ , start = off[1]
+ , l
+ , i
+
+ // copy/slice everything
+ if (srcStart === 0 && srcEnd == this.length) {
+ if (!copy) { // slice, but full concat if multiple buffers
+ return this._bufs.length === 1
+ ? this._bufs[0]
+ : Buffer.concat(this._bufs, this.length)
+ }
+
+ // copy, need to copy individual buffers
+ for (i = 0; i < this._bufs.length; i++) {
+ this._bufs[i].copy(dst, bufoff)
+ bufoff += this._bufs[i].length
+ }
+
+ return dst
+ }
+
+ // easy, cheap case where it's a subset of one of the buffers
+ if (bytes <= this._bufs[off[0]].length - start) {
+ return copy
+ ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
+ : this._bufs[off[0]].slice(start, start + bytes)
+ }
+
+ if (!copy) // a slice, we need something to copy in to
+ dst = Buffer.allocUnsafe(len)
+
+ for (i = off[0]; i < this._bufs.length; i++) {
+ l = this._bufs[i].length - start
+
+ if (bytes > l) {
+ this._bufs[i].copy(dst, bufoff, start)
+ bufoff += l
+ } else {
+ this._bufs[i].copy(dst, bufoff, start, start + bytes)
+ bufoff += l
+ break
+ }
+
+ bytes -= l
+
+ if (start)
+ start = 0
+ }
+
+ // safeguard so that we don't return uninitialized memory
+ if (dst.length > bufoff) return dst.slice(0, bufoff)
+
+ return dst
+}
+
+BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
+ start = start || 0
+ end = end || this.length
+
+ if (start < 0)
+ start += this.length
+ if (end < 0)
+ end += this.length
+
+ var startOffset = this._offset(start)
+ , endOffset = this._offset(end)
+ , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
+
+ if (endOffset[1] == 0)
+ buffers.pop()
+ else
+ buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
+
+ if (startOffset[1] != 0)
+ buffers[0] = buffers[0].slice(startOffset[1])
+
+ return new BufferList(buffers)
+}
+
+BufferList.prototype.toString = function toString (encoding, start, end) {
+ return this.slice(start, end).toString(encoding)
+}
+
+BufferList.prototype.consume = function consume (bytes) {
+ // first, normalize the argument, in accordance with how Buffer does it
+ bytes = Math.trunc(bytes)
+ // do nothing if not a positive number
+ if (Number.isNaN(bytes) || bytes <= 0) return this
+
+ while (this._bufs.length) {
+ if (bytes >= this._bufs[0].length) {
+ bytes -= this._bufs[0].length
+ this.length -= this._bufs[0].length
+ this._bufs.shift()
+ } else {
+ this._bufs[0] = this._bufs[0].slice(bytes)
+ this.length -= bytes
+ break
+ }
+ }
+ return this
+}
+
+
+BufferList.prototype.duplicate = function duplicate () {
+ var i = 0
+ , copy = new BufferList()
+
+ for (; i < this._bufs.length; i++)
+ copy.append(this._bufs[i])
+
+ return copy
+}
+
+
+BufferList.prototype.destroy = function destroy () {
+ this._bufs.length = 0
+ this.length = 0
+ this.push(null)
+}
+
+
+;(function () {
+ var methods = {
+ 'readDoubleBE' : 8
+ , 'readDoubleLE' : 8
+ , 'readFloatBE' : 4
+ , 'readFloatLE' : 4
+ , 'readInt32BE' : 4
+ , 'readInt32LE' : 4
+ , 'readUInt32BE' : 4
+ , 'readUInt32LE' : 4
+ , 'readInt16BE' : 2
+ , 'readInt16LE' : 2
+ , 'readUInt16BE' : 2
+ , 'readUInt16LE' : 2
+ , 'readInt8' : 1
+ , 'readUInt8' : 1
+ }
+
+ for (var m in methods) {
+ (function (m) {
+ BufferList.prototype[m] = function (offset) {
+ return this.slice(offset, offset + methods[m])[m](0)
+ }
+ }(m))
+ }
+}())
+
+
+module.exports = BufferList
diff --git a/node_modules/bl/node_modules/readable-stream/.travis.yml b/node_modules/bl/node_modules/readable-stream/.travis.yml
new file mode 100644
index 00000000..f62cdac0
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/.travis.yml
@@ -0,0 +1,34 @@
+sudo: false
+language: node_js
+before_install:
+ - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true
+notifications:
+ email: false
+matrix:
+ fast_finish: true
+ include:
+ - node_js: '0.8'
+ env: NPM_LEGACY=true
+ - node_js: '0.10'
+ env: NPM_LEGACY=true
+ - node_js: '0.11'
+ env: NPM_LEGACY=true
+ - node_js: '0.12'
+ env: NPM_LEGACY=true
+ - node_js: 1
+ env: NPM_LEGACY=true
+ - node_js: 2
+ env: NPM_LEGACY=true
+ - node_js: 3
+ env: NPM_LEGACY=true
+ - node_js: 4
+ - node_js: 5
+ - node_js: 6
+ - node_js: 7
+ - node_js: 8
+ - node_js: 9
+script: "npm run test"
+env:
+ global:
+ - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
+ - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=
diff --git a/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md
new file mode 100644
index 00000000..f478d58d
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md
@@ -0,0 +1,38 @@
+# Developer's Certificate of Origin 1.1
+
+By making a contribution to this project, I certify that:
+
+* (a) The contribution was created in whole or in part by me and I
+ have the right to submit it under the open source license
+ indicated in the file; or
+
+* (b) The contribution is based upon previous work that, to the best
+ of my knowledge, is covered under an appropriate open source
+ license and I have the right under that license to submit that
+ work with modifications, whether created in whole or in part
+ by me, under the same open source license (unless I am
+ permitted to submit under a different license), as indicated
+ in the file; or
+
+* (c) The contribution was provided directly to me by some other
+ person who certified (a), (b) or (c) and I have not modified
+ it.
+
+* (d) I understand and agree that this project and the contribution
+ are public and that a record of the contribution (including all
+ personal information I submit with it, including my sign-off) is
+ maintained indefinitely and may be redistributed consistent with
+ this project or the open source license(s) involved.
+
+## Moderation Policy
+
+The [Node.js Moderation Policy] applies to this WG.
+
+## Code of Conduct
+
+The [Node.js Code of Conduct][] applies to this WG.
+
+[Node.js Code of Conduct]:
+https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
+[Node.js Moderation Policy]:
+https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md
diff --git a/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md b/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md
new file mode 100644
index 00000000..16ffb93f
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md
@@ -0,0 +1,136 @@
+### Streams Working Group
+
+The Node.js Streams is jointly governed by a Working Group
+(WG)
+that is responsible for high-level guidance of the project.
+
+The WG has final authority over this project including:
+
+* Technical direction
+* Project governance and process (including this policy)
+* Contribution policy
+* GitHub repository hosting
+* Conduct guidelines
+* Maintaining the list of additional Collaborators
+
+For the current list of WG members, see the project
+[README.md](./README.md#current-project-team-members).
+
+### Collaborators
+
+The readable-stream GitHub repository is
+maintained by the WG and additional Collaborators who are added by the
+WG on an ongoing basis.
+
+Individuals making significant and valuable contributions are made
+Collaborators and given commit-access to the project. These
+individuals are identified by the WG and their addition as
+Collaborators is discussed during the WG meeting.
+
+_Note:_ If you make a significant contribution and are not considered
+for commit-access log an issue or contact a WG member directly and it
+will be brought up in the next WG meeting.
+
+Modifications of the contents of the readable-stream repository are
+made on
+a collaborative basis. Anybody with a GitHub account may propose a
+modification via pull request and it will be considered by the project
+Collaborators. All pull requests must be reviewed and accepted by a
+Collaborator with sufficient expertise who is able to take full
+responsibility for the change. In the case of pull requests proposed
+by an existing Collaborator, an additional Collaborator is required
+for sign-off. Consensus should be sought if additional Collaborators
+participate and there is disagreement around a particular
+modification. See _Consensus Seeking Process_ below for further detail
+on the consensus model used for governance.
+
+Collaborators may opt to elevate significant or controversial
+modifications, or modifications that have not found consensus to the
+WG for discussion by assigning the ***WG-agenda*** tag to a pull
+request or issue. The WG should serve as the final arbiter where
+required.
+
+For the current list of Collaborators, see the project
+[README.md](./README.md#members).
+
+### WG Membership
+
+WG seats are not time-limited. There is no fixed size of the WG.
+However, the expected target is between 6 and 12, to ensure adequate
+coverage of important areas of expertise, balanced with the ability to
+make decisions efficiently.
+
+There is no specific set of requirements or qualifications for WG
+membership beyond these rules.
+
+The WG may add additional members to the WG by unanimous consensus.
+
+A WG member may be removed from the WG by voluntary resignation, or by
+unanimous consensus of all other WG members.
+
+Changes to WG membership should be posted in the agenda, and may be
+suggested as any other agenda item (see "WG Meetings" below).
+
+If an addition or removal is proposed during a meeting, and the full
+WG is not in attendance to participate, then the addition or removal
+is added to the agenda for the subsequent meeting. This is to ensure
+that all members are given the opportunity to participate in all
+membership decisions. If a WG member is unable to attend a meeting
+where a planned membership decision is being made, then their consent
+is assumed.
+
+No more than 1/3 of the WG members may be affiliated with the same
+employer. If removal or resignation of a WG member, or a change of
+employment by a WG member, creates a situation where more than 1/3 of
+the WG membership shares an employer, then the situation must be
+immediately remedied by the resignation or removal of one or more WG
+members affiliated with the over-represented employer(s).
+
+### WG Meetings
+
+The WG meets occasionally on a Google Hangout On Air. A designated moderator
+approved by the WG runs the meeting. Each meeting should be
+published to YouTube.
+
+Items are added to the WG agenda that are considered contentious or
+are modifications of governance, contribution policy, WG membership,
+or release process.
+
+The intention of the agenda is not to approve or review all patches;
+that should happen continuously on GitHub and be handled by the larger
+group of Collaborators.
+
+Any community member or contributor can ask that something be added to
+the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
+WG member or the moderator can add the item to the agenda by adding
+the ***WG-agenda*** tag to the issue.
+
+Prior to each WG meeting the moderator will share the Agenda with
+members of the WG. WG members can add any items they like to the
+agenda at the beginning of each meeting. The moderator and the WG
+cannot veto or remove items.
+
+The WG may invite persons or representatives from certain projects to
+participate in a non-voting capacity.
+
+The moderator is responsible for summarizing the discussion of each
+agenda item and sends it as a pull request after the meeting.
+
+### Consensus Seeking Process
+
+The WG follows a
+[Consensus
+Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
+decision-making model.
+
+When an agenda item has appeared to reach a consensus the moderator
+will ask "Does anyone object?" as a final call for dissent from the
+consensus.
+
+If an agenda item cannot reach a consensus a WG member can call for
+either a closing vote or a vote to table the issue to the next
+meeting. The call for a vote must be seconded by a majority of the WG
+or else the discussion will continue. Simple majority wins.
+
+Note that changes to WG membership require a majority consensus. See
+"WG Membership" above.
diff --git a/node_modules/bl/node_modules/readable-stream/LICENSE b/node_modules/bl/node_modules/readable-stream/LICENSE
new file mode 100644
index 00000000..2873b3b2
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/LICENSE
@@ -0,0 +1,47 @@
+Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
diff --git a/node_modules/bl/node_modules/readable-stream/README.md b/node_modules/bl/node_modules/readable-stream/README.md
new file mode 100644
index 00000000..f1c5a931
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/README.md
@@ -0,0 +1,58 @@
+# readable-stream
+
+***Node-core v8.17.0 streams for userland*** [](https://travis-ci.org/nodejs/readable-stream)
+
+
+
+
+
+```bash
+npm install --save readable-stream
+```
+
+***Node-core streams for userland***
+
+This package is a mirror of the Streams2 and Streams3 implementations in
+Node-core.
+
+Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html).
+
+If you want to guarantee a stable streams base, regardless of what version of
+Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
+
+As of version 2.0.0 **readable-stream** uses semantic versioning.
+
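+As a small illustration (a sketch, not from the upstream docs), use the classes exported by this package instead of the core `stream` module:
+
+```js
+const { Readable } = require('readable-stream')
+
+const source = new Readable({
+  read () {
+    this.push('hello world')
+    this.push(null)
+  }
+})
+
+source.on('data', (chunk) => {
+  console.log(chunk.toString()) // 'hello world'
+})
+```
+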
+# Streams Working Group
+
+`readable-stream` is maintained by the Streams Working Group, which
+oversees the development and maintenance of the Streams API within
+Node.js. The responsibilities of the Streams Working Group include:
+
+* Addressing stream issues on the Node.js issue tracker.
+* Authoring and editing stream documentation within the Node.js project.
+* Reviewing changes to stream subclasses within the Node.js project.
+* Redirecting changes to streams from the Node.js project to this
+ project.
+* Assisting in the implementation of stream providers within Node.js.
+* Recommending versions of `readable-stream` to be included in Node.js.
+* Messaging about the future of streams to give the community advance
+ notice of changes.
+
+
+## Team Members
+
+* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com>
+ - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
+* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
+ - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
+* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org>
+ - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
+* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com>
+* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
+* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me>
+* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
+ - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
+* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com>
diff --git a/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
new file mode 100644
index 00000000..83275f19
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
@@ -0,0 +1,60 @@
+# streams WG Meeting 2015-01-30
+
+## Links
+
+* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
+* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
+* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
+
+## Agenda
+
+Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
+
+* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
+* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
+* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
+* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
+
+## Minutes
+
+### adopt a charter
+
+* group: +1's all around
+
+### What versioning scheme should be adopted?
+* group: +1’s 3.0.0
+* domenic+group: pulling in patches from other sources where appropriate
+* mikeal: version independently, suggesting versions for io.js
+* mikeal+domenic: work with TC to notify in advance of changes
+simpler stream creation
+
+### streamline creation of streams
+* sam: streamline creation of streams
+* domenic: nice simple solution posted
+ but, we lose the opportunity to change the model
+ may not be backwards incompatible (double check keys)
+
+ **action item:** domenic will check
+
+### remove implicit flowing of streams on(‘data’)
+* add isFlowing / isPaused
+* mikeal: worrying that we’re documenting polyfill methods – confuses users
+* domenic: more reflective API is probably good, with warning labels for users
+* new section for mad scientists (reflective stream access)
+* calvin: name the “third state”
+* mikeal: maybe borrow the name from whatwg?
+* domenic: we’re missing the “third state”
+* consensus: kind of difficult to name the third state
+* mikeal: figure out differences in states / compat
+* mathias: always flow on data – eliminates third state
+ * explore what it breaks
+
+**action items:**
+* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
+* ask rod/build for infrastructure
+* **chris**: explore the “flow on data” approach
+* add isPaused/isFlowing
+* add new docs section
+* move isPaused to that section
+
+
diff --git a/node_modules/bl/node_modules/readable-stream/duplex-browser.js b/node_modules/bl/node_modules/readable-stream/duplex-browser.js
new file mode 100644
index 00000000..f8b2db83
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/duplex-browser.js
@@ -0,0 +1 @@
+module.exports = require('./lib/_stream_duplex.js');
diff --git a/node_modules/bl/node_modules/readable-stream/duplex.js b/node_modules/bl/node_modules/readable-stream/duplex.js
new file mode 100644
index 00000000..46924cbf
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/duplex.js
@@ -0,0 +1 @@
+module.exports = require('./readable').Duplex
diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
new file mode 100644
index 00000000..57003c32
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
@@ -0,0 +1,131 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototypal inheritance, this class
+// prototypally inherits from Readable, and then parasitically from
+// Writable.
+
+'use strict';
+
+/**/
+
+var pna = require('process-nextick-args');
+/**/
+
+/**/
+var objectKeys = Object.keys || function (obj) {
+ var keys = [];
+ for (var key in obj) {
+ keys.push(key);
+ }return keys;
+};
+/**/
+
+module.exports = Duplex;
+
+/**/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/**/
+
+var Readable = require('./_stream_readable');
+var Writable = require('./_stream_writable');
+
+util.inherits(Duplex, Readable);
+
+{
+ // avoid scope creep, the keys array can then be collected
+ var keys = objectKeys(Writable.prototype);
+ for (var v = 0; v < keys.length; v++) {
+ var method = keys[v];
+ if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
+ }
+}
+
+function Duplex(options) {
+ if (!(this instanceof Duplex)) return new Duplex(options);
+
+ Readable.call(this, options);
+ Writable.call(this, options);
+
+ if (options && options.readable === false) this.readable = false;
+
+ if (options && options.writable === false) this.writable = false;
+
+ this.allowHalfOpen = true;
+ if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
+
+ this.once('end', onend);
+}
+
+Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
+ // making it explicit this property is not enumerable
+ // because otherwise some prototype manipulation in
+ // userland will fail
+ enumerable: false,
+ get: function () {
+ return this._writableState.highWaterMark;
+ }
+});
+
+// the no-half-open enforcer
+function onend() {
+ // if we allow half-open state, or if the writable side ended,
+ // then we're ok.
+ if (this.allowHalfOpen || this._writableState.ended) return;
+
+ // no more data can be written.
+ // But allow more writes to happen in this tick.
+ pna.nextTick(onEndNT, this);
+}
+
+function onEndNT(self) {
+ self.end();
+}
+
+Object.defineProperty(Duplex.prototype, 'destroyed', {
+ get: function () {
+ if (this._readableState === undefined || this._writableState === undefined) {
+ return false;
+ }
+ return this._readableState.destroyed && this._writableState.destroyed;
+ },
+ set: function (value) {
+ // we ignore the value if the stream
+ // has not been initialized yet
+ if (this._readableState === undefined || this._writableState === undefined) {
+ return;
+ }
+
+ // backward compatibility, the user is explicitly
+ // managing destroyed
+ this._readableState.destroyed = value;
+ this._writableState.destroyed = value;
+ }
+});
+
+Duplex.prototype._destroy = function (err, cb) {
+ this.push(null);
+ this.end();
+
+ pna.nextTick(cb, err);
+};
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
new file mode 100644
index 00000000..612edb4d
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
@@ -0,0 +1,47 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
+
+'use strict';
+
+module.exports = PassThrough;
+
+var Transform = require('./_stream_transform');
+
+/**/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/**/
+
+util.inherits(PassThrough, Transform);
+
+function PassThrough(options) {
+ if (!(this instanceof PassThrough)) return new PassThrough(options);
+
+ Transform.call(this, options);
+}
+
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
+ cb(null, chunk);
+};
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
new file mode 100644
index 00000000..3af95cb2
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
@@ -0,0 +1,1019 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict';
+
+/**/
+
+var pna = require('process-nextick-args');
+/**/
+
+module.exports = Readable;
+
+/**/
+var isArray = require('isarray');
+/**/
+
+/**/
+var Duplex;
+/**/
+
+Readable.ReadableState = ReadableState;
+
+/**/
+var EE = require('events').EventEmitter;
+
+var EElistenerCount = function (emitter, type) {
+ return emitter.listeners(type).length;
+};
+/**/
+
+/**/
+var Stream = require('./internal/streams/stream');
+/**/
+
+/**/
+
+var Buffer = require('safe-buffer').Buffer;
+var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
+function _uint8ArrayToBuffer(chunk) {
+ return Buffer.from(chunk);
+}
+function _isUint8Array(obj) {
+ return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
+}
+
+/**/
+
+/**/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/**/
+
+/**/
+var debugUtil = require('util');
+var debug = void 0;
+if (debugUtil && debugUtil.debuglog) {
+ debug = debugUtil.debuglog('stream');
+} else {
+ debug = function () {};
+}
+/**/
+
+var BufferList = require('./internal/streams/BufferList');
+var destroyImpl = require('./internal/streams/destroy');
+var StringDecoder;
+
+util.inherits(Readable, Stream);
+
+var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
+
+function prependListener(emitter, event, fn) {
+ // Sadly this is not cacheable as some libraries bundle their own
+ // event emitter implementation with them.
+ if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
+
+ // This is a hack to make sure that our error handler is attached before any
+ // userland ones. NEVER DO THIS. This is here only because this code needs
+ // to continue to work with older versions of Node.js that do not include
+ // the prependListener() method. The goal is to eventually remove this hack.
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
+}
+
+function ReadableState(options, stream) {
+ Duplex = Duplex || require('./_stream_duplex');
+
+ options = options || {};
+
+ // Duplex streams are both readable and writable, but share
+ // the same options object.
+ // However, some cases require setting options to different
+ // values for the readable and the writable sides of the duplex stream.
+ // These options can be provided separately as readableXXX and writableXXX.
+ var isDuplex = stream instanceof Duplex;
+
+ // object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away
+ this.objectMode = !!options.objectMode;
+
+ if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
+
+ // the point at which it stops calling _read() to fill the buffer
+ // Note: 0 is a valid value, means "don't call _read preemptively ever"
+ var hwm = options.highWaterMark;
+ var readableHwm = options.readableHighWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+
+ if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
+
+ // cast to ints.
+ this.highWaterMark = Math.floor(this.highWaterMark);
+
+ // A linked list is used to store data chunks instead of an array because the
+ // linked list can remove elements from the beginning faster than
+ // array.shift()
+ this.buffer = new BufferList();
+ this.length = 0;
+ this.pipes = null;
+ this.pipesCount = 0;
+ this.flowing = null;
+ this.ended = false;
+ this.endEmitted = false;
+ this.reading = false;
+
+ // a flag to be able to tell if the event 'readable'/'data' is emitted
+ // immediately, or on a later tick. We set this to true at first, because
+ // any actions that shouldn't happen until "later" should generally also
+ // not happen before the first read call.
+ this.sync = true;
+
+ // whenever we return null, then we set a flag to say
+ // that we're awaiting a 'readable' event emission.
+ this.needReadable = false;
+ this.emittedReadable = false;
+ this.readableListening = false;
+ this.resumeScheduled = false;
+
+ // has it been destroyed
+ this.destroyed = false;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // the number of writers that are awaiting a drain event in .pipe()s
+ this.awaitDrain = 0;
+
+ // if true, a maybeReadMore has been scheduled
+ this.readingMore = false;
+
+ this.decoder = null;
+ this.encoding = null;
+ if (options.encoding) {
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
+ this.decoder = new StringDecoder(options.encoding);
+ this.encoding = options.encoding;
+ }
+}
+
+function Readable(options) {
+ Duplex = Duplex || require('./_stream_duplex');
+
+ if (!(this instanceof Readable)) return new Readable(options);
+
+ this._readableState = new ReadableState(options, this);
+
+ // legacy
+ this.readable = true;
+
+ if (options) {
+ if (typeof options.read === 'function') this._read = options.read;
+
+ if (typeof options.destroy === 'function') this._destroy = options.destroy;
+ }
+
+ Stream.call(this);
+}
+
+Object.defineProperty(Readable.prototype, 'destroyed', {
+ get: function () {
+ if (this._readableState === undefined) {
+ return false;
+ }
+ return this._readableState.destroyed;
+ },
+ set: function (value) {
+ // we ignore the value if the stream
+ // has not been initialized yet
+ if (!this._readableState) {
+ return;
+ }
+
+ // backward compatibility, the user is explicitly
+ // managing destroyed
+ this._readableState.destroyed = value;
+ }
+});
+
+Readable.prototype.destroy = destroyImpl.destroy;
+Readable.prototype._undestroy = destroyImpl.undestroy;
+Readable.prototype._destroy = function (err, cb) {
+ this.push(null);
+ cb(err);
+};
+
+// Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+Readable.prototype.push = function (chunk, encoding) {
+ var state = this._readableState;
+ var skipChunkCheck;
+
+ if (!state.objectMode) {
+ if (typeof chunk === 'string') {
+ encoding = encoding || state.defaultEncoding;
+ if (encoding !== state.encoding) {
+ chunk = Buffer.from(chunk, encoding);
+ encoding = '';
+ }
+ skipChunkCheck = true;
+ }
+ } else {
+ skipChunkCheck = true;
+ }
+
+ return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
+};
+
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function (chunk) {
+ return readableAddChunk(this, chunk, null, true, false);
+};
+
+function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
+ var state = stream._readableState;
+ if (chunk === null) {
+ state.reading = false;
+ onEofChunk(stream, state);
+ } else {
+ var er;
+ if (!skipChunkCheck) er = chunkInvalid(state, chunk);
+ if (er) {
+ stream.emit('error', er);
+ } else if (state.objectMode || chunk && chunk.length > 0) {
+ if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
+ chunk = _uint8ArrayToBuffer(chunk);
+ }
+
+ if (addToFront) {
+ if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
+ } else if (state.ended) {
+ stream.emit('error', new Error('stream.push() after EOF'));
+ } else {
+ state.reading = false;
+ if (state.decoder && !encoding) {
+ chunk = state.decoder.write(chunk);
+ if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
+ } else {
+ addChunk(stream, state, chunk, false);
+ }
+ }
+ } else if (!addToFront) {
+ state.reading = false;
+ }
+ }
+
+ return needMoreData(state);
+}
+
+function addChunk(stream, state, chunk, addToFront) {
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
+
+ if (state.needReadable) emitReadable(stream);
+ }
+ maybeReadMore(stream, state);
+}
+
+function chunkInvalid(state, chunk) {
+ var er;
+ if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+ er = new TypeError('Invalid non-string/buffer chunk');
+ }
+ return er;
+}
+
+// if we're below the high water mark, we can push in some more.
+// Also, if we have no data yet, we can stand some
+// more bytes. This is to work around cases where hwm=0,
+// such as the repl. Also, if the push() triggered a
+// readable event, and the user called read(largeNumber) such that
+// needReadable was set, then we ought to push more, so that another
+// 'readable' event will be triggered.
+function needMoreData(state) {
+ return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
+}
+
+Readable.prototype.isPaused = function () {
+ return this._readableState.flowing === false;
+};
+
+// backwards compatibility.
+Readable.prototype.setEncoding = function (enc) {
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
+ this._readableState.decoder = new StringDecoder(enc);
+ this._readableState.encoding = enc;
+ return this;
+};
+
+// Don't raise the hwm > 8MB
+var MAX_HWM = 0x800000;
+function computeNewHighWaterMark(n) {
+ if (n >= MAX_HWM) {
+ n = MAX_HWM;
+ } else {
+ // Get the next highest power of 2 to prevent increasing hwm excessively in
+ // tiny amounts
+ n--;
+ n |= n >>> 1;
+ n |= n >>> 2;
+ n |= n >>> 4;
+ n |= n >>> 8;
+ n |= n >>> 16;
+ n++;
+ }
+ return n;
+}
+
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function howMuchToRead(n, state) {
+ if (n <= 0 || state.length === 0 && state.ended) return 0;
+ if (state.objectMode) return 1;
+ if (n !== n) {
+ // Only flow one buffer at a time
+ if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
+ }
+ // If we're asking for more than the current hwm, then raise the hwm.
+ if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
+ if (n <= state.length) return n;
+ // Don't have enough
+ if (!state.ended) {
+ state.needReadable = true;
+ return 0;
+ }
+ return state.length;
+}
+
+// you can override either this method, or the async _read(n) below.
+Readable.prototype.read = function (n) {
+ debug('read', n);
+ n = parseInt(n, 10);
+ var state = this._readableState;
+ var nOrig = n;
+
+ if (n !== 0) state.emittedReadable = false;
+
+ // if we're doing read(0) to trigger a readable event, but we
+ // already have a bunch of data in the buffer, then just trigger
+ // the 'readable' event and move on.
+ if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
+ debug('read: emitReadable', state.length, state.ended);
+ if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
+ return null;
+ }
+
+ n = howMuchToRead(n, state);
+
+ // if we've ended, and we're now clear, then finish it up.
+ if (n === 0 && state.ended) {
+ if (state.length === 0) endReadable(this);
+ return null;
+ }
+
+ // All the actual chunk generation logic needs to be
+ // *below* the call to _read. The reason is that in certain
+ // synthetic stream cases, such as passthrough streams, _read
+ // may be a completely synchronous operation which may change
+ // the state of the read buffer, providing enough data when
+ // before there was *not* enough.
+ //
+ // So, the steps are:
+ // 1. Figure out what the state of things will be after we do
+ // a read from the buffer.
+ //
+ // 2. If that resulting state will trigger a _read, then call _read.
+ // Note that this may be asynchronous, or synchronous. Yes, it is
+ // deeply ugly to write APIs this way, but that still doesn't mean
+ // that the Readable class should behave improperly, as streams are
+ // designed to be sync/async agnostic.
+ // Take note if the _read call is sync or async (ie, if the read call
+ // has returned yet), so that we know whether or not it's safe to emit
+ // 'readable' etc.
+ //
+ // 3. Actually pull the requested chunks out of the buffer and return.
+
+ // if we need a readable event, then we need to do some reading.
+ var doRead = state.needReadable;
+ debug('need readable', doRead);
+
+ // if we currently have less than the highWaterMark, then also read some
+ if (state.length === 0 || state.length - n < state.highWaterMark) {
+ doRead = true;
+ debug('length less than watermark', doRead);
+ }
+
+ // however, if we've ended, then there's no point, and if we're already
+ // reading, then it's unnecessary.
+ if (state.ended || state.reading) {
+ doRead = false;
+ debug('reading or ended', doRead);
+ } else if (doRead) {
+ debug('do read');
+ state.reading = true;
+ state.sync = true;
+ // if the length is currently zero, then we *need* a readable event.
+ if (state.length === 0) state.needReadable = true;
+ // call internal read method
+ this._read(state.highWaterMark);
+ state.sync = false;
+ // If _read pushed data synchronously, then `reading` will be false,
+ // and we need to re-evaluate how much data we can return to the user.
+ if (!state.reading) n = howMuchToRead(nOrig, state);
+ }
+
+ var ret;
+ if (n > 0) ret = fromList(n, state);else ret = null;
+
+ if (ret === null) {
+ state.needReadable = true;
+ n = 0;
+ } else {
+ state.length -= n;
+ }
+
+ if (state.length === 0) {
+ // If we have nothing in the buffer, then we want to know
+ // as soon as we *do* get something into the buffer.
+ if (!state.ended) state.needReadable = true;
+
+ // If we tried to read() past the EOF, then emit end on the next tick.
+ if (nOrig !== n && state.ended) endReadable(this);
+ }
+
+ if (ret !== null) this.emit('data', ret);
+
+ return ret;
+};
+
+function onEofChunk(stream, state) {
+ if (state.ended) return;
+ if (state.decoder) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length) {
+ state.buffer.push(chunk);
+ state.length += state.objectMode ? 1 : chunk.length;
+ }
+ }
+ state.ended = true;
+
+ // emit 'readable' now to make sure it gets picked up.
+ emitReadable(stream);
+}
+
+// Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow. This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+function emitReadable(stream) {
+ var state = stream._readableState;
+ state.needReadable = false;
+ if (!state.emittedReadable) {
+ debug('emitReadable', state.flowing);
+ state.emittedReadable = true;
+ if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
+ }
+}
+
+function emitReadable_(stream) {
+ debug('emit readable');
+ stream.emit('readable');
+ flow(stream);
+}
+
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data. that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+ if (!state.readingMore) {
+ state.readingMore = true;
+ pna.nextTick(maybeReadMore_, stream, state);
+ }
+}
+
+function maybeReadMore_(stream, state) {
+ var len = state.length;
+ while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
+ debug('maybeReadMore read 0');
+ stream.read(0);
+ if (len === state.length)
+ // didn't get any data, stop spinning.
+ break;else len = state.length;
+ }
+ state.readingMore = false;
+}
+
+// abstract method. to be overridden in specific implementation classes.
+// call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function (n) {
+ this.emit('error', new Error('_read() is not implemented'));
+};
+
+Readable.prototype.pipe = function (dest, pipeOpts) {
+ var src = this;
+ var state = this._readableState;
+
+ switch (state.pipesCount) {
+ case 0:
+ state.pipes = dest;
+ break;
+ case 1:
+ state.pipes = [state.pipes, dest];
+ break;
+ default:
+ state.pipes.push(dest);
+ break;
+ }
+ state.pipesCount += 1;
+ debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
+
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
+
+ var endFn = doEnd ? onend : unpipe;
+ if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
+
+ dest.on('unpipe', onunpipe);
+ function onunpipe(readable, unpipeInfo) {
+ debug('onunpipe');
+ if (readable === src) {
+ if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
+ unpipeInfo.hasUnpiped = true;
+ cleanup();
+ }
+ }
+ }
+
+ function onend() {
+ debug('onend');
+ dest.end();
+ }
+
+ // when the dest drains, it reduces the awaitDrain counter
+ // on the source. This would be more elegant with a .once()
+ // handler in flow(), but adding and removing repeatedly is
+ // too slow.
+ var ondrain = pipeOnDrain(src);
+ dest.on('drain', ondrain);
+
+ var cleanedUp = false;
+ function cleanup() {
+ debug('cleanup');
+ // cleanup event handlers once the pipe is broken
+ dest.removeListener('close', onclose);
+ dest.removeListener('finish', onfinish);
+ dest.removeListener('drain', ondrain);
+ dest.removeListener('error', onerror);
+ dest.removeListener('unpipe', onunpipe);
+ src.removeListener('end', onend);
+ src.removeListener('end', unpipe);
+ src.removeListener('data', ondata);
+
+ cleanedUp = true;
+
+ // if the reader is waiting for a drain event from this
+ // specific writer, then it would cause it to never start
+ // flowing again.
+ // So, if this is awaiting a drain, then we just call it now.
+ // If we don't know, then assume that we are waiting for one.
+ if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
+ }
+
+ // If the user pushes more data while we're writing to dest then we'll end up
+ // in ondata again. However, we only want to increase awaitDrain once because
+ // dest will only emit one 'drain' event for the multiple writes.
+ // => Introduce a guard on increasing awaitDrain.
+ var increasedAwaitDrain = false;
+ src.on('data', ondata);
+ function ondata(chunk) {
+ debug('ondata');
+ increasedAwaitDrain = false;
+ var ret = dest.write(chunk);
+ if (false === ret && !increasedAwaitDrain) {
+ // If the user unpiped during `dest.write()`, it is possible
+ // to get stuck in a permanently paused state if that write
+ // also returned false.
+ // => Check whether `dest` is still a piping destination.
+ if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
+ debug('false write response, pause', state.awaitDrain);
+ state.awaitDrain++;
+ increasedAwaitDrain = true;
+ }
+ src.pause();
+ }
+ }
+
+ // if the dest has an error, then stop piping into it.
+ // however, don't suppress the throwing behavior for this.
+ function onerror(er) {
+ debug('onerror', er);
+ unpipe();
+ dest.removeListener('error', onerror);
+ if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
+ }
+
+ // Make sure our error handler is attached before userland ones.
+ prependListener(dest, 'error', onerror);
+
+ // Both close and finish should trigger unpipe, but only once.
+ function onclose() {
+ dest.removeListener('finish', onfinish);
+ unpipe();
+ }
+ dest.once('close', onclose);
+ function onfinish() {
+ debug('onfinish');
+ dest.removeListener('close', onclose);
+ unpipe();
+ }
+ dest.once('finish', onfinish);
+
+ function unpipe() {
+ debug('unpipe');
+ src.unpipe(dest);
+ }
+
+ // tell the dest that it's being piped to
+ dest.emit('pipe', src);
+
+ // start the flow if it hasn't been started already.
+ if (!state.flowing) {
+ debug('pipe resume');
+ src.resume();
+ }
+
+ return dest;
+};
+
+function pipeOnDrain(src) {
+ return function () {
+ var state = src._readableState;
+ debug('pipeOnDrain', state.awaitDrain);
+ if (state.awaitDrain) state.awaitDrain--;
+ if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
+ state.flowing = true;
+ flow(src);
+ }
+ };
+}
+
+Readable.prototype.unpipe = function (dest) {
+ var state = this._readableState;
+ var unpipeInfo = { hasUnpiped: false };
+
+ // if we're not piping anywhere, then do nothing.
+ if (state.pipesCount === 0) return this;
+
+ // just one destination. most common case.
+ if (state.pipesCount === 1) {
+ // passed in one, but it's not the right one.
+ if (dest && dest !== state.pipes) return this;
+
+ if (!dest) dest = state.pipes;
+
+ // got a match.
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
+ if (dest) dest.emit('unpipe', this, unpipeInfo);
+ return this;
+ }
+
+ // slow case. multiple pipe destinations.
+
+ if (!dest) {
+ // remove all.
+ var dests = state.pipes;
+ var len = state.pipesCount;
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
+
+ for (var i = 0; i < len; i++) {
+ dests[i].emit('unpipe', this, { hasUnpiped: false });
+ }return this;
+ }
+
+ // try to find the right one.
+ var index = indexOf(state.pipes, dest);
+ if (index === -1) return this;
+
+ state.pipes.splice(index, 1);
+ state.pipesCount -= 1;
+ if (state.pipesCount === 1) state.pipes = state.pipes[0];
+
+ dest.emit('unpipe', this, unpipeInfo);
+
+ return this;
+};
+
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
+Readable.prototype.on = function (ev, fn) {
+ var res = Stream.prototype.on.call(this, ev, fn);
+
+ if (ev === 'data') {
+ // Start flowing on next tick if stream isn't explicitly paused
+ if (this._readableState.flowing !== false) this.resume();
+ } else if (ev === 'readable') {
+ var state = this._readableState;
+ if (!state.endEmitted && !state.readableListening) {
+ state.readableListening = state.needReadable = true;
+ state.emittedReadable = false;
+ if (!state.reading) {
+ pna.nextTick(nReadingNextTick, this);
+ } else if (state.length) {
+ emitReadable(this);
+ }
+ }
+ }
+
+ return res;
+};
+Readable.prototype.addListener = Readable.prototype.on;
+
+function nReadingNextTick(self) {
+ debug('readable nexttick read 0');
+ self.read(0);
+}
+
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function () {
+ var state = this._readableState;
+ if (!state.flowing) {
+ debug('resume');
+ state.flowing = true;
+ resume(this, state);
+ }
+ return this;
+};
+
+function resume(stream, state) {
+ if (!state.resumeScheduled) {
+ state.resumeScheduled = true;
+ pna.nextTick(resume_, stream, state);
+ }
+}
+
+function resume_(stream, state) {
+ if (!state.reading) {
+ debug('resume read 0');
+ stream.read(0);
+ }
+
+ state.resumeScheduled = false;
+ state.awaitDrain = 0;
+ stream.emit('resume');
+ flow(stream);
+ if (state.flowing && !state.reading) stream.read(0);
+}
+
+Readable.prototype.pause = function () {
+ debug('call pause flowing=%j', this._readableState.flowing);
+ if (false !== this._readableState.flowing) {
+ debug('pause');
+ this._readableState.flowing = false;
+ this.emit('pause');
+ }
+ return this;
+};
+
+function flow(stream) {
+ var state = stream._readableState;
+ debug('flow', state.flowing);
+ while (state.flowing && stream.read() !== null) {}
+}
+
+// wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function (stream) {
+ var _this = this;
+
+ var state = this._readableState;
+ var paused = false;
+
+ stream.on('end', function () {
+ debug('wrapped end');
+ if (state.decoder && !state.ended) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length) _this.push(chunk);
+ }
+
+ _this.push(null);
+ });
+
+ stream.on('data', function (chunk) {
+ debug('wrapped data');
+ if (state.decoder) chunk = state.decoder.write(chunk);
+
+ // don't skip over falsy values in objectMode
+ if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
+
+ var ret = _this.push(chunk);
+ if (!ret) {
+ paused = true;
+ stream.pause();
+ }
+ });
+
+ // proxy all the other methods.
+ // important when wrapping filters and duplexes.
+ for (var i in stream) {
+ if (this[i] === undefined && typeof stream[i] === 'function') {
+ this[i] = function (method) {
+ return function () {
+ return stream[method].apply(stream, arguments);
+ };
+ }(i);
+ }
+ }
+
+ // proxy certain important events.
+ for (var n = 0; n < kProxyEvents.length; n++) {
+ stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
+ }
+
+ // when we try to consume some more bytes, simply unpause the
+ // underlying stream.
+ this._read = function (n) {
+ debug('wrapped _read', n);
+ if (paused) {
+ paused = false;
+ stream.resume();
+ }
+ };
+
+ return this;
+};
+
+Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
+ // making it explicit this property is not enumerable
+ // because otherwise some prototype manipulation in
+ // userland will fail
+ enumerable: false,
+ get: function () {
+ return this._readableState.highWaterMark;
+ }
+});
+
+// exposed for testing purposes only.
+Readable._fromList = fromList;
+
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromList(n, state) {
+ // nothing buffered
+ if (state.length === 0) return null;
+
+ var ret;
+ if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
+ // read it all, truncate the list
+ if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
+ state.buffer.clear();
+ } else {
+ // read part of list
+ ret = fromListPartial(n, state.buffer, state.decoder);
+ }
+
+ return ret;
+}
+
+// Extracts only enough buffered data to satisfy the amount requested.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromListPartial(n, list, hasStrings) {
+ var ret;
+ if (n < list.head.data.length) {
+ // slice is the same for buffers and strings
+ ret = list.head.data.slice(0, n);
+ list.head.data = list.head.data.slice(n);
+ } else if (n === list.head.data.length) {
+ // first chunk is a perfect match
+ ret = list.shift();
+ } else {
+ // result spans more than one buffer
+ ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
+ }
+ return ret;
+}
+
+// Copies a specified amount of characters from the list of buffered data
+// chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBufferString(n, list) {
+ var p = list.head;
+ var c = 1;
+ var ret = p.data;
+ n -= ret.length;
+ while (p = p.next) {
+ var str = p.data;
+ var nb = n > str.length ? str.length : n;
+ if (nb === str.length) ret += str;else ret += str.slice(0, n);
+ n -= nb;
+ if (n === 0) {
+ if (nb === str.length) {
+ ++c;
+ if (p.next) list.head = p.next;else list.head = list.tail = null;
+ } else {
+ list.head = p;
+ p.data = str.slice(nb);
+ }
+ break;
+ }
+ ++c;
+ }
+ list.length -= c;
+ return ret;
+}
+
+// Copies a specified amount of bytes from the list of buffered data chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBuffer(n, list) {
+ var ret = Buffer.allocUnsafe(n);
+ var p = list.head;
+ var c = 1;
+ p.data.copy(ret);
+ n -= p.data.length;
+ while (p = p.next) {
+ var buf = p.data;
+ var nb = n > buf.length ? buf.length : n;
+ buf.copy(ret, ret.length - n, 0, nb);
+ n -= nb;
+ if (n === 0) {
+ if (nb === buf.length) {
+ ++c;
+ if (p.next) list.head = p.next;else list.head = list.tail = null;
+ } else {
+ list.head = p;
+ p.data = buf.slice(nb);
+ }
+ break;
+ }
+ ++c;
+ }
+ list.length -= c;
+ return ret;
+}
+
+function endReadable(stream) {
+ var state = stream._readableState;
+
+ // If we get here before consuming all the bytes, then that is a
+ // bug in node. Should never happen.
+ if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
+
+ if (!state.endEmitted) {
+ state.ended = true;
+ pna.nextTick(endReadableNT, state, stream);
+ }
+}
+
+function endReadableNT(state, stream) {
+ // Check that we didn't get one last unshift.
+ if (!state.endEmitted && state.length === 0) {
+ state.endEmitted = true;
+ stream.readable = false;
+ stream.emit('end');
+ }
+}
+
+function indexOf(xs, x) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) return i;
+ }
+ return -1;
+}
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
new file mode 100644
index 00000000..fcfc105a
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
@@ -0,0 +1,214 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a transform stream is a readable/writable stream where you do
+// something with the data. Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored. (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation. For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes. When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up. When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer. When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks. If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk. However,
+// a pathological inflate type of transform can cause excessive buffering
+// here. For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output. In this case, you could write a very small
+// amount of input, and end up with a very large amount of output. In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform. A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
+
+'use strict';
+
+module.exports = Transform;
+
+var Duplex = require('./_stream_duplex');
+
+/**/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/**/
+
+util.inherits(Transform, Duplex);
+
+function afterTransform(er, data) {
+ var ts = this._transformState;
+ ts.transforming = false;
+
+ var cb = ts.writecb;
+
+ if (!cb) {
+ return this.emit('error', new Error('write callback called multiple times'));
+ }
+
+ ts.writechunk = null;
+ ts.writecb = null;
+
+ if (data != null) // single equals check for both `null` and `undefined`
+ this.push(data);
+
+ cb(er);
+
+ var rs = this._readableState;
+ rs.reading = false;
+ if (rs.needReadable || rs.length < rs.highWaterMark) {
+ this._read(rs.highWaterMark);
+ }
+}
+
+function Transform(options) {
+ if (!(this instanceof Transform)) return new Transform(options);
+
+ Duplex.call(this, options);
+
+ this._transformState = {
+ afterTransform: afterTransform.bind(this),
+ needTransform: false,
+ transforming: false,
+ writecb: null,
+ writechunk: null,
+ writeencoding: null
+ };
+
+ // start out asking for a readable event once data is transformed.
+ this._readableState.needReadable = true;
+
+ // we have implemented the _read method, and done the other things
+ // that Readable wants before the first _read call, so unset the
+ // sync guard flag.
+ this._readableState.sync = false;
+
+ if (options) {
+ if (typeof options.transform === 'function') this._transform = options.transform;
+
+ if (typeof options.flush === 'function') this._flush = options.flush;
+ }
+
+ // When the writable side finishes, then flush out anything remaining.
+ this.on('prefinish', prefinish);
+}
+
+function prefinish() {
+ var _this = this;
+
+ if (typeof this._flush === 'function') {
+ this._flush(function (er, data) {
+ done(_this, er, data);
+ });
+ } else {
+ done(this, null, null);
+ }
+}
+
+Transform.prototype.push = function (chunk, encoding) {
+ this._transformState.needTransform = false;
+ return Duplex.prototype.push.call(this, chunk, encoding);
+};
+
+// This is the part where you do stuff!
+// override this function in implementation classes.
+// 'chunk' is an input chunk.
+//
+// Call `push(newChunk)` to pass along transformed output
+// to the readable side. You may call 'push' zero or more times.
+//
+// Call `cb(err)` when you are done with this chunk. If you pass
+// an error, then that'll put the hurt on the whole operation. If you
+// never call cb(), then you'll never get another chunk.
+Transform.prototype._transform = function (chunk, encoding, cb) {
+ throw new Error('_transform() is not implemented');
+};
+
+Transform.prototype._write = function (chunk, encoding, cb) {
+ var ts = this._transformState;
+ ts.writecb = cb;
+ ts.writechunk = chunk;
+ ts.writeencoding = encoding;
+ if (!ts.transforming) {
+ var rs = this._readableState;
+ if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
+ }
+};
+
+// Doesn't matter what the args are here.
+// _transform does all the work.
+// The fact that we got here means that the readable side wants more data.
+Transform.prototype._read = function (n) {
+ var ts = this._transformState;
+
+ if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
+ ts.transforming = true;
+ this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
+ } else {
+ // mark that we need a transform, so that any data that comes in
+ // will get processed, now that we've asked for it.
+ ts.needTransform = true;
+ }
+};
+
+Transform.prototype._destroy = function (err, cb) {
+ var _this2 = this;
+
+ Duplex.prototype._destroy.call(this, err, function (err2) {
+ cb(err2);
+ _this2.emit('close');
+ });
+};
+
+function done(stream, er, data) {
+ if (er) return stream.emit('error', er);
+
+ if (data != null) // single equals check for both `null` and `undefined`
+ stream.push(data);
+
+ // if there's nothing in the write buffer, then that means
+ // that nothing more will ever be provided
+ if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
+
+ if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
+
+ return stream.push(null);
+}
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
new file mode 100644
index 00000000..e1e897ff
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
@@ -0,0 +1,685 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, encoding, cb), and it'll handle all
+// the drain event emission and buffering.
+
+'use strict';
+
+/**/
+
+var pna = require('process-nextick-args');
+/**/
+
+module.exports = Writable;
+
+/* */
+function WriteReq(chunk, encoding, cb) {
+ this.chunk = chunk;
+ this.encoding = encoding;
+ this.callback = cb;
+ this.next = null;
+}
+
+// It looks like a linked list, but it is not:
+// there will be at most two of these for each stream
+function CorkedRequest(state) {
+ var _this = this;
+
+ this.next = null;
+ this.entry = null;
+ this.finish = function () {
+ onCorkedFinish(_this, state);
+ };
+}
+/* */
+
+/**/
+var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
+/**/
+
+/**/
+var Duplex;
+/**/
+
+Writable.WritableState = WritableState;
+
+/**/
+var util = Object.create(require('core-util-is'));
+util.inherits = require('inherits');
+/**/
+
+/**/
+var internalUtil = {
+ deprecate: require('util-deprecate')
+};
+/**/
+
+/**/
+var Stream = require('./internal/streams/stream');
+/**/
+
+/**/
+
+var Buffer = require('safe-buffer').Buffer;
+var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
+function _uint8ArrayToBuffer(chunk) {
+ return Buffer.from(chunk);
+}
+function _isUint8Array(obj) {
+ return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
+}
+
+/**/
+
+var destroyImpl = require('./internal/streams/destroy');
+
+util.inherits(Writable, Stream);
+
+function nop() {}
+
+function WritableState(options, stream) {
+ Duplex = Duplex || require('./_stream_duplex');
+
+ options = options || {};
+
+ // Duplex streams are both readable and writable, but share
+ // the same options object.
+ // However, some cases require setting options to different
+ // values for the readable and the writable sides of the duplex stream.
+ // These options can be provided separately as readableXXX and writableXXX.
+ var isDuplex = stream instanceof Duplex;
+
+ // object stream flag to indicate whether or not this stream
+ // contains buffers or objects.
+ this.objectMode = !!options.objectMode;
+
+ if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
+
+ // the point at which write() starts returning false
+ // Note: 0 is a valid value, meaning that we always return false if
+ // the entire buffer is not flushed immediately on write().
+ var hwm = options.highWaterMark;
+ var writableHwm = options.writableHighWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+
+ if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
+
+ // cast to ints.
+ this.highWaterMark = Math.floor(this.highWaterMark);
+
+ // if _final has been called
+ this.finalCalled = false;
+
+ // drain event flag.
+ this.needDrain = false;
+ // at the start of calling end()
+ this.ending = false;
+ // when end() has been called, and returned
+ this.ended = false;
+ // when 'finish' is emitted
+ this.finished = false;
+
+ // has it been destroyed
+ this.destroyed = false;
+
+ // should we decode strings into buffers before passing to _write?
+ // this is here so that some node-core streams can optimize string
+ // handling at a lower level.
+ var noDecode = options.decodeStrings === false;
+ this.decodeStrings = !noDecode;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // not an actual buffer we keep track of, but a measurement
+ // of how much we're waiting to get pushed to some underlying
+ // socket or file.
+ this.length = 0;
+
+ // a flag to see when we're in the middle of a write.
+ this.writing = false;
+
+ // when true all writes will be buffered until .uncork() call
+ this.corked = 0;
+
+ // a flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, because any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ this.sync = true;
+
+ // a flag to know if we're processing previously buffered items, which
+ // may call the _write() callback in the same tick, so that we don't
+ // end up in an overlapped onwrite situation.
+ this.bufferProcessing = false;
+
+ // the callback that's passed to _write(chunk,cb)
+ this.onwrite = function (er) {
+ onwrite(stream, er);
+ };
+
+ // the callback that the user supplies to write(chunk,encoding,cb)
+ this.writecb = null;
+
+ // the amount that is being written when _write is called.
+ this.writelen = 0;
+
+ this.bufferedRequest = null;
+ this.lastBufferedRequest = null;
+
+ // number of pending user-supplied write callbacks
+ // this must be 0 before 'finish' can be emitted
+ this.pendingcb = 0;
+
+ // emit prefinish if the only thing we're waiting for is _write cbs
+ // This is relevant for synchronous Transform streams
+ this.prefinished = false;
+
+ // True if the error was already emitted and should not be thrown again
+ this.errorEmitted = false;
+
+ // count buffered requests
+ this.bufferedRequestCount = 0;
+
+ // allocate the first CorkedRequest, there is always
+ // one allocated and free to use, and we maintain at most two
+ this.corkedRequestsFree = new CorkedRequest(this);
+}
+
+WritableState.prototype.getBuffer = function getBuffer() {
+ var current = this.bufferedRequest;
+ var out = [];
+ while (current) {
+ out.push(current);
+ current = current.next;
+ }
+ return out;
+};
+
+(function () {
+ try {
+ Object.defineProperty(WritableState.prototype, 'buffer', {
+ get: internalUtil.deprecate(function () {
+ return this.getBuffer();
+ }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
+ });
+ } catch (_) {}
+})();
+
+// Test _writableState for inheritance to account for Duplex streams,
+// whose prototype chain only points to Readable.
+var realHasInstance;
+if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
+ realHasInstance = Function.prototype[Symbol.hasInstance];
+ Object.defineProperty(Writable, Symbol.hasInstance, {
+ value: function (object) {
+ if (realHasInstance.call(this, object)) return true;
+ if (this !== Writable) return false;
+
+ return object && object._writableState instanceof WritableState;
+ }
+ });
+} else {
+ realHasInstance = function (object) {
+ return object instanceof this;
+ };
+}
+
+function Writable(options) {
+ Duplex = Duplex || require('./_stream_duplex');
+
+ // Writable ctor is applied to Duplexes, too.
+ // `realHasInstance` is necessary because using plain `instanceof`
+ // would return false, as no `_writableState` property is attached.
+
+ // Trying to use the custom `instanceof` for Writable here will also break the
+ // Node.js LazyTransform implementation, which has a non-trivial getter for
+ // `_writableState` that would lead to infinite recursion.
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
+ return new Writable(options);
+ }
+
+ this._writableState = new WritableState(options, this);
+
+ // legacy.
+ this.writable = true;
+
+ if (options) {
+ if (typeof options.write === 'function') this._write = options.write;
+
+ if (typeof options.writev === 'function') this._writev = options.writev;
+
+ if (typeof options.destroy === 'function') this._destroy = options.destroy;
+
+ if (typeof options.final === 'function') this._final = options.final;
+ }
+
+ Stream.call(this);
+}
+
+// Otherwise people can pipe Writable streams, which is just wrong.
+Writable.prototype.pipe = function () {
+ this.emit('error', new Error('Cannot pipe, not readable'));
+};
+
+function writeAfterEnd(stream, cb) {
+ var er = new Error('write after end');
+ // TODO: defer error events consistently everywhere, not just the cb
+ stream.emit('error', er);
+ pna.nextTick(cb, er);
+}
+
+// Checks that a user-supplied chunk is valid, especially for the particular
+// mode the stream is in. Currently this means that `null` is never accepted
+// and undefined/non-string values are only allowed in object mode.
+function validChunk(stream, state, chunk, cb) {
+ var valid = true;
+ var er = false;
+
+ if (chunk === null) {
+ er = new TypeError('May not write null values to stream');
+ } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+ er = new TypeError('Invalid non-string/buffer chunk');
+ }
+ if (er) {
+ stream.emit('error', er);
+ pna.nextTick(cb, er);
+ valid = false;
+ }
+ return valid;
+}
+
+Writable.prototype.write = function (chunk, encoding, cb) {
+ var state = this._writableState;
+ var ret = false;
+ var isBuf = !state.objectMode && _isUint8Array(chunk);
+
+ if (isBuf && !Buffer.isBuffer(chunk)) {
+ chunk = _uint8ArrayToBuffer(chunk);
+ }
+
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
+
+ if (typeof cb !== 'function') cb = nop;
+
+ if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
+ state.pendingcb++;
+ ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
+ }
+
+ return ret;
+};
+
+Writable.prototype.cork = function () {
+ var state = this._writableState;
+
+ state.corked++;
+};
+
+Writable.prototype.uncork = function () {
+ var state = this._writableState;
+
+ if (state.corked) {
+ state.corked--;
+
+ if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
+ }
+};
+
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+ // node::ParseEncoding() requires lower case.
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase();
+ if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
+ this._writableState.defaultEncoding = encoding;
+ return this;
+};
+
+function decodeChunk(state, chunk, encoding) {
+ if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
+ chunk = Buffer.from(chunk, encoding);
+ }
+ return chunk;
+}
+
+Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
+ // making it explicit this property is not enumerable
+ // because otherwise some prototype manipulation in
+ // userland will fail
+ enumerable: false,
+ get: function () {
+ return this._writableState.highWaterMark;
+ }
+});
+
+// if we're already writing something, then just put this
+// in the queue, and wait our turn. Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
+ if (!isBuf) {
+ var newChunk = decodeChunk(state, chunk, encoding);
+ if (chunk !== newChunk) {
+ isBuf = true;
+ encoding = 'buffer';
+ chunk = newChunk;
+ }
+ }
+ var len = state.objectMode ? 1 : chunk.length;
+
+ state.length += len;
+
+ var ret = state.length < state.highWaterMark;
+ // we must ensure that previous needDrain will not be reset to false.
+ if (!ret) state.needDrain = true;
+
+ if (state.writing || state.corked) {
+ var last = state.lastBufferedRequest;
+ state.lastBufferedRequest = {
+ chunk: chunk,
+ encoding: encoding,
+ isBuf: isBuf,
+ callback: cb,
+ next: null
+ };
+ if (last) {
+ last.next = state.lastBufferedRequest;
+ } else {
+ state.bufferedRequest = state.lastBufferedRequest;
+ }
+ state.bufferedRequestCount += 1;
+ } else {
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ }
+
+ return ret;
+}
+
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
+ state.writelen = len;
+ state.writecb = cb;
+ state.writing = true;
+ state.sync = true;
+ if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
+ state.sync = false;
+}
+
+function onwriteError(stream, state, sync, er, cb) {
+ --state.pendingcb;
+
+ if (sync) {
+ // defer the callback if we are being called synchronously
+ // to avoid piling up things on the stack
+ pna.nextTick(cb, er);
+ // this can emit finish, and it will always happen
+ // after error
+ pna.nextTick(finishMaybe, stream, state);
+ stream._writableState.errorEmitted = true;
+ stream.emit('error', er);
+ } else {
+ // when the write is async, the caller expects the callback
+ // to be invoked before the 'error' event
+ cb(er);
+ stream._writableState.errorEmitted = true;
+ stream.emit('error', er);
+ // this can emit finish, but finish must
+ // always follow error
+ finishMaybe(stream, state);
+ }
+}
+
+function onwriteStateUpdate(state) {
+ state.writing = false;
+ state.writecb = null;
+ state.length -= state.writelen;
+ state.writelen = 0;
+}
+
+function onwrite(stream, er) {
+ var state = stream._writableState;
+ var sync = state.sync;
+ var cb = state.writecb;
+
+ onwriteStateUpdate(state);
+
+ if (er) onwriteError(stream, state, sync, er, cb);else {
+ // Check if we're actually ready to finish, but don't emit yet
+ var finished = needFinish(state);
+
+ if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
+ clearBuffer(stream, state);
+ }
+
+ if (sync) {
+ /**/
+ asyncWrite(afterWrite, stream, state, finished, cb);
+ /**/
+ } else {
+ afterWrite(stream, state, finished, cb);
+ }
+ }
+}
+
+function afterWrite(stream, state, finished, cb) {
+ if (!finished) onwriteDrain(stream, state);
+ state.pendingcb--;
+ cb();
+ finishMaybe(stream, state);
+}
+
+// Must force callback to be called on nextTick, so that we don't
+// emit 'drain' before the write() consumer gets the 'false' return
+// value, and has a chance to attach a 'drain' listener.
+function onwriteDrain(stream, state) {
+ if (state.length === 0 && state.needDrain) {
+ state.needDrain = false;
+ stream.emit('drain');
+ }
+}
+
+// if there's something in the buffer waiting, then process it
+function clearBuffer(stream, state) {
+ state.bufferProcessing = true;
+ var entry = state.bufferedRequest;
+
+ if (stream._writev && entry && entry.next) {
+ // Fast case, write everything using _writev()
+ var l = state.bufferedRequestCount;
+ var buffer = new Array(l);
+ var holder = state.corkedRequestsFree;
+ holder.entry = entry;
+
+ var count = 0;
+ var allBuffers = true;
+ while (entry) {
+ buffer[count] = entry;
+ if (!entry.isBuf) allBuffers = false;
+ entry = entry.next;
+ count += 1;
+ }
+ buffer.allBuffers = allBuffers;
+
+ doWrite(stream, state, true, state.length, buffer, '', holder.finish);
+
+ // doWrite is almost always async, defer these to save a bit of time
+ // as the hot path ends with doWrite
+ state.pendingcb++;
+ state.lastBufferedRequest = null;
+ if (holder.next) {
+ state.corkedRequestsFree = holder.next;
+ holder.next = null;
+ } else {
+ state.corkedRequestsFree = new CorkedRequest(state);
+ }
+ state.bufferedRequestCount = 0;
+ } else {
+ // Slow case, write chunks one-by-one
+ while (entry) {
+ var chunk = entry.chunk;
+ var encoding = entry.encoding;
+ var cb = entry.callback;
+ var len = state.objectMode ? 1 : chunk.length;
+
+ doWrite(stream, state, false, len, chunk, encoding, cb);
+ entry = entry.next;
+ state.bufferedRequestCount--;
+ // if we didn't call the onwrite immediately, then
+ // it means that we need to wait until it does.
+ // also, that means that the chunk and cb are currently
+ // being processed, so move the buffer counter past them.
+ if (state.writing) {
+ break;
+ }
+ }
+
+ if (entry === null) state.lastBufferedRequest = null;
+ }
+
+ state.bufferedRequest = entry;
+ state.bufferProcessing = false;
+}
+
+Writable.prototype._write = function (chunk, encoding, cb) {
+ cb(new Error('_write() is not implemented'));
+};
+
+Writable.prototype._writev = null;
+
+Writable.prototype.end = function (chunk, encoding, cb) {
+ var state = this._writableState;
+
+ if (typeof chunk === 'function') {
+ cb = chunk;
+ chunk = null;
+ encoding = null;
+ } else if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
+
+ // .end() fully uncorks
+ if (state.corked) {
+ state.corked = 1;
+ this.uncork();
+ }
+
+ // ignore unnecessary end() calls.
+ if (!state.ending) endWritable(this, state, cb);
+};
+
+function needFinish(state) {
+ return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
+}
+function callFinal(stream, state) {
+ stream._final(function (err) {
+ state.pendingcb--;
+ if (err) {
+ stream.emit('error', err);
+ }
+ state.prefinished = true;
+ stream.emit('prefinish');
+ finishMaybe(stream, state);
+ });
+}
+function prefinish(stream, state) {
+ if (!state.prefinished && !state.finalCalled) {
+ if (typeof stream._final === 'function') {
+ state.pendingcb++;
+ state.finalCalled = true;
+ pna.nextTick(callFinal, stream, state);
+ } else {
+ state.prefinished = true;
+ stream.emit('prefinish');
+ }
+ }
+}
+
+function finishMaybe(stream, state) {
+ var need = needFinish(state);
+ if (need) {
+ prefinish(stream, state);
+ if (state.pendingcb === 0) {
+ state.finished = true;
+ stream.emit('finish');
+ }
+ }
+ return need;
+}
+
+function endWritable(stream, state, cb) {
+ state.ending = true;
+ finishMaybe(stream, state);
+ if (cb) {
+ if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
+ }
+ state.ended = true;
+ stream.writable = false;
+}
+
+function onCorkedFinish(corkReq, state, err) {
+ var entry = corkReq.entry;
+ corkReq.entry = null;
+ while (entry) {
+ var cb = entry.callback;
+ state.pendingcb--;
+ cb(err);
+ entry = entry.next;
+ }
+
+ // reuse the free corkReq.
+ state.corkedRequestsFree.next = corkReq;
+}
+
+Object.defineProperty(Writable.prototype, 'destroyed', {
+ get: function () {
+ if (this._writableState === undefined) {
+ return false;
+ }
+ return this._writableState.destroyed;
+ },
+ set: function (value) {
+ // we ignore the value if the stream
+ // has not been initialized yet
+ if (!this._writableState) {
+ return;
+ }
+
+ // backward compatibility, the user is explicitly
+ // managing destroyed
+ this._writableState.destroyed = value;
+ }
+});
+
+Writable.prototype.destroy = destroyImpl.destroy;
+Writable.prototype._undestroy = destroyImpl.undestroy;
+Writable.prototype._destroy = function (err, cb) {
+ this.end();
+ cb(err);
+};
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/BufferList.js
new file mode 100644
index 00000000..5e080976
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/BufferList.js
@@ -0,0 +1,78 @@
+'use strict';
+
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
+
+var Buffer = require('safe-buffer').Buffer;
+var util = require('util');
+
+function copyBuffer(src, target, offset) {
+ src.copy(target, offset);
+}
+
+module.exports = function () {
+ function BufferList() {
+ _classCallCheck(this, BufferList);
+
+ this.head = null;
+ this.tail = null;
+ this.length = 0;
+ }
+
+ BufferList.prototype.push = function push(v) {
+ var entry = { data: v, next: null };
+ if (this.length > 0) this.tail.next = entry;else this.head = entry;
+ this.tail = entry;
+ ++this.length;
+ };
+
+ BufferList.prototype.unshift = function unshift(v) {
+ var entry = { data: v, next: this.head };
+ if (this.length === 0) this.tail = entry;
+ this.head = entry;
+ ++this.length;
+ };
+
+ BufferList.prototype.shift = function shift() {
+ if (this.length === 0) return;
+ var ret = this.head.data;
+ if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
+ --this.length;
+ return ret;
+ };
+
+ BufferList.prototype.clear = function clear() {
+ this.head = this.tail = null;
+ this.length = 0;
+ };
+
+ BufferList.prototype.join = function join(s) {
+ if (this.length === 0) return '';
+ var p = this.head;
+ var ret = '' + p.data;
+ while (p = p.next) {
+ ret += s + p.data;
+ }return ret;
+ };
+
+ BufferList.prototype.concat = function concat(n) {
+ if (this.length === 0) return Buffer.alloc(0);
+ var ret = Buffer.allocUnsafe(n >>> 0);
+ var p = this.head;
+ var i = 0;
+ while (p) {
+ copyBuffer(p.data, ret, i);
+ i += p.data.length;
+ p = p.next;
+ }
+ return ret;
+ };
+
+ return BufferList;
+}();
+
+if (util && util.inspect && util.inspect.custom) {
+ module.exports.prototype[util.inspect.custom] = function () {
+ var obj = util.inspect({ length: this.length });
+ return this.constructor.name + ' ' + obj;
+ };
+}
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js
new file mode 100644
index 00000000..85a82140
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js
@@ -0,0 +1,84 @@
+'use strict';
+
+/**/
+
+var pna = require('process-nextick-args');
+/**/
+
+// undocumented cb() API, needed for core, not for public API
+function destroy(err, cb) {
+ var _this = this;
+
+ var readableDestroyed = this._readableState && this._readableState.destroyed;
+ var writableDestroyed = this._writableState && this._writableState.destroyed;
+
+ if (readableDestroyed || writableDestroyed) {
+ if (cb) {
+ cb(err);
+ } else if (err) {
+ if (!this._writableState) {
+ pna.nextTick(emitErrorNT, this, err);
+ } else if (!this._writableState.errorEmitted) {
+ this._writableState.errorEmitted = true;
+ pna.nextTick(emitErrorNT, this, err);
+ }
+ }
+
+ return this;
+ }
+
+ // we set destroyed to true before firing error callbacks in order
+ // to make it re-entrance safe in case destroy() is called within callbacks
+
+ if (this._readableState) {
+ this._readableState.destroyed = true;
+ }
+
+ // if this is a duplex stream mark the writable part as destroyed as well
+ if (this._writableState) {
+ this._writableState.destroyed = true;
+ }
+
+ this._destroy(err || null, function (err) {
+ if (!cb && err) {
+ if (!_this._writableState) {
+ pna.nextTick(emitErrorNT, _this, err);
+ } else if (!_this._writableState.errorEmitted) {
+ _this._writableState.errorEmitted = true;
+ pna.nextTick(emitErrorNT, _this, err);
+ }
+ } else if (cb) {
+ cb(err);
+ }
+ });
+
+ return this;
+}
+
+function undestroy() {
+ if (this._readableState) {
+ this._readableState.destroyed = false;
+ this._readableState.reading = false;
+ this._readableState.ended = false;
+ this._readableState.endEmitted = false;
+ }
+
+ if (this._writableState) {
+ this._writableState.destroyed = false;
+ this._writableState.ended = false;
+ this._writableState.ending = false;
+ this._writableState.finalCalled = false;
+ this._writableState.prefinished = false;
+ this._writableState.finished = false;
+ this._writableState.errorEmitted = false;
+ }
+}
+
+function emitErrorNT(self, err) {
+ self.emit('error', err);
+}
+
+module.exports = {
+ destroy: destroy,
+ undestroy: undestroy
+};
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js
new file mode 100644
index 00000000..9332a3fd
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js
@@ -0,0 +1 @@
+module.exports = require('events').EventEmitter;
diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js
new file mode 100644
index 00000000..ce2ad5b6
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js
@@ -0,0 +1 @@
+module.exports = require('stream');
diff --git a/node_modules/bl/node_modules/readable-stream/package.json b/node_modules/bl/node_modules/readable-stream/package.json
new file mode 100644
index 00000000..514c178e
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/package.json
@@ -0,0 +1,52 @@
+{
+ "name": "readable-stream",
+ "version": "2.3.8",
+ "description": "Streams3, a user-land copy of the stream library from Node.js",
+ "main": "readable.js",
+ "dependencies": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.3",
+ "isarray": "~1.0.0",
+ "process-nextick-args": "~2.0.0",
+ "safe-buffer": "~5.1.1",
+ "string_decoder": "~1.1.1",
+ "util-deprecate": "~1.0.1"
+ },
+ "devDependencies": {
+ "assert": "^1.4.0",
+ "babel-polyfill": "^6.9.1",
+ "buffer": "^4.9.0",
+ "lolex": "^2.3.2",
+ "nyc": "^6.4.0",
+ "tap": "^0.7.0",
+ "tape": "^4.8.0"
+ },
+ "scripts": {
+ "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js",
+ "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
+ "cover": "nyc npm test",
+ "report": "nyc report --reporter=lcov"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/nodejs/readable-stream"
+ },
+ "keywords": [
+ "readable",
+ "stream",
+ "pipe"
+ ],
+ "browser": {
+ "util": false,
+ "./readable.js": "./readable-browser.js",
+ "./writable.js": "./writable-browser.js",
+ "./duplex.js": "./duplex-browser.js",
+ "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
+ },
+ "nyc": {
+ "include": [
+ "lib/**.js"
+ ]
+ },
+ "license": "MIT"
+}
diff --git a/node_modules/bl/node_modules/readable-stream/passthrough.js b/node_modules/bl/node_modules/readable-stream/passthrough.js
new file mode 100644
index 00000000..ffd791d7
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/passthrough.js
@@ -0,0 +1 @@
+module.exports = require('./readable').PassThrough
diff --git a/node_modules/bl/node_modules/readable-stream/readable-browser.js b/node_modules/bl/node_modules/readable-stream/readable-browser.js
new file mode 100644
index 00000000..e5037259
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/readable-browser.js
@@ -0,0 +1,7 @@
+exports = module.exports = require('./lib/_stream_readable.js');
+exports.Stream = exports;
+exports.Readable = exports;
+exports.Writable = require('./lib/_stream_writable.js');
+exports.Duplex = require('./lib/_stream_duplex.js');
+exports.Transform = require('./lib/_stream_transform.js');
+exports.PassThrough = require('./lib/_stream_passthrough.js');
diff --git a/node_modules/bl/node_modules/readable-stream/readable.js b/node_modules/bl/node_modules/readable-stream/readable.js
new file mode 100644
index 00000000..ec89ec53
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/readable.js
@@ -0,0 +1,19 @@
+var Stream = require('stream');
+if (process.env.READABLE_STREAM === 'disable' && Stream) {
+ module.exports = Stream;
+ exports = module.exports = Stream.Readable;
+ exports.Readable = Stream.Readable;
+ exports.Writable = Stream.Writable;
+ exports.Duplex = Stream.Duplex;
+ exports.Transform = Stream.Transform;
+ exports.PassThrough = Stream.PassThrough;
+ exports.Stream = Stream;
+} else {
+ exports = module.exports = require('./lib/_stream_readable.js');
+ exports.Stream = Stream || exports;
+ exports.Readable = exports;
+ exports.Writable = require('./lib/_stream_writable.js');
+ exports.Duplex = require('./lib/_stream_duplex.js');
+ exports.Transform = require('./lib/_stream_transform.js');
+ exports.PassThrough = require('./lib/_stream_passthrough.js');
+}
diff --git a/node_modules/bl/node_modules/readable-stream/transform.js b/node_modules/bl/node_modules/readable-stream/transform.js
new file mode 100644
index 00000000..b1baba26
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/transform.js
@@ -0,0 +1 @@
+module.exports = require('./readable').Transform
diff --git a/node_modules/bl/node_modules/readable-stream/writable-browser.js b/node_modules/bl/node_modules/readable-stream/writable-browser.js
new file mode 100644
index 00000000..ebdde6a8
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/writable-browser.js
@@ -0,0 +1 @@
+module.exports = require('./lib/_stream_writable.js');
diff --git a/node_modules/bl/node_modules/readable-stream/writable.js b/node_modules/bl/node_modules/readable-stream/writable.js
new file mode 100644
index 00000000..3211a6f8
--- /dev/null
+++ b/node_modules/bl/node_modules/readable-stream/writable.js
@@ -0,0 +1,8 @@
+var Stream = require("stream")
+var Writable = require("./lib/_stream_writable.js")
+
+if (process.env.READABLE_STREAM === 'disable') {
+ module.exports = Stream && Stream.Writable || Writable
+} else {
+ module.exports = Writable
+}
diff --git a/node_modules/bl/node_modules/safe-buffer/LICENSE b/node_modules/bl/node_modules/safe-buffer/LICENSE
new file mode 100644
index 00000000..0c068cee
--- /dev/null
+++ b/node_modules/bl/node_modules/safe-buffer/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Feross Aboukhadijeh
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/bl/node_modules/safe-buffer/README.md b/node_modules/bl/node_modules/safe-buffer/README.md
new file mode 100644
index 00000000..e9a81afd
--- /dev/null
+++ b/node_modules/bl/node_modules/safe-buffer/README.md
@@ -0,0 +1,584 @@
+# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
+
+[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg
+[travis-url]: https://travis-ci.org/feross/safe-buffer
+[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg
+[npm-url]: https://npmjs.org/package/safe-buffer
+[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg
+[downloads-url]: https://npmjs.org/package/safe-buffer
+[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
+[standard-url]: https://standardjs.com
+
+#### Safer Node.js Buffer API
+
+**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`,
+`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.**
+
+**Uses the built-in implementation when available.**
+
+## install
+
+```
+npm install safe-buffer
+```
+
+## usage
+
+The goal of this package is to provide a safe replacement for the node.js `Buffer`.
+
+It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to
+the top of your node.js modules:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+// Existing buffer code will continue to work without issues:
+
+new Buffer('hey', 'utf8')
+new Buffer([1, 2, 3], 'utf8')
+new Buffer(obj)
+new Buffer(16) // create an uninitialized buffer (potentially unsafe)
+
+// But you can use these new explicit APIs to make clear what you want:
+
+Buffer.from('hey', 'utf8') // convert from many types to a Buffer
+Buffer.alloc(16) // create a zero-filled buffer (safe)
+Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
+```
+
+## api
+
+### Class Method: Buffer.from(array)
+
+
+* `array` {Array}
+
+Allocates a new `Buffer` using an `array` of octets.
+
+```js
+const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
+ // creates a new Buffer containing ASCII bytes
+ // ['b','u','f','f','e','r']
+```
+
+A `TypeError` will be thrown if `array` is not an `Array`.
+
+### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
+
+
+* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
+ a `new ArrayBuffer()`
+* `byteOffset` {Number} Default: `0`
+* `length` {Number} Default: `arrayBuffer.length - byteOffset`
+
+When passed a reference to the `.buffer` property of a `TypedArray` instance,
+the newly created `Buffer` will share the same allocated memory as the
+TypedArray.
+
+```js
+const arr = new Uint16Array(2);
+arr[0] = 5000;
+arr[1] = 4000;
+
+const buf = Buffer.from(arr.buffer); // shares the memory with arr;
+
+console.log(buf);
+ // Prints: <Buffer 88 13 a0 0f>
+
+// changing the TypedArray changes the Buffer also
+arr[1] = 6000;
+
+console.log(buf);
+ // Prints: <Buffer 88 13 70 17>
+```
+
+The optional `byteOffset` and `length` arguments specify a memory range within
+the `arrayBuffer` that will be shared by the `Buffer`.
+
+```js
+const ab = new ArrayBuffer(10);
+const buf = Buffer.from(ab, 0, 2);
+console.log(buf.length);
+ // Prints: 2
+```
+
+A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
+
+### Class Method: Buffer.from(buffer)
+
+
+* `buffer` {Buffer}
+
+Copies the passed `buffer` data onto a new `Buffer` instance.
+
+```js
+const buf1 = Buffer.from('buffer');
+const buf2 = Buffer.from(buf1);
+
+buf1[0] = 0x61;
+console.log(buf1.toString());
+ // 'auffer'
+console.log(buf2.toString());
+ // 'buffer' (copy is not changed)
+```
+
+A `TypeError` will be thrown if `buffer` is not a `Buffer`.
+
+### Class Method: Buffer.from(str[, encoding])
+
+
+* `str` {String} String to encode.
+* `encoding` {String} Encoding to use, Default: `'utf8'`
+
+Creates a new `Buffer` containing the given JavaScript string `str`. If
+provided, the `encoding` parameter identifies the character encoding.
+If not provided, `encoding` defaults to `'utf8'`.
+
+```js
+const buf1 = Buffer.from('this is a tést');
+console.log(buf1.toString());
+ // prints: this is a tést
+console.log(buf1.toString('ascii'));
+ // prints: this is a tC)st
+
+const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
+console.log(buf2.toString());
+ // prints: this is a tést
+```
+
+A `TypeError` will be thrown if `str` is not a string.
+
+### Class Method: Buffer.alloc(size[, fill[, encoding]])
+
+
+* `size` {Number}
+* `fill` {Value} Default: `undefined`
+* `encoding` {String} Default: `utf8`
+
+Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
+`Buffer` will be *zero-filled*.
+
+```js
+const buf = Buffer.alloc(5);
+console.log(buf);
+ // <Buffer 00 00 00 00 00>
+```
+
+The `size` must be less than or equal to the value of
+`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
+`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
+be created if a `size` less than or equal to 0 is specified.
+
+If `fill` is specified, the allocated `Buffer` will be initialized by calling
+`buf.fill(fill)`. See [`buf.fill()`][] for more information.
+
+```js
+const buf = Buffer.alloc(5, 'a');
+console.log(buf);
+ // <Buffer 61 61 61 61 61>
+```
+
+If both `fill` and `encoding` are specified, the allocated `Buffer` will be
+initialized by calling `buf.fill(fill, encoding)`. For example:
+
+```js
+const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
+console.log(buf);
+ // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
+```
+
+Calling `Buffer.alloc(size)` can be significantly slower than the alternative
+`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
+contents will *never contain sensitive data*.
+
+A `TypeError` will be thrown if `size` is not a number.
+
+### Class Method: Buffer.allocUnsafe(size)
+
+
+* `size` {Number}
+
+Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
+be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
+architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
+thrown. A zero-length Buffer will be created if a `size` less than or equal to
+0 is specified.
+
+The underlying memory for `Buffer` instances created in this way is *not
+initialized*. The contents of the newly created `Buffer` are unknown and
+*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
+`Buffer` instances to zeroes.
+
+```js
+const buf = Buffer.allocUnsafe(5);
+console.log(buf);
+ // <Buffer 78 e0 82 02 01>
+ // (octets will be different, every time)
+buf.fill(0);
+console.log(buf);
+ // <Buffer 00 00 00 00 00>
+```
+
+A `TypeError` will be thrown if `size` is not a number.
+
+Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
+size `Buffer.poolSize` that is used as a pool for the fast allocation of new
+`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
+`new Buffer(size)` constructor) only when `size` is less than or equal to
+`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
+value of `Buffer.poolSize` is `8192` but can be modified.
+
+Use of this pre-allocated internal memory pool is a key difference between
+calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
+Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
+pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
+Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
+difference is subtle but can be important when an application requires the
+additional performance that `Buffer.allocUnsafe(size)` provides.
+
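+To make the pool behavior concrete, here is a minimal sketch; both calls below yield a
+zero-filled 1000-byte `Buffer`, and only the allocation strategy differs:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+var a = Buffer.alloc(1000, 0)            // fresh, non-pooled memory, filled with zeroes
+var b = Buffer.allocUnsafe(1000).fill(0) // sliced from the pool (1000 <= Buffer.poolSize >> 1), then zeroed
+```
+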
+### Class Method: Buffer.allocUnsafeSlow(size)
+
+
+* `size` {Number}
+
+Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
+`size` must be less than or equal to the value of
+`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
+`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
+be created if a `size` less than or equal to 0 is specified.
+
+The underlying memory for `Buffer` instances created in this way is *not
+initialized*. The contents of the newly created `Buffer` are unknown and
+*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
+`Buffer` instances to zeroes.
+
+When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
+allocations under 4KB are, by default, sliced from a single pre-allocated
+`Buffer`. This allows applications to avoid the garbage collection overhead of
+creating many individually allocated Buffers. This approach improves both
+performance and memory usage by eliminating the need to track and cleanup as
+many `Persistent` objects.
+
+However, in the case where a developer may need to retain a small chunk of
+memory from a pool for an indeterminate amount of time, it may be appropriate
+to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then
+copy out the relevant bits.
+
+```js
+// need to keep around a few small chunks of memory
+const store = [];
+
+socket.on('readable', () => {
+ const data = socket.read();
+ // allocate for retained data
+ const sb = Buffer.allocUnsafeSlow(10);
+ // copy the data into the new allocation
+ data.copy(sb, 0, 0, 10);
+ store.push(sb);
+});
+```
+
+`Buffer.allocUnsafeSlow()` should be used only as a last resort, *after*
+a developer has observed undue memory retention in their applications.
+
+A `TypeError` will be thrown if `size` is not a number.
+
+### All the Rest
+
+The rest of the `Buffer` API is exactly the same as in node.js.
+[See the docs](https://nodejs.org/api/buffer.html).
+
+
+## Related links
+
+- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660)
+- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4)
+
+## Why is `Buffer` unsafe?
+
+Today, the node.js `Buffer` constructor is overloaded to handle many different argument
+types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.),
+`ArrayBuffer`, and also `Number`.
+
+The API is optimized for convenience: you can throw any type at it, and it will try to do
+what you want.
+
+Because the Buffer constructor is so powerful, you often see code like this:
+
+```js
+// Convert UTF-8 strings to hex
+function toHex (str) {
+ return new Buffer(str).toString('hex')
+}
+```
+
+***But what happens if `toHex` is called with a `Number` argument?***
+
+### Remote Memory Disclosure
+
+If an attacker can make your program call the `Buffer` constructor with a `Number`
+argument, then they can make it allocate uninitialized memory from the node.js process.
+This could potentially disclose TLS private keys, user data, or database passwords.
+
+When the `Buffer` constructor is passed a `Number` argument, it returns an
+**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like
+this, you **MUST** overwrite the contents before returning it to the user.
+
+From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size):
+
+> `new Buffer(size)`
+>
+> - `size` Number
+>
+> The underlying memory for `Buffer` instances created in this way is not initialized.
+> **The contents of a newly created `Buffer` are unknown and could contain sensitive
+> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes.
+
+(Emphasis our own.)
+
+Whenever the programmer intends to create an uninitialized `Buffer`, you often see code
+like this:
+
+```js
+var buf = new Buffer(16)
+
+// Immediately overwrite the uninitialized buffer with data from another buffer
+for (var i = 0; i < buf.length; i++) {
+ buf[i] = otherBuf[i]
+}
+```
+
+
+### Would this ever be a problem in real code?
+
+Yes. It's surprisingly common to forget to check the type of your variables in a
+dynamically-typed language like JavaScript.
+
+Usually the consequence of assuming the wrong type is that your program crashes with an
+uncaught exception. But the failure mode for forgetting to check the type of arguments to
+the `Buffer` constructor is more catastrophic.
+
+Here's an example of a vulnerable service that takes a JSON payload and converts it to
+hex:
+
+```js
+// Take a JSON payload {str: "some string"} and convert it to hex
+var server = http.createServer(function (req, res) {
+ var data = ''
+ req.setEncoding('utf8')
+ req.on('data', function (chunk) {
+ data += chunk
+ })
+ req.on('end', function () {
+ var body = JSON.parse(data)
+ res.end(new Buffer(body.str).toString('hex'))
+ })
+})
+
+server.listen(8080)
+```
+
+In this example, an http client just has to send:
+
+```json
+{
+ "str": 1000
+}
+```
+
+and it will get back 1,000 bytes of uninitialized memory from the server.
+
+This is a very serious bug. It's similar in severity to
+[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process
+memory by remote attackers.
+
+
+### Which real-world packages were vulnerable?
+
+#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht)
+
+[Mathias Buus](https://github.com/mafintosh) and I
+([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages,
+[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow
+anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get
+them to reveal 20 bytes at a time of uninitialized memory from the node.js process.
+
+Here's
+[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8)
+that fixed it. We released a new fixed version, created a
+[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
+vulnerable versions on npm so users will get a warning to upgrade to a newer version.
+
+#### [`ws`](https://www.npmjs.com/package/ws)
+
+That got us wondering if there were other vulnerable packages. Sure enough, within a short
+period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
+most popular WebSocket implementation in node.js.
+
+If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
+expected, then uninitialized server memory would be disclosed to the remote peer.
+
+These were the vulnerable methods:
+
+```js
+socket.send(number)
+socket.ping(number)
+socket.pong(number)
+```
+
+Here's a vulnerable socket server with some echo functionality:
+
+```js
+server.on('connection', function (socket) {
+ socket.on('message', function (message) {
+ message = JSON.parse(message)
+ if (message.type === 'echo') {
+ socket.send(message.data) // send back the user's message
+ }
+ })
+})
+```
+
+`socket.send(number)`, called on the server, will disclose server memory.
+
+Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
+was fixed, with a more detailed explanation. Props to
+[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
+[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
+
+
+### What's the solution?
+
+It's important that node.js offers a fast way to get memory; otherwise, performance-critical
+applications would needlessly get a lot slower.
+
+But we need a better way to *signal our intent* as programmers. **When we want
+uninitialized memory, we should request it explicitly.**
+
+Sensitive functionality should not be packed into a developer-friendly API that loosely
+accepts many different types. This type of API encourages the lazy practice of passing
+variables in without checking the type very carefully.
+
+#### A new API: `Buffer.allocUnsafe(number)`
+
+The functionality of creating buffers with uninitialized memory should be part of another
+API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
+frequently gets user input of all sorts of different types passed into it.
+
+```js
+var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
+
+// Immediately overwrite the uninitialized buffer with data from another buffer
+for (var i = 0; i < buf.length; i++) {
+ buf[i] = otherBuf[i]
+}
+```
+
+
+### How do we fix node.js core?
+
+We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
+`semver-major`) which defends against one case:
+
+```js
+var str = 16
+new Buffer(str, 'utf8')
+```
+
+In this situation, it's implied that the programmer intended the first argument to be a
+string, since they passed an encoding as a second argument. Today, node.js will allocate
+uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
+what the programmer intended.
+
+But this is only a partial solution, since if the programmer does `new Buffer(variable)`
+(without an `encoding` parameter) there's no way to know what they intended. If `variable`
+is sometimes a number, then uninitialized memory will sometimes be returned.
+
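+This is the gap `safe-buffer` closes in userland: its `Buffer.from` rejects numbers
+outright, so the worst case becomes a thrown error instead of a memory disclosure. As a
+minimal sketch, here is the `toHex` example from above rewritten on top of it:
+
+```js
+var Buffer = require('safe-buffer').Buffer
+
+function toHex (str) {
+  // Buffer.from throws a TypeError when handed a number,
+  // rather than returning uninitialized memory
+  return Buffer.from(str).toString('hex')
+}
+
+toHex('hey') // => '686579'
+toHex(1000)  // => throws TypeError: Argument must not be a number
+```
+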
+### What's the real long-term fix?
+
+We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
+we need uninitialized memory. But that would break 1000s of packages.
+
+~~We believe the best solution is to:~~
+
+~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
+
+~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
+
+#### Update
+
+We now support adding three new APIs:
+
+- `Buffer.from(value)` - convert from any type to a buffer
+- `Buffer.alloc(size)` - create a zero-filled buffer
+- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size
+
+This solves the core problem that affected `ws` and `bittorrent-dht`, which is
+`Buffer(variable)` getting tricked into taking a number argument.
+
+This way, existing code continues working and the impact on the npm ecosystem will be
+minimal. Over time, npm maintainers can migrate performance-critical code to use
+`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
+
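+As a rough sketch, a typical call-site migration looks like this (the right replacement
+depends on what the original code intended):
+
+```js
+var a = new Buffer('abc', 'utf8') // -> Buffer.from('abc', 'utf8')
+var b = new Buffer([1, 2, 3])     // -> Buffer.from([1, 2, 3])
+var c = new Buffer(16)            // -> Buffer.alloc(16), safe and zero-filled
+var d = new Buffer(16)            // -> Buffer.allocUnsafe(16), hot paths only; overwrite before use
+```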
+
+### Conclusion
+
+We think there's a serious design issue with the `Buffer` API as it exists today. It
+promotes insecure software by putting high-risk functionality into a convenient API
+with friendly "developer ergonomics".
+
+This wasn't merely a theoretical exercise because we found the issue in some of the
+most popular npm packages.
+
+Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
+`buffer`.
+
+```js
+var Buffer = require('safe-buffer').Buffer
+```
+
+Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
+the impact on the ecosystem would be minimal since it's not a breaking change.
+Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
+older, insecure packages would magically become safe from this attack vector.
+
+
+## links
+
+- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
+- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
+- [Node Security Project disclosure for `bittorrent-dht`](https://nodesecurity.io/advisories/68)
+
+
+## credit
+
+The original issues in `bittorrent-dht`
+([disclosure](https://nodesecurity.io/advisories/68)) and
+`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
+[Mathias Buus](https://github.com/mafintosh) and
+[Feross Aboukhadijeh](http://feross.org/).
+
+Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
+and for his work running the [Node Security Project](https://nodesecurity.io/).
+
+Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
+auditing the code.
+
+
+## license
+
+MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org)
diff --git a/node_modules/bl/node_modules/safe-buffer/index.d.ts b/node_modules/bl/node_modules/safe-buffer/index.d.ts
new file mode 100644
index 00000000..e9fed809
--- /dev/null
+++ b/node_modules/bl/node_modules/safe-buffer/index.d.ts
@@ -0,0 +1,187 @@
+declare module "safe-buffer" {
+ export class Buffer {
+ length: number
+ write(string: string, offset?: number, length?: number, encoding?: string): number;
+ toString(encoding?: string, start?: number, end?: number): string;
+ toJSON(): { type: 'Buffer', data: any[] };
+ equals(otherBuffer: Buffer): boolean;
+ compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
+ copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
+ slice(start?: number, end?: number): Buffer;
+ writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readUInt8(offset: number, noAssert?: boolean): number;
+ readUInt16LE(offset: number, noAssert?: boolean): number;
+ readUInt16BE(offset: number, noAssert?: boolean): number;
+ readUInt32LE(offset: number, noAssert?: boolean): number;
+ readUInt32BE(offset: number, noAssert?: boolean): number;
+ readInt8(offset: number, noAssert?: boolean): number;
+ readInt16LE(offset: number, noAssert?: boolean): number;
+ readInt16BE(offset: number, noAssert?: boolean): number;
+ readInt32LE(offset: number, noAssert?: boolean): number;
+ readInt32BE(offset: number, noAssert?: boolean): number;
+ readFloatLE(offset: number, noAssert?: boolean): number;
+ readFloatBE(offset: number, noAssert?: boolean): number;
+ readDoubleLE(offset: number, noAssert?: boolean): number;
+ readDoubleBE(offset: number, noAssert?: boolean): number;
+ swap16(): Buffer;
+ swap32(): Buffer;
+ swap64(): Buffer;
+ writeUInt8(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt8(value: number, offset: number, noAssert?: boolean): number;
+ writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
+ writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
+ writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
+ writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
+ writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
+ fill(value: any, offset?: number, end?: number): this;
+ indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
+ lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
+ includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
+
+ /**
+ * Allocates a new buffer containing the given {str}.
+ *
+ * @param str String to store in buffer.
+ * @param encoding encoding to use, optional. Default is 'utf8'
+ */
+ constructor (str: string, encoding?: string);
+ /**
+ * Allocates a new buffer of {size} octets.
+ *
+ * @param size count of octets to allocate.
+ */
+ constructor (size: number);
+ /**
+ * Allocates a new buffer containing the given {array} of octets.
+ *
+ * @param array The octets to store.
+ */
+ constructor (array: Uint8Array);
+ /**
+ * Produces a Buffer backed by the same allocated memory as
+ * the given {ArrayBuffer}.
+ *
+ *
+ * @param arrayBuffer The ArrayBuffer with which to share memory.
+ */
+ constructor (arrayBuffer: ArrayBuffer);
+ /**
+ * Allocates a new buffer containing the given {array} of octets.
+ *
+ * @param array The octets to store.
+ */
+ constructor (array: any[]);
+ /**
+ * Copies the passed {buffer} data onto a new {Buffer} instance.
+ *
+ * @param buffer The buffer to copy.
+ */
+ constructor (buffer: Buffer);
+ prototype: Buffer;
+ /**
+ * Allocates a new Buffer using an {array} of octets.
+ *
+ * @param array
+ */
+ static from(array: any[]): Buffer;
+ /**
+ * When passed a reference to the .buffer property of a TypedArray instance,
+ * the newly created Buffer will share the same allocated memory as the TypedArray.
+ * The optional {byteOffset} and {length} arguments specify a memory range
+ * within the {arrayBuffer} that will be shared by the Buffer.
+ *
+ * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
+ * @param byteOffset
+ * @param length
+ */
+ static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
+ /**
+ * Copies the passed {buffer} data onto a new Buffer instance.
+ *
+ * @param buffer
+ */
+ static from(buffer: Buffer): Buffer;
+ /**
+ * Creates a new Buffer containing the given JavaScript string {str}.
+ * If provided, the {encoding} parameter identifies the character encoding.
+ * If not provided, {encoding} defaults to 'utf8'.
+ *
+ * @param str
+ */
+ static from(str: string, encoding?: string): Buffer;
+ /**
+ * Returns true if {obj} is a Buffer
+ *
+ * @param obj object to test.
+ */
+ static isBuffer(obj: any): obj is Buffer;
+ /**
+ * Returns true if {encoding} is a valid encoding argument.
+ * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
+ *
+ * @param encoding string to test.
+ */
+ static isEncoding(encoding: string): boolean;
+ /**
+ * Gives the actual byte length of a string. encoding defaults to 'utf8'.
+ * This is not the same as String.prototype.length since that returns the number of characters in a string.
+ *
+ * @param string string to test.
+ * @param encoding encoding used to evaluate (defaults to 'utf8')
+ */
+ static byteLength(string: string, encoding?: string): number;
+ /**
+ * Returns a buffer which is the result of concatenating all the buffers in the list together.
+ *
+ * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
+ * If the list has exactly one item, then the first item of the list is returned.
+ * If the list has more than one item, then a new Buffer is created.
+ *
+ * @param list An array of Buffer objects to concatenate
+ * @param totalLength Total length of the buffers when concatenated.
+ * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
+ */
+ static concat(list: Buffer[], totalLength?: number): Buffer;
+ /**
+ * The same as buf1.compare(buf2).
+ */
+ static compare(buf1: Buffer, buf2: Buffer): number;
+ /**
+ * Allocates a new buffer of {size} octets.
+ *
+ * @param size count of octets to allocate.
+ * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
+ * If parameter is omitted, buffer will be filled with zeros.
+ * @param encoding encoding used for call to buf.fill while initializing
+ */
+ static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
+ /**
+ * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
+ * of the newly created Buffer are unknown and may contain sensitive data.
+ *
+ * @param size count of octets to allocate
+ */
+ static allocUnsafe(size: number): Buffer;
+ /**
+ * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
+ * of the newly created Buffer are unknown and may contain sensitive data.
+ *
+ * @param size count of octets to allocate
+ */
+ static allocUnsafeSlow(size: number): Buffer;
+ }
+}
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/safe-buffer/index.js b/node_modules/bl/node_modules/safe-buffer/index.js
new file mode 100644
index 00000000..22438dab
--- /dev/null
+++ b/node_modules/bl/node_modules/safe-buffer/index.js
@@ -0,0 +1,62 @@
+/* eslint-disable node/no-deprecated-api */
+var buffer = require('buffer')
+var Buffer = buffer.Buffer
+
+// alternative to using Object.keys for old browsers
+function copyProps (src, dst) {
+ for (var key in src) {
+ dst[key] = src[key]
+ }
+}
+if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
+ module.exports = buffer
+} else {
+ // Copy properties from require('buffer')
+ copyProps(buffer, exports)
+ exports.Buffer = SafeBuffer
+}
+
+function SafeBuffer (arg, encodingOrOffset, length) {
+ return Buffer(arg, encodingOrOffset, length)
+}
+
+// Copy static methods from Buffer
+copyProps(Buffer, SafeBuffer)
+
+SafeBuffer.from = function (arg, encodingOrOffset, length) {
+ if (typeof arg === 'number') {
+ throw new TypeError('Argument must not be a number')
+ }
+ return Buffer(arg, encodingOrOffset, length)
+}
+
+SafeBuffer.alloc = function (size, fill, encoding) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
+ }
+ var buf = Buffer(size)
+ if (fill !== undefined) {
+ if (typeof encoding === 'string') {
+ buf.fill(fill, encoding)
+ } else {
+ buf.fill(fill)
+ }
+ } else {
+ buf.fill(0)
+ }
+ return buf
+}
+
+SafeBuffer.allocUnsafe = function (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
+ }
+ return Buffer(size)
+}
+
+SafeBuffer.allocUnsafeSlow = function (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
+ }
+ return buffer.SlowBuffer(size)
+}
diff --git a/node_modules/bl/node_modules/safe-buffer/package.json b/node_modules/bl/node_modules/safe-buffer/package.json
new file mode 100644
index 00000000..623fbc3f
--- /dev/null
+++ b/node_modules/bl/node_modules/safe-buffer/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "safe-buffer",
+ "description": "Safer Node.js Buffer API",
+ "version": "5.1.2",
+ "author": {
+ "name": "Feross Aboukhadijeh",
+ "email": "feross@feross.org",
+ "url": "http://feross.org"
+ },
+ "bugs": {
+ "url": "https://github.com/feross/safe-buffer/issues"
+ },
+ "devDependencies": {
+ "standard": "*",
+ "tape": "^4.0.0"
+ },
+ "homepage": "https://github.com/feross/safe-buffer",
+ "keywords": [
+ "buffer",
+ "buffer allocate",
+ "node security",
+ "safe",
+ "safe-buffer",
+ "security",
+ "uninitialized"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "types": "index.d.ts",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/feross/safe-buffer.git"
+ },
+ "scripts": {
+ "test": "standard && tape test/*.js"
+ }
+}
diff --git a/node_modules/bl/node_modules/string_decoder/.travis.yml b/node_modules/bl/node_modules/string_decoder/.travis.yml
new file mode 100644
index 00000000..3347a725
--- /dev/null
+++ b/node_modules/bl/node_modules/string_decoder/.travis.yml
@@ -0,0 +1,50 @@
+sudo: false
+language: node_js
+before_install:
+ - npm install -g npm@2
+ - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g
+notifications:
+ email: false
+matrix:
+ fast_finish: true
+ include:
+ - node_js: '0.8'
+ env:
+ - TASK=test
+ - NPM_LEGACY=true
+ - node_js: '0.10'
+ env:
+ - TASK=test
+ - NPM_LEGACY=true
+ - node_js: '0.11'
+ env:
+ - TASK=test
+ - NPM_LEGACY=true
+ - node_js: '0.12'
+ env:
+ - TASK=test
+ - NPM_LEGACY=true
+ - node_js: 1
+ env:
+ - TASK=test
+ - NPM_LEGACY=true
+ - node_js: 2
+ env:
+ - TASK=test
+ - NPM_LEGACY=true
+ - node_js: 3
+ env:
+ - TASK=test
+ - NPM_LEGACY=true
+ - node_js: 4
+ env: TASK=test
+ - node_js: 5
+ env: TASK=test
+ - node_js: 6
+ env: TASK=test
+ - node_js: 7
+ env: TASK=test
+ - node_js: 8
+ env: TASK=test
+ - node_js: 9
+ env: TASK=test
diff --git a/node_modules/bl/node_modules/string_decoder/LICENSE b/node_modules/bl/node_modules/string_decoder/LICENSE
new file mode 100644
index 00000000..778edb20
--- /dev/null
+++ b/node_modules/bl/node_modules/string_decoder/LICENSE
@@ -0,0 +1,48 @@
+Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
diff --git a/node_modules/bl/node_modules/string_decoder/README.md b/node_modules/bl/node_modules/string_decoder/README.md
new file mode 100644
index 00000000..5fd58315
--- /dev/null
+++ b/node_modules/bl/node_modules/string_decoder/README.md
@@ -0,0 +1,47 @@
+# string_decoder
+
+***Node-core v8.9.4 string_decoder for userland***
+
+
+```bash
+npm install --save string_decoder
+```
+
+***Node-core string_decoder for userland***
+
+This package is a mirror of the string_decoder implementation in Node-core.
+
+Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/).
+
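+For a quick sense of what the decoder does, here is a small sketch: a multi-byte UTF-8
+character split across chunks is buffered until its remaining bytes arrive.
+
+```js
+var StringDecoder = require('string_decoder').StringDecoder
+var decoder = new StringDecoder('utf8')
+
+// '€' is three bytes in UTF-8 (0xE2 0x82 0xAC); feed it in two pieces
+console.log(decoder.write(Buffer.from([0xE2, 0x82]))) // '' (incomplete character is held back)
+console.log(decoder.write(Buffer.from([0xAC])))       // '€'
+console.log(decoder.end())                            // ''
+```
+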
+As of version 1.0.0 **string_decoder** uses semantic versioning.
+
+## Previous versions
+
+Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10.
+
+## Update
+
+The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version.
+
+## Streams Working Group
+
+`string_decoder` is maintained by the Streams Working Group, which
+oversees the development and maintenance of the Streams API within
+Node.js. The responsibilities of the Streams Working Group include:
+
+* Addressing stream issues on the Node.js issue tracker.
+* Authoring and editing stream documentation within the Node.js project.
+* Reviewing changes to stream subclasses within the Node.js project.
+* Redirecting changes to streams from the Node.js project to this
+ project.
+* Assisting in the implementation of stream providers within Node.js.
+* Recommending versions of `readable-stream` to be included in Node.js.
+* Messaging about the future of streams to give the community advance
+ notice of changes.
+
+See [readable-stream](https://github.com/nodejs/readable-stream) for
+more details.
diff --git a/node_modules/bl/node_modules/string_decoder/lib/string_decoder.js b/node_modules/bl/node_modules/string_decoder/lib/string_decoder.js
new file mode 100644
index 00000000..2e89e63f
--- /dev/null
+++ b/node_modules/bl/node_modules/string_decoder/lib/string_decoder.js
@@ -0,0 +1,296 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict';
+
+/*<replacement>*/
+
+var Buffer = require('safe-buffer').Buffer;
+/*</replacement>*/
+
+var isEncoding = Buffer.isEncoding || function (encoding) {
+ encoding = '' + encoding;
+ switch (encoding && encoding.toLowerCase()) {
+ case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
+ return true;
+ default:
+ return false;
+ }
+};
+
+function _normalizeEncoding(enc) {
+ if (!enc) return 'utf8';
+ var retried;
+ while (true) {
+ switch (enc) {
+ case 'utf8':
+ case 'utf-8':
+ return 'utf8';
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return 'utf16le';
+ case 'latin1':
+ case 'binary':
+ return 'latin1';
+ case 'base64':
+ case 'ascii':
+ case 'hex':
+ return enc;
+ default:
+ if (retried) return; // undefined
+ enc = ('' + enc).toLowerCase();
+ retried = true;
+ }
+ }
+};
+
+// Do not cache `Buffer.isEncoding` when checking encoding names as some
+// modules monkey-patch it to support additional encodings
+function normalizeEncoding(enc) {
+ var nenc = _normalizeEncoding(enc);
+ if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
+ return nenc || enc;
+}
+
+// StringDecoder provides an interface for efficiently splitting a series of
+// buffers into a series of JS strings without breaking apart multi-byte
+// characters.
+exports.StringDecoder = StringDecoder;
+function StringDecoder(encoding) {
+ this.encoding = normalizeEncoding(encoding);
+ var nb;
+ switch (this.encoding) {
+ case 'utf16le':
+ this.text = utf16Text;
+ this.end = utf16End;
+ nb = 4;
+ break;
+ case 'utf8':
+ this.fillLast = utf8FillLast;
+ nb = 4;
+ break;
+ case 'base64':
+ this.text = base64Text;
+ this.end = base64End;
+ nb = 3;
+ break;
+ default:
+ this.write = simpleWrite;
+ this.end = simpleEnd;
+ return;
+ }
+ this.lastNeed = 0;
+ this.lastTotal = 0;
+ this.lastChar = Buffer.allocUnsafe(nb);
+}
+
+StringDecoder.prototype.write = function (buf) {
+ if (buf.length === 0) return '';
+ var r;
+ var i;
+ if (this.lastNeed) {
+ r = this.fillLast(buf);
+ if (r === undefined) return '';
+ i = this.lastNeed;
+ this.lastNeed = 0;
+ } else {
+ i = 0;
+ }
+ if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
+ return r || '';
+};
+
+StringDecoder.prototype.end = utf8End;
+
+// Returns only complete characters in a Buffer
+StringDecoder.prototype.text = utf8Text;
+
+// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
+StringDecoder.prototype.fillLast = function (buf) {
+ if (this.lastNeed <= buf.length) {
+ buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
+ return this.lastChar.toString(this.encoding, 0, this.lastTotal);
+ }
+ buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
+ this.lastNeed -= buf.length;
+};
+
+// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
+// continuation byte. If an invalid byte is detected, -2 is returned.
+function utf8CheckByte(byte) {
+ if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
+ return byte >> 6 === 0x02 ? -1 : -2;
+}
+
+// Checks at most 3 bytes at the end of a Buffer in order to detect an
+// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
+// needed to complete the UTF-8 character (if applicable) are returned.
+function utf8CheckIncomplete(self, buf, i) {
+ var j = buf.length - 1;
+ if (j < i) return 0;
+ var nb = utf8CheckByte(buf[j]);
+ if (nb >= 0) {
+ if (nb > 0) self.lastNeed = nb - 1;
+ return nb;
+ }
+ if (--j < i || nb === -2) return 0;
+ nb = utf8CheckByte(buf[j]);
+ if (nb >= 0) {
+ if (nb > 0) self.lastNeed = nb - 2;
+ return nb;
+ }
+ if (--j < i || nb === -2) return 0;
+ nb = utf8CheckByte(buf[j]);
+ if (nb >= 0) {
+ if (nb > 0) {
+ if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
+ }
+ return nb;
+ }
+ return 0;
+}
+
+// Validates as many continuation bytes for a multi-byte UTF-8 character as
+// needed or are available. If we see a non-continuation byte where we expect
+// one, we "replace" the validated continuation bytes we've seen so far with
+// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
+// behavior. The continuation byte check is included three times in the case
+// where all of the continuation bytes for a character exist in the same buffer.
+// It is also done this way as a slight performance increase instead of using a
+// loop.
+function utf8CheckExtraBytes(self, buf, p) {
+ if ((buf[0] & 0xC0) !== 0x80) {
+ self.lastNeed = 0;
+ return '\ufffd';
+ }
+ if (self.lastNeed > 1 && buf.length > 1) {
+ if ((buf[1] & 0xC0) !== 0x80) {
+ self.lastNeed = 1;
+ return '\ufffd';
+ }
+ if (self.lastNeed > 2 && buf.length > 2) {
+ if ((buf[2] & 0xC0) !== 0x80) {
+ self.lastNeed = 2;
+ return '\ufffd';
+ }
+ }
+ }
+}
+
+// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
+function utf8FillLast(buf) {
+ var p = this.lastTotal - this.lastNeed;
+ var r = utf8CheckExtraBytes(this, buf, p);
+ if (r !== undefined) return r;
+ if (this.lastNeed <= buf.length) {
+ buf.copy(this.lastChar, p, 0, this.lastNeed);
+ return this.lastChar.toString(this.encoding, 0, this.lastTotal);
+ }
+ buf.copy(this.lastChar, p, 0, buf.length);
+ this.lastNeed -= buf.length;
+}
+
+// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
+// partial character, the character's bytes are buffered until the required
+// number of bytes are available.
+function utf8Text(buf, i) {
+ var total = utf8CheckIncomplete(this, buf, i);
+ if (!this.lastNeed) return buf.toString('utf8', i);
+ this.lastTotal = total;
+ var end = buf.length - (total - this.lastNeed);
+ buf.copy(this.lastChar, 0, end);
+ return buf.toString('utf8', i, end);
+}
+
+// For UTF-8, a replacement character is added when ending on a partial
+// character.
+function utf8End(buf) {
+ var r = buf && buf.length ? this.write(buf) : '';
+ if (this.lastNeed) return r + '\ufffd';
+ return r;
+}
+
+// UTF-16LE typically needs two bytes per character, but even if we have an even
+// number of bytes available, we need to check if we end on a leading/high
+// surrogate. In that case, we need to wait for the next two bytes in order to
+// decode the last character properly.
+function utf16Text(buf, i) {
+ if ((buf.length - i) % 2 === 0) {
+ var r = buf.toString('utf16le', i);
+ if (r) {
+ var c = r.charCodeAt(r.length - 1);
+ if (c >= 0xD800 && c <= 0xDBFF) {
+ this.lastNeed = 2;
+ this.lastTotal = 4;
+ this.lastChar[0] = buf[buf.length - 2];
+ this.lastChar[1] = buf[buf.length - 1];
+ return r.slice(0, -1);
+ }
+ }
+ return r;
+ }
+ this.lastNeed = 1;
+ this.lastTotal = 2;
+ this.lastChar[0] = buf[buf.length - 1];
+ return buf.toString('utf16le', i, buf.length - 1);
+}
+
+// For UTF-16LE we do not explicitly append special replacement characters if we
+// end on a partial character, we simply let v8 handle that.
+function utf16End(buf) {
+ var r = buf && buf.length ? this.write(buf) : '';
+ if (this.lastNeed) {
+ var end = this.lastTotal - this.lastNeed;
+ return r + this.lastChar.toString('utf16le', 0, end);
+ }
+ return r;
+}
+
+function base64Text(buf, i) {
+ var n = (buf.length - i) % 3;
+ if (n === 0) return buf.toString('base64', i);
+ this.lastNeed = 3 - n;
+ this.lastTotal = 3;
+ if (n === 1) {
+ this.lastChar[0] = buf[buf.length - 1];
+ } else {
+ this.lastChar[0] = buf[buf.length - 2];
+ this.lastChar[1] = buf[buf.length - 1];
+ }
+ return buf.toString('base64', i, buf.length - n);
+}
+
+function base64End(buf) {
+ var r = buf && buf.length ? this.write(buf) : '';
+ if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
+ return r;
+}
+
+// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
+function simpleWrite(buf) {
+ return buf.toString(this.encoding);
+}
+
+function simpleEnd(buf) {
+ return buf && buf.length ? this.write(buf) : '';
+}
\ No newline at end of file
diff --git a/node_modules/bl/node_modules/string_decoder/package.json b/node_modules/bl/node_modules/string_decoder/package.json
new file mode 100644
index 00000000..518c3eb9
--- /dev/null
+++ b/node_modules/bl/node_modules/string_decoder/package.json
@@ -0,0 +1,31 @@
+{
+ "name": "string_decoder",
+ "version": "1.1.1",
+ "description": "The string_decoder module from Node core",
+ "main": "lib/string_decoder.js",
+ "dependencies": {
+ "safe-buffer": "~5.1.0"
+ },
+ "devDependencies": {
+ "babel-polyfill": "^6.23.0",
+ "core-util-is": "^1.0.2",
+ "inherits": "^2.0.3",
+ "tap": "~0.4.8"
+ },
+ "scripts": {
+ "test": "tap test/parallel/*.js && node test/verify-dependencies",
+ "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/nodejs/string_decoder.git"
+ },
+ "homepage": "https://github.com/nodejs/string_decoder",
+ "keywords": [
+ "string",
+ "decoder",
+ "browser",
+ "browserify"
+ ],
+ "license": "MIT"
+}
diff --git a/node_modules/bl/package.json b/node_modules/bl/package.json
new file mode 100644
index 00000000..1ffd2526
--- /dev/null
+++ b/node_modules/bl/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "bl",
+ "version": "1.2.3",
+ "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
+ "main": "bl.js",
+ "scripts": {
+ "test": "node test/test.js | faucet"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/rvagg/bl.git"
+ },
+ "homepage": "https://github.com/rvagg/bl",
+ "authors": [
+ "Rod Vagg (https://github.com/rvagg)",
+ "Matteo Collina (https://github.com/mcollina)",
+ "Jarett Cruger (https://github.com/jcrugzz)"
+ ],
+ "keywords": [
+ "buffer",
+ "buffers",
+ "stream",
+ "awesomesauce"
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "readable-stream": "^2.3.5",
+ "safe-buffer": "^5.1.1"
+ },
+ "devDependencies": {
+ "faucet": "0.0.1",
+ "hash_file": "~0.1.1",
+ "tape": "~4.9.0"
+ }
+}
diff --git a/node_modules/bl/test/test.js b/node_modules/bl/test/test.js
new file mode 100644
index 00000000..dac1861e
--- /dev/null
+++ b/node_modules/bl/test/test.js
@@ -0,0 +1,718 @@
+var tape = require('tape')
+ , crypto = require('crypto')
+ , fs = require('fs')
+ , hash = require('hash_file')
+ , BufferList = require('../')
+ , Buffer = require('safe-buffer').Buffer
+
+ , encodings =
+ ('hex utf8 utf-8 ascii binary base64'
+ + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ')
+
+tape('single bytes from single buffer', function (t) {
+ var bl = new BufferList()
+ bl.append(Buffer.from('abcd'))
+
+ t.equal(bl.length, 4)
+
+ t.equal(bl.get(0), 97)
+ t.equal(bl.get(1), 98)
+ t.equal(bl.get(2), 99)
+ t.equal(bl.get(3), 100)
+
+ t.end()
+})
+
+tape('single bytes from multiple buffers', function (t) {
+ var bl = new BufferList()
+ bl.append(Buffer.from('abcd'))
+ bl.append(Buffer.from('efg'))
+ bl.append(Buffer.from('hi'))
+ bl.append(Buffer.from('j'))
+
+ t.equal(bl.length, 10)
+
+ t.equal(bl.get(0), 97)
+ t.equal(bl.get(1), 98)
+ t.equal(bl.get(2), 99)
+ t.equal(bl.get(3), 100)
+ t.equal(bl.get(4), 101)
+ t.equal(bl.get(5), 102)
+ t.equal(bl.get(6), 103)
+ t.equal(bl.get(7), 104)
+ t.equal(bl.get(8), 105)
+ t.equal(bl.get(9), 106)
+ t.end()
+})
+
+tape('multi bytes from single buffer', function (t) {
+ var bl = new BufferList()
+ bl.append(Buffer.from('abcd'))
+
+ t.equal(bl.length, 4)
+
+ t.equal(bl.slice(0, 4).toString('ascii'), 'abcd')
+ t.equal(bl.slice(0, 3).toString('ascii'), 'abc')
+ t.equal(bl.slice(1, 4).toString('ascii'), 'bcd')
+ t.equal(bl.slice(-4, -1).toString('ascii'), 'abc')
+
+ t.end()
+})
+
+tape('multi bytes from single buffer (negative indexes)', function (t) {
+ var bl = new BufferList()
+ bl.append(Buffer.from('buffer'))
+
+ t.equal(bl.length, 6)
+
+ t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe')
+ t.equal(bl.slice(-6, -2).toString('ascii'), 'buff')
+ t.equal(bl.slice(-5, -2).toString('ascii'), 'uff')
+
+ t.end()
+})
+
+tape('multiple bytes from multiple buffers', function (t) {
+ var bl = new BufferList()
+
+ bl.append(Buffer.from('abcd'))
+ bl.append(Buffer.from('efg'))
+ bl.append(Buffer.from('hi'))
+ bl.append(Buffer.from('j'))
+
+ t.equal(bl.length, 10)
+
+ t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
+ t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
+ t.equal(bl.slice(3, 6).toString('ascii'), 'def')
+ t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
+ t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
+ t.equal(bl.slice(-7, -4).toString('ascii'), 'def')
+
+ t.end()
+})
+
+tape('multiple bytes from multiple buffer lists', function (t) {
+ var bl = new BufferList()
+
+ bl.append(new BufferList([ Buffer.from('abcd'), Buffer.from('efg') ]))
+ bl.append(new BufferList([ Buffer.from('hi'), Buffer.from('j') ]))
+
+ t.equal(bl.length, 10)
+
+ t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
+
+ t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
+ t.equal(bl.slice(3, 6).toString('ascii'), 'def')
+ t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
+ t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
+
+ t.end()
+})
+
+// same data as previous test, just using nested constructors
+tape('multiple bytes from crazy nested buffer lists', function (t) {
+ var bl = new BufferList()
+
+ bl.append(new BufferList([
+ new BufferList([
+ new BufferList(Buffer.from('abc'))
+ , Buffer.from('d')
+ , new BufferList(Buffer.from('efg'))
+ ])
+ , new BufferList([ Buffer.from('hi') ])
+ , new BufferList(Buffer.from('j'))
+ ]))
+
+ t.equal(bl.length, 10)
+
+ t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
+
+ t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
+ t.equal(bl.slice(3, 6).toString('ascii'), 'def')
+ t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
+ t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
+
+ t.end()
+})
+
+tape('append accepts arrays of Buffers', function (t) {
+ var bl = new BufferList()
+ bl.append(Buffer.from('abc'))
+ bl.append([ Buffer.from('def') ])
+ bl.append([ Buffer.from('ghi'), Buffer.from('jkl') ])
+ bl.append([ Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz') ])
+ t.equal(bl.length, 26)
+ t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
+ t.end()
+})
+
+tape('append accepts arrays of BufferLists', function (t) {
+ var bl = new BufferList()
+ bl.append(Buffer.from('abc'))
+ bl.append([ new BufferList('def') ])
+ bl.append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ]))
+ bl.append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ])
+ t.equal(bl.length, 26)
+ t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
+ t.end()
+})
+
+tape('append chainable', function (t) {
+ var bl = new BufferList()
+ t.ok(bl.append(Buffer.from('abcd')) === bl)
+ t.ok(bl.append([ Buffer.from('abcd') ]) === bl)
+ t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl)
+ t.ok(bl.append([ new BufferList(Buffer.from('abcd')) ]) === bl)
+ t.end()
+})
+
+tape('append chainable (test results)', function (t) {
+ var bl = new BufferList('abc')
+ .append([ new BufferList('def') ])
+ .append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ]))
+ .append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ])
+
+ t.equal(bl.length, 26)
+ t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
+ t.end()
+})
+
+tape('consuming from multiple buffers', function (t) {
+ var bl = new BufferList()
+
+ bl.append(Buffer.from('abcd'))
+ bl.append(Buffer.from('efg'))
+ bl.append(Buffer.from('hi'))
+ bl.append(Buffer.from('j'))
+
+ t.equal(bl.length, 10)
+
+ t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
+
+ bl.consume(3)
+ t.equal(bl.length, 7)
+ t.equal(bl.slice(0, 7).toString('ascii'), 'defghij')
+
+ bl.consume(2)
+ t.equal(bl.length, 5)
+ t.equal(bl.slice(0, 5).toString('ascii'), 'fghij')
+
+ bl.consume(1)
+ t.equal(bl.length, 4)
+ t.equal(bl.slice(0, 4).toString('ascii'), 'ghij')
+
+ bl.consume(1)
+ t.equal(bl.length, 3)
+ t.equal(bl.slice(0, 3).toString('ascii'), 'hij')
+
+ bl.consume(2)
+ t.equal(bl.length, 1)
+ t.equal(bl.slice(0, 1).toString('ascii'), 'j')
+
+ t.end()
+})
+
+tape('complete consumption', function (t) {
+ var bl = new BufferList()
+
+ bl.append(Buffer.from('a'))
+ bl.append(Buffer.from('b'))
+
+ bl.consume(2)
+
+ t.equal(bl.length, 0)
+ t.equal(bl._bufs.length, 0)
+
+ t.end()
+})
+
+tape('test readUInt8 / readInt8', function (t) {
+ var buf1 = Buffer.alloc(1)
+ , buf2 = Buffer.alloc(3)
+ , buf3 = Buffer.alloc(3)
+ , bl = new BufferList()
+
+ buf2[1] = 0x3
+ buf2[2] = 0x4
+ buf3[0] = 0x23
+ buf3[1] = 0x42
+
+ bl.append(buf1)
+ bl.append(buf2)
+ bl.append(buf3)
+
+ t.equal(bl.readUInt8(2), 0x3)
+ t.equal(bl.readInt8(2), 0x3)
+ t.equal(bl.readUInt8(3), 0x4)
+ t.equal(bl.readInt8(3), 0x4)
+ t.equal(bl.readUInt8(4), 0x23)
+ t.equal(bl.readInt8(4), 0x23)
+ t.equal(bl.readUInt8(5), 0x42)
+ t.equal(bl.readInt8(5), 0x42)
+ t.end()
+})
+
+tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) {
+ var buf1 = Buffer.alloc(1)
+ , buf2 = Buffer.alloc(3)
+ , buf3 = Buffer.alloc(3)
+ , bl = new BufferList()
+
+ buf2[1] = 0x3
+ buf2[2] = 0x4
+ buf3[0] = 0x23
+ buf3[1] = 0x42
+
+ bl.append(buf1)
+ bl.append(buf2)
+ bl.append(buf3)
+
+ t.equal(bl.readUInt16BE(2), 0x0304)
+ t.equal(bl.readUInt16LE(2), 0x0403)
+ t.equal(bl.readInt16BE(2), 0x0304)
+ t.equal(bl.readInt16LE(2), 0x0403)
+ t.equal(bl.readUInt16BE(3), 0x0423)
+ t.equal(bl.readUInt16LE(3), 0x2304)
+ t.equal(bl.readInt16BE(3), 0x0423)
+ t.equal(bl.readInt16LE(3), 0x2304)
+ t.equal(bl.readUInt16BE(4), 0x2342)
+ t.equal(bl.readUInt16LE(4), 0x4223)
+ t.equal(bl.readInt16BE(4), 0x2342)
+ t.equal(bl.readInt16LE(4), 0x4223)
+ t.end()
+})
+
+tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) {
+ var buf1 = Buffer.alloc(1)
+ , buf2 = Buffer.alloc(3)
+ , buf3 = Buffer.alloc(3)
+ , bl = new BufferList()
+
+ buf2[1] = 0x3
+ buf2[2] = 0x4
+ buf3[0] = 0x23
+ buf3[1] = 0x42
+
+ bl.append(buf1)
+ bl.append(buf2)
+ bl.append(buf3)
+
+ t.equal(bl.readUInt32BE(2), 0x03042342)
+ t.equal(bl.readUInt32LE(2), 0x42230403)
+ t.equal(bl.readInt32BE(2), 0x03042342)
+ t.equal(bl.readInt32LE(2), 0x42230403)
+ t.end()
+})
+
+tape('test readFloatLE / readFloatBE', function (t) {
+ var buf1 = Buffer.alloc(1)
+ , buf2 = Buffer.alloc(3)
+ , buf3 = Buffer.alloc(3)
+ , bl = new BufferList()
+
+ buf2[1] = 0x00
+ buf2[2] = 0x00
+ buf3[0] = 0x80
+ buf3[1] = 0x3f
+
+ bl.append(buf1)
+ bl.append(buf2)
+ bl.append(buf3)
+
+ t.equal(bl.readFloatLE(2), 0x01)
+ t.end()
+})
+
+tape('test readDoubleLE / readDoubleBE', function (t) {
+ var buf1 = Buffer.alloc(1)
+ , buf2 = Buffer.alloc(3)
+ , buf3 = Buffer.alloc(10)
+ , bl = new BufferList()
+
+ buf2[1] = 0x55
+ buf2[2] = 0x55
+ buf3[0] = 0x55
+ buf3[1] = 0x55
+ buf3[2] = 0x55
+ buf3[3] = 0x55
+ buf3[4] = 0xd5
+ buf3[5] = 0x3f
+
+ bl.append(buf1)
+ bl.append(buf2)
+ bl.append(buf3)
+
+ t.equal(bl.readDoubleLE(2), 0.3333333333333333)
+ t.end()
+})
+
+tape('test toString', function (t) {
+ var bl = new BufferList()
+
+ bl.append(Buffer.from('abcd'))
+ bl.append(Buffer.from('efg'))
+ bl.append(Buffer.from('hi'))
+ bl.append(Buffer.from('j'))
+
+ t.equal(bl.toString('ascii', 0, 10), 'abcdefghij')
+ t.equal(bl.toString('ascii', 3, 10), 'defghij')
+ t.equal(bl.toString('ascii', 3, 6), 'def')
+ t.equal(bl.toString('ascii', 3, 8), 'defgh')
+ t.equal(bl.toString('ascii', 5, 10), 'fghij')
+
+ t.end()
+})
+
+tape('test toString encoding', function (t) {
+ var bl = new BufferList()
+ , b = Buffer.from('abcdefghij\xff\x00')
+
+ bl.append(Buffer.from('abcd'))
+ bl.append(Buffer.from('efg'))
+ bl.append(Buffer.from('hi'))
+ bl.append(Buffer.from('j'))
+ bl.append(Buffer.from('\xff\x00'))
+
+ encodings.forEach(function (enc) {
+ t.equal(bl.toString(enc), b.toString(enc), enc)
+ })
+
+ t.end()
+})
+
+tape('uninitialized memory', function (t) {
+ const secret = crypto.randomBytes(256)
+ for (let i = 0; i < 1e6; i++) {
+ const clone = Buffer.from(secret)
+ const bl = new BufferList()
+ bl.append(Buffer.from('a'))
+ bl.consume(-1024)
+ const buf = bl.slice(1)
+ if (buf.indexOf(clone) !== -1) {
+ t.fail(`Match (at ${i})`)
+ break
+ }
+ }
+ t.end()
+})
+
+!process.browser && tape('test stream', function (t) {
+ var random = crypto.randomBytes(65534)
+ , rndhash = hash(random, 'md5')
+ , md5sum = crypto.createHash('md5')
+ , bl = new BufferList(function (err, buf) {
+ t.ok(Buffer.isBuffer(buf))
+ t.ok(err === null)
+ t.equal(rndhash, hash(bl.slice(), 'md5'))
+ t.equal(rndhash, hash(buf, 'md5'))
+
+ bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat'))
+ .on('close', function () {
+ var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat')
+ s.on('data', md5sum.update.bind(md5sum))
+ s.on('end', function() {
+ t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!')
+ t.end()
+ })
+ })
+
+ })
+
+ fs.writeFileSync('/tmp/bl_test_rnd.dat', random)
+ fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl)
+})
+
+tape('instantiation with Buffer', function (t) {
+ var buf = crypto.randomBytes(1024)
+ , buf2 = crypto.randomBytes(1024)
+ , b = BufferList(buf)
+
+ t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer')
+ b = BufferList([ buf, buf2 ])
+ t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer')
+ t.end()
+})
+
+tape('test String appendage', function (t) {
+ var bl = new BufferList()
+ , b = Buffer.from('abcdefghij\xff\x00')
+
+ bl.append('abcd')
+ bl.append('efg')
+ bl.append('hi')
+ bl.append('j')
+ bl.append('\xff\x00')
+
+ encodings.forEach(function (enc) {
+ t.equal(bl.toString(enc), b.toString(enc))
+ })
+
+ t.end()
+})
+
+tape('test Number appendage', function (t) {
+ var bl = new BufferList()
+ , b = Buffer.from('1234567890')
+
+ bl.append(1234)
+ bl.append(567)
+ bl.append(89)
+ bl.append(0)
+
+ encodings.forEach(function (enc) {
+ t.equal(bl.toString(enc), b.toString(enc))
+ })
+
+ t.end()
+})
+
+tape('write nothing, should get empty buffer', function (t) {
+ t.plan(3)
+ BufferList(function (err, data) {
+ t.notOk(err, 'no error')
+ t.ok(Buffer.isBuffer(data), 'got a buffer')
+ t.equal(0, data.length, 'got a zero-length buffer')
+ t.end()
+ }).end()
+})
+
+tape('unicode string', function (t) {
+ t.plan(2)
+ var inp1 = '\u2600'
+ , inp2 = '\u2603'
+ , exp = inp1 + ' and ' + inp2
+ , bl = BufferList()
+ bl.write(inp1)
+ bl.write(' and ')
+ bl.write(inp2)
+ t.equal(exp, bl.toString())
+ t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex'))
+})
+
+tape('should emit finish', function (t) {
+ var source = BufferList()
+ , dest = BufferList()
+
+ source.write('hello')
+ source.pipe(dest)
+
+ dest.on('finish', function () {
+ t.equal(dest.toString('utf8'), 'hello')
+ t.end()
+ })
+})
+
+tape('basic copy', function (t) {
+ var buf = crypto.randomBytes(1024)
+ , buf2 = Buffer.alloc(1024)
+ , b = BufferList(buf)
+
+ b.copy(buf2)
+ t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
+ t.end()
+})
+
+tape('copy after many appends', function (t) {
+ var buf = crypto.randomBytes(512)
+ , buf2 = Buffer.alloc(1024)
+ , b = BufferList(buf)
+
+ b.append(buf)
+ b.copy(buf2)
+ t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
+ t.end()
+})
+
+tape('copy at a precise position', function (t) {
+ var buf = crypto.randomBytes(1004)
+ , buf2 = Buffer.alloc(1024)
+ , b = BufferList(buf)
+
+ b.copy(buf2, 20)
+ t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer')
+ t.end()
+})
+
+tape('copy starting from a precise location', function (t) {
+ var buf = crypto.randomBytes(10)
+ , buf2 = Buffer.alloc(5)
+ , b = BufferList(buf)
+
+ b.copy(buf2, 0, 5)
+ t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer')
+ t.end()
+})
+
+tape('copy in an interval', function (t) {
+ var rnd = crypto.randomBytes(10)
+ , b = BufferList(rnd) // put the random bytes there
+ , actual = Buffer.alloc(3)
+ , expected = Buffer.alloc(3)
+
+ rnd.copy(expected, 0, 5, 8)
+ b.copy(actual, 0, 5, 8)
+
+ t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer')
+ t.end()
+})
+
+tape('copy an interval between two buffers', function (t) {
+ var buf = crypto.randomBytes(10)
+ , buf2 = Buffer.alloc(10)
+ , b = BufferList(buf)
+
+ b.append(buf)
+ b.copy(buf2, 0, 5, 15)
+
+ t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer')
+ t.end()
+})
+
+tape('shallow slice across buffer boundaries', function (t) {
+ var bl = new BufferList(['First', 'Second', 'Third'])
+
+ t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh')
+ t.end()
+})
+
+tape('shallow slice within single buffer', function (t) {
+ t.plan(2)
+ var bl = new BufferList(['First', 'Second', 'Third'])
+
+ t.equal(bl.shallowSlice(5, 10).toString(), 'Secon')
+ t.equal(bl.shallowSlice(7, 10).toString(), 'con')
+ t.end()
+})
+
+tape('shallow slice single buffer', function (t) {
+ t.plan(3)
+ var bl = new BufferList(['First', 'Second', 'Third'])
+
+ t.equal(bl.shallowSlice(0, 5).toString(), 'First')
+ t.equal(bl.shallowSlice(5, 11).toString(), 'Second')
+ t.equal(bl.shallowSlice(11, 16).toString(), 'Third')
+})
+
+tape('shallow slice with negative or omitted indices', function (t) {
+ t.plan(4)
+ var bl = new BufferList(['First', 'Second', 'Third'])
+
+ t.equal(bl.shallowSlice().toString(), 'FirstSecondThird')
+ t.equal(bl.shallowSlice(5).toString(), 'SecondThird')
+ t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh')
+ t.equal(bl.shallowSlice(-8).toString(), 'ondThird')
+})
+
+tape('shallow slice does not make a copy', function (t) {
+ t.plan(1)
+ var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
+ var bl = (new BufferList(buffers)).shallowSlice(5, -3)
+
+ buffers[1].fill('h')
+ buffers[2].fill('h')
+
+ t.equal(bl.toString(), 'hhhhhhhh')
+})
+
+tape('duplicate', function (t) {
+ t.plan(2)
+
+ var bl = new BufferList('abcdefghij\xff\x00')
+ , dup = bl.duplicate()
+
+ t.equal(bl.prototype, dup.prototype)
+ t.equal(bl.toString('hex'), dup.toString('hex'))
+})
+
+tape('destroy no pipe', function (t) {
+ t.plan(2)
+
+ var bl = new BufferList('alsdkfja;lsdkfja;lsdk')
+ bl.destroy()
+
+ t.equal(bl._bufs.length, 0)
+ t.equal(bl.length, 0)
+})
+
+!process.browser && tape('destroy with pipe before read end', function (t) {
+ t.plan(2)
+
+ var bl = new BufferList()
+ fs.createReadStream(__dirname + '/test.js')
+ .pipe(bl)
+
+ bl.destroy()
+
+ t.equal(bl._bufs.length, 0)
+ t.equal(bl.length, 0)
+
+})
+
+!process.browser && tape('destroy with pipe before read end with race', function (t) {
+ t.plan(2)
+
+ var bl = new BufferList()
+ fs.createReadStream(__dirname + '/test.js')
+ .pipe(bl)
+
+ setTimeout(function () {
+ bl.destroy()
+ setTimeout(function () {
+ t.equal(bl._bufs.length, 0)
+ t.equal(bl.length, 0)
+ }, 500)
+ }, 500)
+})
+
+!process.browser && tape('destroy with pipe after read end', function (t) {
+ t.plan(2)
+
+ var bl = new BufferList()
+ fs.createReadStream(__dirname + '/test.js')
+ .on('end', onEnd)
+ .pipe(bl)
+
+ function onEnd () {
+ bl.destroy()
+
+ t.equal(bl._bufs.length, 0)
+ t.equal(bl.length, 0)
+ }
+})
+
+!process.browser && tape('destroy with pipe while writing to a destination', function (t) {
+ t.plan(4)
+
+ var bl = new BufferList()
+ , ds = new BufferList()
+
+ fs.createReadStream(__dirname + '/test.js')
+ .on('end', onEnd)
+ .pipe(bl)
+
+ function onEnd () {
+ bl.pipe(ds)
+
+ setTimeout(function () {
+ bl.destroy()
+
+ t.equals(bl._bufs.length, 0)
+ t.equals(bl.length, 0)
+
+ ds.destroy()
+
+ t.equals(bl._bufs.length, 0)
+ t.equals(bl.length, 0)
+
+ }, 100)
+ }
+})
+
+!process.browser && tape('handle error', function (t) {
+ t.plan(2)
+ fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) {
+ t.ok(err instanceof Error, 'has error')
+ t.notOk(data, 'no data')
+ }))
+})
diff --git a/node_modules/body-parser/HISTORY.md b/node_modules/body-parser/HISTORY.md
new file mode 100644
index 00000000..b8924919
--- /dev/null
+++ b/node_modules/body-parser/HISTORY.md
@@ -0,0 +1,665 @@
+1.20.2 / 2023-02-21
+===================
+
+ * Fix strict json error message on Node.js 19+
+ * deps: content-type@~1.0.5
+ - perf: skip value escaping when unnecessary
+ * deps: raw-body@2.5.2
+
+1.20.1 / 2022-10-06
+===================
+
+ * deps: qs@6.11.0
+ * perf: remove unnecessary object clone
+
+1.20.0 / 2022-04-02
+===================
+
+ * Fix error message for json parse whitespace in `strict`
+ * Fix internal error when inflated body exceeds limit
+ * Prevent loss of async hooks context
+ * Prevent hanging when request already read
+ * deps: depd@2.0.0
+ - Replace internal `eval` usage with `Function` constructor
+ - Use instance methods on `process` to check for listeners
+ * deps: http-errors@2.0.0
+ - deps: depd@2.0.0
+ - deps: statuses@2.0.1
+ * deps: on-finished@2.4.1
+ * deps: qs@6.10.3
+ * deps: raw-body@2.5.1
+ - deps: http-errors@2.0.0
+
+1.19.2 / 2022-02-15
+===================
+
+ * deps: bytes@3.1.2
+ * deps: qs@6.9.7
+ * Fix handling of `__proto__` keys
+ * deps: raw-body@2.4.3
+ - deps: bytes@3.1.2
+
+1.19.1 / 2021-12-10
+===================
+
+ * deps: bytes@3.1.1
+ * deps: http-errors@1.8.1
+ - deps: inherits@2.0.4
+ - deps: toidentifier@1.0.1
+ - deps: setprototypeof@1.2.0
+ * deps: qs@6.9.6
+ * deps: raw-body@2.4.2
+ - deps: bytes@3.1.1
+ - deps: http-errors@1.8.1
+ * deps: safe-buffer@5.2.1
+ * deps: type-is@~1.6.18
+
+1.19.0 / 2019-04-25
+===================
+
+ * deps: bytes@3.1.0
+ - Add petabyte (`pb`) support
+ * deps: http-errors@1.7.2
+ - Set constructor name when possible
+ - deps: setprototypeof@1.1.1
+ - deps: statuses@'>= 1.5.0 < 2'
+ * deps: iconv-lite@0.4.24
+ - Added encoding MIK
+ * deps: qs@6.7.0
+ - Fix parsing array brackets after index
+ * deps: raw-body@2.4.0
+ - deps: bytes@3.1.0
+ - deps: http-errors@1.7.2
+ - deps: iconv-lite@0.4.24
+ * deps: type-is@~1.6.17
+ - deps: mime-types@~2.1.24
+ - perf: prevent internal `throw` on invalid type
+
+1.18.3 / 2018-05-14
+===================
+
+ * Fix stack trace for strict json parse error
+ * deps: depd@~1.1.2
+ - perf: remove argument reassignment
+ * deps: http-errors@~1.6.3
+ - deps: depd@~1.1.2
+ - deps: setprototypeof@1.1.0
+ - deps: statuses@'>= 1.3.1 < 2'
+ * deps: iconv-lite@0.4.23
+ - Fix loading encoding with year appended
+ - Fix deprecation warnings on Node.js 10+
+ * deps: qs@6.5.2
+ * deps: raw-body@2.3.3
+ - deps: http-errors@1.6.3
+ - deps: iconv-lite@0.4.23
+ * deps: type-is@~1.6.16
+ - deps: mime-types@~2.1.18
+
+1.18.2 / 2017-09-22
+===================
+
+ * deps: debug@2.6.9
+ * perf: remove argument reassignment
+
+1.18.1 / 2017-09-12
+===================
+
+ * deps: content-type@~1.0.4
+ - perf: remove argument reassignment
+ - perf: skip parameter parsing when no parameters
+ * deps: iconv-lite@0.4.19
+ - Fix ISO-8859-1 regression
+ - Update Windows-1255
+ * deps: qs@6.5.1
+ - Fix parsing & compacting very deep objects
+ * deps: raw-body@2.3.2
+ - deps: iconv-lite@0.4.19
+
+1.18.0 / 2017-09-08
+===================
+
+ * Fix JSON strict violation error to match native parse error
+ * Include the `body` property on verify errors
+ * Include the `type` property on all generated errors
+ * Use `http-errors` to set status code on errors
+ * deps: bytes@3.0.0
+ * deps: debug@2.6.8
+ * deps: depd@~1.1.1
+ - Remove unnecessary `Buffer` loading
+ * deps: http-errors@~1.6.2
+ - deps: depd@1.1.1
+ * deps: iconv-lite@0.4.18
+ - Add support for React Native
+ - Add a warning if not loaded as utf-8
+ - Fix CESU-8 decoding in Node.js 8
+ - Improve speed of ISO-8859-1 encoding
+ * deps: qs@6.5.0
+ * deps: raw-body@2.3.1
+ - Use `http-errors` for standard emitted errors
+ - deps: bytes@3.0.0
+ - deps: iconv-lite@0.4.18
+ - perf: skip buffer decoding on overage chunk
+ * perf: prevent internal `throw` when missing charset
+
+1.17.2 / 2017-05-17
+===================
+
+ * deps: debug@2.6.7
+ - Fix `DEBUG_MAX_ARRAY_LENGTH`
+ - deps: ms@2.0.0
+ * deps: type-is@~1.6.15
+ - deps: mime-types@~2.1.15
+
+1.17.1 / 2017-03-06
+===================
+
+ * deps: qs@6.4.0
+ - Fix regression parsing keys starting with `[`
+
+1.17.0 / 2017-03-01
+===================
+
+ * deps: http-errors@~1.6.1
+ - Make `message` property enumerable for `HttpError`s
+ - deps: setprototypeof@1.0.3
+ * deps: qs@6.3.1
+ - Fix compacting nested arrays
+
+1.16.1 / 2017-02-10
+===================
+
+ * deps: debug@2.6.1
+ - Fix deprecation messages in WebStorm and other editors
+ - Undeprecate `DEBUG_FD` set to `1` or `2`
+
+1.16.0 / 2017-01-17
+===================
+
+ * deps: debug@2.6.0
+ - Allow colors in workers
+ - Deprecated `DEBUG_FD` environment variable
+ - Fix error when running under React Native
+ - Use same color for same namespace
+ - deps: ms@0.7.2
+ * deps: http-errors@~1.5.1
+ - deps: inherits@2.0.3
+ - deps: setprototypeof@1.0.2
+ - deps: statuses@'>= 1.3.1 < 2'
+ * deps: iconv-lite@0.4.15
+ - Added encoding MS-31J
+ - Added encoding MS-932
+ - Added encoding MS-936
+ - Added encoding MS-949
+ - Added encoding MS-950
+ - Fix GBK/GB18030 handling of Euro character
+ * deps: qs@6.2.1
+ - Fix array parsing from skipping empty values
+ * deps: raw-body@~2.2.0
+ - deps: iconv-lite@0.4.15
+ * deps: type-is@~1.6.14
+ - deps: mime-types@~2.1.13
+
+1.15.2 / 2016-06-19
+===================
+
+ * deps: bytes@2.4.0
+ * deps: content-type@~1.0.2
+ - perf: enable strict mode
+ * deps: http-errors@~1.5.0
+ - Use `setprototypeof` module to replace `__proto__` setting
+ - deps: statuses@'>= 1.3.0 < 2'
+ - perf: enable strict mode
+ * deps: qs@6.2.0
+ * deps: raw-body@~2.1.7
+ - deps: bytes@2.4.0
+ - perf: remove double-cleanup on happy path
+ * deps: type-is@~1.6.13
+ - deps: mime-types@~2.1.11
+
+1.15.1 / 2016-05-05
+===================
+
+ * deps: bytes@2.3.0
+ - Drop partial bytes on all parsed units
+ - Fix parsing byte string that looks like hex
+ * deps: raw-body@~2.1.6
+ - deps: bytes@2.3.0
+ * deps: type-is@~1.6.12
+ - deps: mime-types@~2.1.10
+
+1.15.0 / 2016-02-10
+===================
+
+ * deps: http-errors@~1.4.0
+ - Add `HttpError` export, for `err instanceof createError.HttpError`
+ - deps: inherits@2.0.1
+ - deps: statuses@'>= 1.2.1 < 2'
+ * deps: qs@6.1.0
+ * deps: type-is@~1.6.11
+ - deps: mime-types@~2.1.9
+
+1.14.2 / 2015-12-16
+===================
+
+ * deps: bytes@2.2.0
+ * deps: iconv-lite@0.4.13
+ * deps: qs@5.2.0
+ * deps: raw-body@~2.1.5
+ - deps: bytes@2.2.0
+ - deps: iconv-lite@0.4.13
+ * deps: type-is@~1.6.10
+ - deps: mime-types@~2.1.8
+
+1.14.1 / 2015-09-27
+===================
+
+ * Fix issue where invalid charset results in 400 when `verify` used
+ * deps: iconv-lite@0.4.12
+ - Fix CESU-8 decoding in Node.js 4.x
+ * deps: raw-body@~2.1.4
+ - Fix masking critical errors from `iconv-lite`
+ - deps: iconv-lite@0.4.12
+ * deps: type-is@~1.6.9
+ - deps: mime-types@~2.1.7
+
+1.14.0 / 2015-09-16
+===================
+
+ * Fix JSON strict parse error to match syntax errors
+ * Provide static `require` analysis in `urlencoded` parser
+ * deps: depd@~1.1.0
+ - Support web browser loading
+ * deps: qs@5.1.0
+ * deps: raw-body@~2.1.3
+ - Fix sync callback when attaching data listener causes sync read
+ * deps: type-is@~1.6.8
+ - Fix type error when given invalid type to match against
+ - deps: mime-types@~2.1.6
+
+1.13.3 / 2015-07-31
+===================
+
+ * deps: type-is@~1.6.6
+ - deps: mime-types@~2.1.4
+
+1.13.2 / 2015-07-05
+===================
+
+ * deps: iconv-lite@0.4.11
+ * deps: qs@4.0.0
+ - Fix dropping parameters like `hasOwnProperty`
+ - Fix user-visible incompatibilities from 3.1.0
+ - Fix various parsing edge cases
+ * deps: raw-body@~2.1.2
+ - Fix error stack traces to skip `makeError`
+ - deps: iconv-lite@0.4.11
+ * deps: type-is@~1.6.4
+ - deps: mime-types@~2.1.2
+ - perf: enable strict mode
+ - perf: remove argument reassignment
+
+1.13.1 / 2015-06-16
+===================
+
+ * deps: qs@2.4.2
+ - Downgraded from 3.1.0 because of user-visible incompatibilities
+
+1.13.0 / 2015-06-14
+===================
+
+ * Add `statusCode` property on `Error`s, in addition to `status`
+ * Change `type` default to `application/json` for JSON parser
+ * Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser
+ * Provide static `require` analysis
+ * Use the `http-errors` module to generate errors
+ * deps: bytes@2.1.0
+ - Slight optimizations
+ * deps: iconv-lite@0.4.10
+ - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails
+ - Leading BOM is now removed when decoding
+ * deps: on-finished@~2.3.0
+ - Add defined behavior for HTTP `CONNECT` requests
+ - Add defined behavior for HTTP `Upgrade` requests
+ - deps: ee-first@1.1.1
+ * deps: qs@3.1.0
+ - Fix dropping parameters like `hasOwnProperty`
+ - Fix various parsing edge cases
+ - Parsed object now has `null` prototype
+ * deps: raw-body@~2.1.1
+ - Use `unpipe` module for unpiping requests
+ - deps: iconv-lite@0.4.10
+ * deps: type-is@~1.6.3
+ - deps: mime-types@~2.1.1
+ - perf: reduce try block size
+ - perf: remove bitwise operations
+ * perf: enable strict mode
+ * perf: remove argument reassignment
+ * perf: remove delete call
+
+1.12.4 / 2015-05-10
+===================
+
+ * deps: debug@~2.2.0
+ * deps: qs@2.4.2
+ - Fix allowing parameters like `constructor`
+ * deps: on-finished@~2.2.1
+ * deps: raw-body@~2.0.1
+ - Fix a false-positive when unpiping in Node.js 0.8
+ - deps: bytes@2.0.1
+ * deps: type-is@~1.6.2
+ - deps: mime-types@~2.0.11
+
+1.12.3 / 2015-04-15
+===================
+
+ * Slight efficiency improvement when not debugging
+ * deps: depd@~1.0.1
+ * deps: iconv-lite@0.4.8
+ - Add encoding alias UNICODE-1-1-UTF-7
+ * deps: raw-body@1.3.4
+ - Fix hanging callback if request aborts during read
+ - deps: iconv-lite@0.4.8
+
+1.12.2 / 2015-03-16
+===================
+
+ * deps: qs@2.4.1
+ - Fix error when parameter `hasOwnProperty` is present
+
+1.12.1 / 2015-03-15
+===================
+
+ * deps: debug@~2.1.3
+ - Fix high intensity foreground color for bold
+ - deps: ms@0.7.0
+ * deps: type-is@~1.6.1
+ - deps: mime-types@~2.0.10
+
+1.12.0 / 2015-02-13
+===================
+
+ * add `debug` messages
+ * accept a function for the `type` option
+ * use `content-type` to parse `Content-Type` headers
+ * deps: iconv-lite@0.4.7
+ - Gracefully support enumerables on `Object.prototype`
+ * deps: raw-body@1.3.3
+ - deps: iconv-lite@0.4.7
+ * deps: type-is@~1.6.0
+ - fix argument reassignment
+ - fix false-positives in `hasBody` `Transfer-Encoding` check
+ - support wildcard for both type and subtype (`*/*`)
+ - deps: mime-types@~2.0.9
+
+1.11.0 / 2015-01-30
+===================
+
+ * make internal `extended: true` depth limit infinity
+ * deps: type-is@~1.5.6
+ - deps: mime-types@~2.0.8
+
+1.10.2 / 2015-01-20
+===================
+
+ * deps: iconv-lite@0.4.6
+ - Fix rare aliases of single-byte encodings
+ * deps: raw-body@1.3.2
+ - deps: iconv-lite@0.4.6
+
+1.10.1 / 2015-01-01
+===================
+
+ * deps: on-finished@~2.2.0
+ * deps: type-is@~1.5.5
+ - deps: mime-types@~2.0.7
+
+1.10.0 / 2014-12-02
+===================
+
+ * make internal `extended: true` array limit dynamic
+
+1.9.3 / 2014-11-21
+==================
+
+ * deps: iconv-lite@0.4.5
+ - Fix Windows-31J and X-SJIS encoding support
+ * deps: qs@2.3.3
+ - Fix `arrayLimit` behavior
+ * deps: raw-body@1.3.1
+ - deps: iconv-lite@0.4.5
+ * deps: type-is@~1.5.3
+ - deps: mime-types@~2.0.3
+
+1.9.2 / 2014-10-27
+==================
+
+ * deps: qs@2.3.2
+ - Fix parsing of mixed objects and values
+
+1.9.1 / 2014-10-22
+==================
+
+ * deps: on-finished@~2.1.1
+ - Fix handling of pipelined requests
+ * deps: qs@2.3.0
+ - Fix parsing of mixed implicit and explicit arrays
+ * deps: type-is@~1.5.2
+ - deps: mime-types@~2.0.2
+
+1.9.0 / 2014-09-24
+==================
+
+ * include the charset in "unsupported charset" error message
+ * include the encoding in "unsupported content encoding" error message
+ * deps: depd@~1.0.0
+
+1.8.4 / 2014-09-23
+==================
+
+ * fix content encoding to be case-insensitive
+
+1.8.3 / 2014-09-19
+==================
+
+ * deps: qs@2.2.4
+ - Fix issue with object keys starting with numbers truncated
+
+1.8.2 / 2014-09-15
+==================
+
+ * deps: depd@0.4.5
+
+1.8.1 / 2014-09-07
+==================
+
+ * deps: media-typer@0.3.0
+ * deps: type-is@~1.5.1
+
+1.8.0 / 2014-09-05
+==================
+
+ * make empty-body-handling consistent between chunked requests
+ - empty `json` produces `{}`
+ - empty `raw` produces `new Buffer(0)`
+ - empty `text` produces `''`
+ - empty `urlencoded` produces `{}`
+ * deps: qs@2.2.3
+ - Fix issue where first empty value in array is discarded
+ * deps: type-is@~1.5.0
+ - fix `hasbody` to be true for `content-length: 0`
+
+1.7.0 / 2014-09-01
+==================
+
+ * add `parameterLimit` option to `urlencoded` parser
+ * change `urlencoded` extended array limit to 100
+ * respond with 413 when over `parameterLimit` in `urlencoded`
+
+1.6.7 / 2014-08-29
+==================
+
+ * deps: qs@2.2.2
+ - Remove unnecessary cloning
+
+1.6.6 / 2014-08-27
+==================
+
+ * deps: qs@2.2.0
+ - Array parsing fix
+ - Performance improvements
+
+1.6.5 / 2014-08-16
+==================
+
+ * deps: on-finished@2.1.0
+
+1.6.4 / 2014-08-14
+==================
+
+ * deps: qs@1.2.2
+
+1.6.3 / 2014-08-10
+==================
+
+ * deps: qs@1.2.1
+
+1.6.2 / 2014-08-07
+==================
+
+ * deps: qs@1.2.0
+ - Fix parsing array of objects
+
+1.6.1 / 2014-08-06
+==================
+
+ * deps: qs@1.1.0
+ - Accept urlencoded square brackets
+ - Accept empty values in implicit array notation
+
+1.6.0 / 2014-08-05
+==================
+
+ * deps: qs@1.0.2
+ - Complete rewrite
+ - Limits array length to 20
+ - Limits object depth to 5
+ - Limits parameters to 1,000
+
+1.5.2 / 2014-07-27
+==================
+
+ * deps: depd@0.4.4
+ - Work-around v8 generating empty stack traces
+
+1.5.1 / 2014-07-26
+==================
+
+ * deps: depd@0.4.3
+ - Fix exception when global `Error.stackTraceLimit` is too low
+
+1.5.0 / 2014-07-20
+==================
+
+ * deps: depd@0.4.2
+ - Add `TRACE_DEPRECATION` environment variable
+ - Remove non-standard grey color from color output
+ - Support `--no-deprecation` argument
+ - Support `--trace-deprecation` argument
+ * deps: iconv-lite@0.4.4
+ - Added encoding UTF-7
+ * deps: raw-body@1.3.0
+ - deps: iconv-lite@0.4.4
+ - Added encoding UTF-7
+ - Fix `Cannot switch to old mode now` error on Node.js 0.10+
+ * deps: type-is@~1.3.2
+
+1.4.3 / 2014-06-19
+==================
+
+ * deps: type-is@1.3.1
+ - fix global variable leak
+
+1.4.2 / 2014-06-19
+==================
+
+ * deps: type-is@1.3.0
+ - improve type parsing
+
+1.4.1 / 2014-06-19
+==================
+
+ * fix urlencoded extended deprecation message
+
+1.4.0 / 2014-06-19
+==================
+
+ * add `text` parser
+ * add `raw` parser
+ * check accepted charset in content-type (accepts utf-8)
+ * check accepted encoding in content-encoding (accepts identity)
+ * deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed
+ * deprecate `urlencoded()` without provided `extended` option
+ * lazy-load urlencoded parsers
+ * parsers split into files for reduced mem usage
+ * support gzip and deflate bodies
+ - set `inflate: false` to turn off
+ * deps: raw-body@1.2.2
+ - Support all encodings from `iconv-lite`
+
+1.3.1 / 2014-06-11
+==================
+
+ * deps: type-is@1.2.1
+ - Switch dependency from mime to mime-types@1.0.0
+
+1.3.0 / 2014-05-31
+==================
+
+ * add `extended` option to urlencoded parser
+
+1.2.2 / 2014-05-27
+==================
+
+ * deps: raw-body@1.1.6
+ - assert stream encoding on node.js 0.8
+ - assert stream encoding on node.js < 0.10.6
+ - deps: bytes@1
+
+1.2.1 / 2014-05-26
+==================
+
+ * invoke `next(err)` after request fully read
+ - prevents hung responses and socket hang ups
+
+1.2.0 / 2014-05-11
+==================
+
+ * add `verify` option
+ * deps: type-is@1.2.0
+ - support suffix matching
+
+1.1.2 / 2014-05-11
+==================
+
+ * improve json parser speed
+
+1.1.1 / 2014-05-11
+==================
+
+ * fix repeated limit parsing with every request
+
+1.1.0 / 2014-05-10
+==================
+
+ * add `type` option
+ * deps: pin for safety and consistency
+
+1.0.2 / 2014-04-14
+==================
+
+ * use `type-is` module
+
+1.0.1 / 2014-03-20
+==================
+
+ * lower default limits to 100kb
diff --git a/node_modules/body-parser/LICENSE b/node_modules/body-parser/LICENSE
new file mode 100644
index 00000000..386b7b69
--- /dev/null
+++ b/node_modules/body-parser/LICENSE
@@ -0,0 +1,23 @@
+(The MIT License)
+
+Copyright (c) 2014 Jonathan Ong
+Copyright (c) 2014-2015 Douglas Christopher Wilson
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/body-parser/README.md b/node_modules/body-parser/README.md
new file mode 100644
index 00000000..38553bf7
--- /dev/null
+++ b/node_modules/body-parser/README.md
@@ -0,0 +1,465 @@
+# body-parser
+
+[![NPM Version][npm-version-image]][npm-url]
+[![NPM Downloads][npm-downloads-image]][npm-url]
+[![Build Status][ci-image]][ci-url]
+[![Test Coverage][coveralls-image]][coveralls-url]
+
+Node.js body parsing middleware.
+
+Parse incoming request bodies in a middleware before your handlers, available
+under the `req.body` property.
+
+**Note** As `req.body`'s shape is based on user-controlled input, all
+properties and values in this object are untrusted and should be validated
+before trusting. For example, `req.body.foo.toString()` may fail in multiple
+ways: the `foo` property may not be there or may not be a string, and
+`toString` may not be a function and could instead be a string or other user input.
+
+[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/).
+
+_This does not handle multipart bodies_, due to their complex and typically
+large nature. For multipart bodies, you may be interested in the following
+modules:
+
+ * [busboy](https://www.npmjs.org/package/busboy#readme) and
+ [connect-busboy](https://www.npmjs.org/package/connect-busboy#readme)
+ * [multiparty](https://www.npmjs.org/package/multiparty#readme) and
+ [connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme)
+ * [formidable](https://www.npmjs.org/package/formidable#readme)
+ * [multer](https://www.npmjs.org/package/multer#readme)
+
+This module provides the following parsers:
+
+ * [JSON body parser](#bodyparserjsonoptions)
+ * [Raw body parser](#bodyparserrawoptions)
+ * [Text body parser](#bodyparsertextoptions)
+ * [URL-encoded form body parser](#bodyparserurlencodedoptions)
+
+Other body parsers you might be interested in:
+
+- [body](https://www.npmjs.org/package/body#readme)
+- [co-body](https://www.npmjs.org/package/co-body#readme)
+
+## Installation
+
+```sh
+$ npm install body-parser
+```
+
+## API
+
+```js
+var bodyParser = require('body-parser')
+```
+
+The `bodyParser` object exposes various factories to create middlewares. All
+middlewares will populate the `req.body` property with the parsed body when
+the `Content-Type` request header matches the `type` option, or an empty
+object (`{}`) if there was no body to parse, the `Content-Type` was not matched,
+or an error occurred.
+
+The various errors returned by this module are described in the
+[errors section](#errors).
+
+### bodyParser.json([options])
+
+Returns middleware that only parses `json` and only looks at requests where
+the `Content-Type` header matches the `type` option. This parser accepts any
+Unicode encoding of the body and supports automatic inflation of `gzip` and
+`deflate` encodings.
+
+A new `body` object containing the parsed data is populated on the `request`
+object after the middleware (i.e. `req.body`).
+
+#### Options
+
+The `json` function takes an optional `options` object that may contain any of
+the following keys:
+
+##### inflate
+
+When set to `true`, then deflated (compressed) bodies will be inflated; when
+`false`, deflated bodies are rejected. Defaults to `true`.
+
+##### limit
+
+Controls the maximum request body size. If this is a number, then the value
+specifies the number of bytes; if it is a string, the value is passed to the
+[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
+to `'100kb'`.
+
+##### reviver
+
+The `reviver` option is passed directly to `JSON.parse` as the second
+argument. You can find more information on this argument
+[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter).
+
+##### strict
+
+When set to `true`, will only accept arrays and objects; when `false` will
+accept anything `JSON.parse` accepts. Defaults to `true`.
+
+##### type
+
+The `type` option is used to determine what media type the middleware will
+parse. This option can be a string, array of strings, or a function. If not a
+function, `type` option is passed directly to the
+[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
+be an extension name (like `json`), a mime type (like `application/json`), or
+a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type`
+option is called as `fn(req)` and the request is parsed if it returns a truthy
+value. Defaults to `application/json`.
+
+##### verify
+
+The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
+where `buf` is a `Buffer` of the raw request body and `encoding` is the
+encoding of the request. The parsing can be aborted by throwing an error.
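+
+For illustration, a minimal sketch (assuming an Express `app` and `bodyParser`
+required as shown in the API section above) that combines the options above:
+
+```js
+app.use(bodyParser.json({
+  limit: '200kb', // bodies larger than 200kb produce a 413 'entity.too.large' error
+  strict: true,   // only accept top-level objects and arrays
+  type: 'application/json',
+  verify: function (req, res, buf, encoding) {
+    // throwing here aborts parsing with an 'entity.verify.failed' error
+    if (buf.length > 0 && buf[0] === 0x00) {
+      throw new Error('unexpected NUL byte in body')
+    }
+  }
+}))
+```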
+
+### bodyParser.raw([options])
+
+Returns middleware that parses all bodies as a `Buffer` and only looks at
+requests where the `Content-Type` header matches the `type` option. This
+parser supports automatic inflation of `gzip` and `deflate` encodings.
+
+A new `body` object containing the parsed data is populated on the `request`
+object after the middleware (i.e. `req.body`). This will be a `Buffer` object
+of the body.
+
+#### Options
+
+The `raw` function takes an optional `options` object that may contain any of
+the following keys:
+
+##### inflate
+
+When set to `true`, then deflated (compressed) bodies will be inflated; when
+`false`, deflated bodies are rejected. Defaults to `true`.
+
+##### limit
+
+Controls the maximum request body size. If this is a number, then the value
+specifies the number of bytes; if it is a string, the value is passed to the
+[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
+to `'100kb'`.
+
+##### type
+
+The `type` option is used to determine what media type the middleware will
+parse. This option can be a string, array of strings, or a function.
+If not a function, `type` option is passed directly to the
+[type-is](https://www.npmjs.org/package/type-is#readme) library and this
+can be an extension name (like `bin`), a mime type (like
+`application/octet-stream`), or a mime type with a wildcard (like `*/*` or
+`application/*`). If a function, the `type` option is called as `fn(req)`
+and the request is parsed if it returns a truthy value. Defaults to
+`application/octet-stream`.
+
+##### verify
+
+The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
+where `buf` is a `Buffer` of the raw request body and `encoding` is the
+encoding of the request. The parsing can be aborted by throwing an error.
+
+### bodyParser.text([options])
+
+Returns middleware that parses all bodies as a string and only looks at
+requests where the `Content-Type` header matches the `type` option. This
+parser supports automatic inflation of `gzip` and `deflate` encodings.
+
+A new `body` string containing the parsed data is populated on the `request`
+object after the middleware (i.e. `req.body`). This will be a string of the
+body.
+
+#### Options
+
+The `text` function takes an optional `options` object that may contain any of
+the following keys:
+
+##### defaultCharset
+
+Specify the default character set for the text content if the charset is not
+specified in the `Content-Type` header of the request. Defaults to `utf-8`.
+
+##### inflate
+
+When set to `true`, then deflated (compressed) bodies will be inflated; when
+`false`, deflated bodies are rejected. Defaults to `true`.
+
+##### limit
+
+Controls the maximum request body size. If this is a number, then the value
+specifies the number of bytes; if it is a string, the value is passed to the
+[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
+to `'100kb'`.
+
+##### type
+
+The `type` option is used to determine what media type the middleware will
+parse. This option can be a string, array of strings, or a function. If not
+a function, `type` option is passed directly to the
+[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
+be an extension name (like `txt`), a mime type (like `text/plain`), or a mime
+type with a wildcard (like `*/*` or `text/*`). If a function, the `type`
+option is called as `fn(req)` and the request is parsed if it returns a
+truthy value. Defaults to `text/plain`.
+
+##### verify
+
+The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
+where `buf` is a `Buffer` of the raw request body and `encoding` is the
+encoding of the request. The parsing can be aborted by throwing an error.
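+
+For illustration, a minimal sketch (assuming an Express `app`) that accepts
+plain-text bodies and falls back to ISO-8859-1 when the request does not
+specify a charset:
+
+```js
+app.use(bodyParser.text({
+  defaultCharset: 'iso-8859-1', // used when Content-Type carries no charset
+  limit: '1mb',
+  type: 'text/plain'
+}))
+```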
+
+### bodyParser.urlencoded([options])
+
+Returns middleware that only parses `urlencoded` bodies and only looks at
+requests where the `Content-Type` header matches the `type` option. This
+parser accepts only UTF-8 encoding of the body and supports automatic
+inflation of `gzip` and `deflate` encodings.
+
+A new `body` object containing the parsed data is populated on the `request`
+object after the middleware (i.e. `req.body`). This object will contain
+key-value pairs, where the value can be a string or array (when `extended` is
+`false`), or any type (when `extended` is `true`).
+
+#### Options
+
+The `urlencoded` function takes an optional `options` object that may contain
+any of the following keys:
+
+##### extended
+
+The `extended` option allows you to choose between parsing the URL-encoded
+data with the `querystring` library (when `false`) or the `qs` library (when
+`true`). The "extended" syntax allows rich objects and arrays to be encoded
+into the URL-encoded format, allowing for a JSON-like experience with
+URL-encoded data. For more information, please
+[see the qs library](https://www.npmjs.org/package/qs#readme).
+
+Defaults to `true`, but using the default has been deprecated. Please
+research the difference between `qs` and `querystring` and choose the
+appropriate setting.
+
+##### inflate
+
+When set to `true`, then deflated (compressed) bodies will be inflated; when
+`false`, deflated bodies are rejected. Defaults to `true`.
+
+##### limit
+
+Controls the maximum request body size. If this is a number, then the value
+specifies the number of bytes; if it is a string, the value is passed to the
+[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
+to `'100kb'`.
+
+##### parameterLimit
+
+The `parameterLimit` option controls the maximum number of parameters that
+are allowed in the URL-encoded data. If a request contains more parameters
+than this value, a 413 will be returned to the client. Defaults to `1000`.
+
+##### type
+
+The `type` option is used to determine what media type the middleware will
+parse. This option can be a string, array of strings, or a function. If not
+a function, `type` option is passed directly to the
+[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
+be an extension name (like `urlencoded`), a mime type (like
+`application/x-www-form-urlencoded`), or a mime type with a wildcard (like
+`*/x-www-form-urlencoded`). If a function, the `type` option is called as
+`fn(req)` and the request is parsed if it returns a truthy value. Defaults
+to `application/x-www-form-urlencoded`.
+
+##### verify
+
+The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
+where `buf` is a `Buffer` of the raw request body and `encoding` is the
+encoding of the request. The parsing can be aborted by throwing an error.
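+
+For illustration, a minimal sketch (assuming an Express `app`) that parses
+simple key-value forms with a tighter parameter budget:
+
+```js
+app.use(bodyParser.urlencoded({
+  extended: false,     // parse with the querystring library
+  limit: '100kb',
+  parameterLimit: 500  // more than 500 parameters produces a 413 error
+}))
+```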
+
+## Errors
+
+The middlewares provided by this module create errors using the
+[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors
+will typically have a `status`/`statusCode` property that contains the suggested
+HTTP response code, an `expose` property to determine if the `message` property
+should be displayed to the client, a `type` property to determine the type of
+error without matching against the `message`, and a `body` property containing
+the read body, if available.
+
+The following are the common errors created, though any error can come through
+for various reasons.
+
+### content encoding unsupported
+
+This error will occur when the request had a `Content-Encoding` header that
+contained an encoding but the `inflate` option was set to `false`. The
+`status` property is set to `415`, the `type` property is set to
+`'encoding.unsupported'`, and the `encoding` property will be set to the
+encoding that is unsupported.
+
+### entity parse failed
+
+This error will occur when the request contained an entity that could not be
+parsed by the middleware. The `status` property is set to `400`, the `type`
+property is set to `'entity.parse.failed'`, and the `body` property is set to
+the entity value that failed parsing.
+
+### entity verify failed
+
+This error will occur when the request contained an entity that failed
+verification by the defined `verify` option. The `status` property is
+set to `403`, the `type` property is set to `'entity.verify.failed'`, and the
+`body` property is set to the entity value that failed verification.
+
+### request aborted
+
+This error will occur when the request is aborted by the client before reading
+the body has finished. The `received` property will be set to the number of
+bytes received before the request was aborted and the `expected` property is
+set to the number of expected bytes. The `status` property is set to `400`
+and `type` property is set to `'request.aborted'`.
+
+### request entity too large
+
+This error will occur when the request body's size is larger than the "limit"
+option. The `limit` property will be set to the byte limit and the `length`
+property will be set to the request body's length. The `status` property is
+set to `413` and the `type` property is set to `'entity.too.large'`.
+
+### request size did not match content length
+
+This error will occur when the request's length did not match the length from
+the `Content-Length` header. This typically occurs when the request is
+malformed, usually because the `Content-Length` header was calculated based on
+characters instead of bytes. The `status` property is set to `400` and the `type` property
+is set to `'request.size.invalid'`.
+
+### stream encoding should not be set
+
+This error will occur when something called the `req.setEncoding` method prior
+to this middleware. This module operates directly on bytes only and you cannot
+call `req.setEncoding` when using this module. The `status` property is set to
+`500` and the `type` property is set to `'stream.encoding.set'`.
+
+### stream is not readable
+
+This error will occur when the request is no longer readable when this middleware
+attempts to read it. This typically means something other than a middleware from
+this module read the request body already and the middleware was also configured to
+read the same request. The `status` property is set to `500` and the `type`
+property is set to `'stream.not.readable'`.
+
+### too many parameters
+
+This error will occur when the content of the request exceeds the configured
+`parameterLimit` for the `urlencoded` parser. The `status` property is set to
+`413` and the `type` property is set to `'parameters.too.many'`.
+
+### unsupported charset "BOGUS"
+
+This error will occur when the request had a charset parameter in the
+`Content-Type` header, but the `iconv-lite` module does not support it OR the
+parser does not support it. The charset is contained in the message as well
+as in the `charset` property. The `status` property is set to `415`, the
+`type` property is set to `'charset.unsupported'`, and the `charset` property
+is set to the charset that is unsupported.
+
+### unsupported content encoding "bogus"
+
+This error will occur when the request had a `Content-Encoding` header that
+contained an unsupported encoding. The encoding is contained in the message
+as well as in the `encoding` property. The `status` property is set to `415`,
+the `type` property is set to `'encoding.unsupported'`, and the `encoding`
+property is set to the encoding that is unsupported.
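+
+These errors are passed to `next()`, so they can be handled in an ordinary
+Express error-handling middleware. A minimal sketch (assuming an Express
+`app`) that uses the documented `status`, `type`, and `expose` properties:
+
+```js
+app.use(function (err, req, res, next) {
+  if (err.type === 'entity.too.large') {
+    // body exceeded the configured limit
+    return res.status(413).send('payload too large')
+  }
+  if (err.status && err.expose) {
+    // the message is safe to show to the client
+    return res.status(err.status).send(err.message)
+  }
+  next(err) // anything else goes to the default handler
+})
+```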
+
+## Examples
+
+### Express/Connect top-level generic
+
+This example demonstrates adding a generic JSON and URL-encoded parser as a
+top-level middleware, which will parse the bodies of all incoming requests.
+This is the simplest setup.
+
+```js
+var express = require('express')
+var bodyParser = require('body-parser')
+
+var app = express()
+
+// parse application/x-www-form-urlencoded
+app.use(bodyParser.urlencoded({ extended: false }))
+
+// parse application/json
+app.use(bodyParser.json())
+
+app.use(function (req, res) {
+ res.setHeader('Content-Type', 'text/plain')
+ res.write('you posted:\n')
+ res.end(JSON.stringify(req.body, null, 2))
+})
+```
+
+### Express route-specific
+
+This example demonstrates adding body parsers specifically to the routes that
+need them. In general, this is the most recommended way to use body-parser with
+Express.
+
+```js
+var express = require('express')
+var bodyParser = require('body-parser')
+
+var app = express()
+
+// create application/json parser
+var jsonParser = bodyParser.json()
+
+// create application/x-www-form-urlencoded parser
+var urlencodedParser = bodyParser.urlencoded({ extended: false })
+
+// POST /login gets urlencoded bodies
+app.post('/login', urlencodedParser, function (req, res) {
+ res.send('welcome, ' + req.body.username)
+})
+
+// POST /api/users gets JSON bodies
+app.post('/api/users', jsonParser, function (req, res) {
+ // create user in req.body
+})
+```
+
+### Change accepted type for parsers
+
+All the parsers accept a `type` option which allows you to change the
+`Content-Type` that the middleware will parse.
+
+```js
+var express = require('express')
+var bodyParser = require('body-parser')
+
+var app = express()
+
+// parse various different custom JSON types as JSON
+app.use(bodyParser.json({ type: 'application/*+json' }))
+
+// parse some custom thing into a Buffer
+app.use(bodyParser.raw({ type: 'application/vnd.custom-type' }))
+
+// parse an HTML body into a string
+app.use(bodyParser.text({ type: 'text/html' }))
+```
+
+## License
+
+[MIT](LICENSE)
+
+[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci
+[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml
+[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master
+[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master
+[node-version-image]: https://badgen.net/npm/node/body-parser
+[node-version-url]: https://nodejs.org/en/download
+[npm-downloads-image]: https://badgen.net/npm/dm/body-parser
+[npm-url]: https://npmjs.org/package/body-parser
+[npm-version-image]: https://badgen.net/npm/v/body-parser
diff --git a/node_modules/body-parser/SECURITY.md b/node_modules/body-parser/SECURITY.md
new file mode 100644
index 00000000..9694d429
--- /dev/null
+++ b/node_modules/body-parser/SECURITY.md
@@ -0,0 +1,25 @@
+# Security Policies and Procedures
+
+## Reporting a Bug
+
+The Express team and community take all security bugs seriously. Thank you
+for improving the security of Express. We appreciate your efforts and
+responsible disclosure and will make every effort to acknowledge your
+contributions.
+
+Report security bugs by emailing the current owner(s) of `body-parser`. This
+information can be found in the npm registry using the command
+`npm owner ls body-parser`.
+If unsure or unable to get the information from the above, open an issue
+in the [project issue tracker](https://github.com/expressjs/body-parser/issues)
+asking for the current contact information.
+
+To ensure a timely response to your report, please make sure that the entirety
+of the report is contained within the email body and not solely behind a web
+link or an attachment.
+
+At least one owner will acknowledge your email within 48 hours, and will send a
+more detailed response within 48 hours indicating the next steps in handling
+your report. After the initial reply to your report, the owners will
+endeavor to keep you informed of the progress towards a fix and full
+announcement, and may ask for additional information or guidance.
diff --git a/node_modules/body-parser/index.js b/node_modules/body-parser/index.js
new file mode 100644
index 00000000..bb24d739
--- /dev/null
+++ b/node_modules/body-parser/index.js
@@ -0,0 +1,156 @@
+/*!
+ * body-parser
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict'
+
+/**
+ * Module dependencies.
+ * @private
+ */
+
+var deprecate = require('depd')('body-parser')
+
+/**
+ * Cache of loaded parsers.
+ * @private
+ */
+
+var parsers = Object.create(null)
+
+/**
+ * @typedef Parsers
+ * @type {function}
+ * @property {function} json
+ * @property {function} raw
+ * @property {function} text
+ * @property {function} urlencoded
+ */
+
+/**
+ * Module exports.
+ * @type {Parsers}
+ */
+
+exports = module.exports = deprecate.function(bodyParser,
+ 'bodyParser: use individual json/urlencoded middlewares')
+
+/**
+ * JSON parser.
+ * @public
+ */
+
+Object.defineProperty(exports, 'json', {
+ configurable: true,
+ enumerable: true,
+ get: createParserGetter('json')
+})
+
+/**
+ * Raw parser.
+ * @public
+ */
+
+Object.defineProperty(exports, 'raw', {
+ configurable: true,
+ enumerable: true,
+ get: createParserGetter('raw')
+})
+
+/**
+ * Text parser.
+ * @public
+ */
+
+Object.defineProperty(exports, 'text', {
+ configurable: true,
+ enumerable: true,
+ get: createParserGetter('text')
+})
+
+/**
+ * URL-encoded parser.
+ * @public
+ */
+
+Object.defineProperty(exports, 'urlencoded', {
+ configurable: true,
+ enumerable: true,
+ get: createParserGetter('urlencoded')
+})
+
+/**
+ * Create a middleware to parse json and urlencoded bodies.
+ *
+ * @param {object} [options]
+ * @return {function}
+ * @deprecated
+ * @public
+ */
+
+function bodyParser (options) {
+ // use default type for parsers
+ var opts = Object.create(options || null, {
+ type: {
+ configurable: true,
+ enumerable: true,
+ value: undefined,
+ writable: true
+ }
+ })
+
+ var _urlencoded = exports.urlencoded(opts)
+ var _json = exports.json(opts)
+
+ return function bodyParser (req, res, next) {
+ _json(req, res, function (err) {
+ if (err) return next(err)
+ _urlencoded(req, res, next)
+ })
+ }
+}
+
+/**
+ * Create a getter for loading a parser.
+ * @private
+ */
+
+function createParserGetter (name) {
+ return function get () {
+ return loadParser(name)
+ }
+}
+
+/**
+ * Load a parser module.
+ * @private
+ */
+
+function loadParser (parserName) {
+ var parser = parsers[parserName]
+
+ if (parser !== undefined) {
+ return parser
+ }
+
+ // this uses a switch for static require analysis
+ switch (parserName) {
+ case 'json':
+ parser = require('./lib/types/json')
+ break
+ case 'raw':
+ parser = require('./lib/types/raw')
+ break
+ case 'text':
+ parser = require('./lib/types/text')
+ break
+ case 'urlencoded':
+ parser = require('./lib/types/urlencoded')
+ break
+ }
+
+ // store to prevent invoking require()
+ return (parsers[parserName] = parser)
+}
diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js
new file mode 100644
index 00000000..fce6283f
--- /dev/null
+++ b/node_modules/body-parser/lib/read.js
@@ -0,0 +1,205 @@
+/*!
+ * body-parser
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict'
+
+/**
+ * Module dependencies.
+ * @private
+ */
+
+var createError = require('http-errors')
+var destroy = require('destroy')
+var getBody = require('raw-body')
+var iconv = require('iconv-lite')
+var onFinished = require('on-finished')
+var unpipe = require('unpipe')
+var zlib = require('zlib')
+
+/**
+ * Module exports.
+ */
+
+module.exports = read
+
+/**
+ * Read a request into a buffer and parse.
+ *
+ * @param {object} req
+ * @param {object} res
+ * @param {function} next
+ * @param {function} parse
+ * @param {function} debug
+ * @param {object} options
+ * @private
+ */
+
+function read (req, res, next, parse, debug, options) {
+ var length
+ var opts = options
+ var stream
+
+ // flag as parsed
+ req._body = true
+
+ // read options
+ var encoding = opts.encoding !== null
+ ? opts.encoding
+ : null
+ var verify = opts.verify
+
+ try {
+ // get the content stream
+ stream = contentstream(req, debug, opts.inflate)
+ length = stream.length
+ stream.length = undefined
+ } catch (err) {
+ return next(err)
+ }
+
+ // set raw-body options
+ opts.length = length
+ opts.encoding = verify
+ ? null
+ : encoding
+
+ // assert charset is supported
+ if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) {
+ return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
+ charset: encoding.toLowerCase(),
+ type: 'charset.unsupported'
+ }))
+ }
+
+ // read body
+ debug('read body')
+ getBody(stream, opts, function (error, body) {
+ if (error) {
+ var _error
+
+ if (error.type === 'encoding.unsupported') {
+ // echo back charset
+ _error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
+ charset: encoding.toLowerCase(),
+ type: 'charset.unsupported'
+ })
+ } else {
+ // set status code on error
+ _error = createError(400, error)
+ }
+
+ // unpipe from stream and destroy
+ if (stream !== req) {
+ unpipe(req)
+ destroy(stream, true)
+ }
+
+ // read off entire request
+ dump(req, function onfinished () {
+ next(createError(400, _error))
+ })
+ return
+ }
+
+ // verify
+ if (verify) {
+ try {
+ debug('verify body')
+ verify(req, res, body, encoding)
+ } catch (err) {
+ next(createError(403, err, {
+ body: body,
+ type: err.type || 'entity.verify.failed'
+ }))
+ return
+ }
+ }
+
+ // parse
+ var str = body
+ try {
+ debug('parse body')
+ str = typeof body !== 'string' && encoding !== null
+ ? iconv.decode(body, encoding)
+ : body
+ req.body = parse(str)
+ } catch (err) {
+ next(createError(400, err, {
+ body: str,
+ type: err.type || 'entity.parse.failed'
+ }))
+ return
+ }
+
+ next()
+ })
+}
+
+/**
+ * Get the content stream of the request.
+ *
+ * @param {object} req
+ * @param {function} debug
+ * @param {boolean} [inflate=true]
+ * @return {object}
+ * @api private
+ */
+
+function contentstream (req, debug, inflate) {
+ var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
+ var length = req.headers['content-length']
+ var stream
+
+ debug('content-encoding "%s"', encoding)
+
+ if (inflate === false && encoding !== 'identity') {
+ throw createError(415, 'content encoding unsupported', {
+ encoding: encoding,
+ type: 'encoding.unsupported'
+ })
+ }
+
+ switch (encoding) {
+ case 'deflate':
+ stream = zlib.createInflate()
+ debug('inflate body')
+ req.pipe(stream)
+ break
+ case 'gzip':
+ stream = zlib.createGunzip()
+ debug('gunzip body')
+ req.pipe(stream)
+ break
+ case 'identity':
+ stream = req
+ stream.length = length
+ break
+ default:
+ throw createError(415, 'unsupported content encoding "' + encoding + '"', {
+ encoding: encoding,
+ type: 'encoding.unsupported'
+ })
+ }
+
+ return stream
+}
+
+/**
+ * Dump the contents of a request.
+ *
+ * @param {object} req
+ * @param {function} callback
+ * @api private
+ */
+
+function dump (req, callback) {
+ if (onFinished.isFinished(req)) {
+ callback(null)
+ } else {
+ onFinished(req, callback)
+ req.resume()
+ }
+}
diff --git a/node_modules/body-parser/lib/types/json.js b/node_modules/body-parser/lib/types/json.js
new file mode 100644
index 00000000..59f3f7e2
--- /dev/null
+++ b/node_modules/body-parser/lib/types/json.js
@@ -0,0 +1,247 @@
+/*!
+ * body-parser
+ * Copyright(c) 2014 Jonathan Ong
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict'
+
+/**
+ * Module dependencies.
+ * @private
+ */
+
+var bytes = require('bytes')
+var contentType = require('content-type')
+var createError = require('http-errors')
+var debug = require('debug')('body-parser:json')
+var read = require('../read')
+var typeis = require('type-is')
+
+/**
+ * Module exports.
+ */
+
+module.exports = json
+
+/**
+ * RegExp to match the first non-space in a string.
+ *
+ * Allowed whitespace is defined in RFC 7159:
+ *
+ * ws = *(
+ * %x20 / ; Space
+ * %x09 / ; Horizontal tab
+ * %x0A / ; Line feed or New line
+ * %x0D ) ; Carriage return
+ */
+
+var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex
+
+var JSON_SYNTAX_CHAR = '#'
+var JSON_SYNTAX_REGEXP = /#+/g
+
+/**
+ * Create a middleware to parse JSON bodies.
+ *
+ * @param {object} [options]
+ * @return {function}
+ * @public
+ */
+
+function json (options) {
+ var opts = options || {}
+
+ var limit = typeof opts.limit !== 'number'
+ ? bytes.parse(opts.limit || '100kb')
+ : opts.limit
+ var inflate = opts.inflate !== false
+ var reviver = opts.reviver
+ var strict = opts.strict !== false
+ var type = opts.type || 'application/json'
+ var verify = opts.verify || false
+
+ if (verify !== false && typeof verify !== 'function') {
+ throw new TypeError('option verify must be function')
+ }
+
+ // create the appropriate type checking function
+ var shouldParse = typeof type !== 'function'
+ ? typeChecker(type)
+ : type
+
+ function parse (body) {
+ if (body.length === 0) {
+ // special-case empty json body, as it's a common client-side mistake
+ // TODO: maybe make this configurable or part of "strict" option
+ return {}
+ }
+
+ if (strict) {
+ var first = firstchar(body)
+
+ if (first !== '{' && first !== '[') {
+ debug('strict violation')
+ throw createStrictSyntaxError(body, first)
+ }
+ }
+
+ try {
+ debug('parse json')
+ return JSON.parse(body, reviver)
+ } catch (e) {
+ throw normalizeJsonSyntaxError(e, {
+ message: e.message,
+ stack: e.stack
+ })
+ }
+ }
+
+ return function jsonParser (req, res, next) {
+ if (req._body) {
+ debug('body already parsed')
+ next()
+ return
+ }
+
+ req.body = req.body || {}
+
+ // skip requests without bodies
+ if (!typeis.hasBody(req)) {
+ debug('skip empty body')
+ next()
+ return
+ }
+
+ debug('content-type %j', req.headers['content-type'])
+
+ // determine if request should be parsed
+ if (!shouldParse(req)) {
+ debug('skip parsing')
+ next()
+ return
+ }
+
+ // assert charset per RFC 7159 sec 8.1
+ var charset = getCharset(req) || 'utf-8'
+ if (charset.slice(0, 4) !== 'utf-') {
+ debug('invalid charset')
+ next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
+ charset: charset,
+ type: 'charset.unsupported'
+ }))
+ return
+ }
+
+ // read
+ read(req, res, next, parse, debug, {
+ encoding: charset,
+ inflate: inflate,
+ limit: limit,
+ verify: verify
+ })
+ }
+}
+
+/**
+ * Create strict violation syntax error matching native error.
+ *
+ * @param {string} str
+ * @param {string} char
+ * @return {Error}
+ * @private
+ */
+
+function createStrictSyntaxError (str, char) {
+ var index = str.indexOf(char)
+ var partial = ''
+
+ if (index !== -1) {
+ partial = str.substring(0, index) + JSON_SYNTAX_CHAR
+
+ for (var i = index + 1; i < str.length; i++) {
+ partial += JSON_SYNTAX_CHAR
+ }
+ }
+
+ try {
+ JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')
+ } catch (e) {
+ return normalizeJsonSyntaxError(e, {
+ message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) {
+ return str.substring(index, index + placeholder.length)
+ }),
+ stack: e.stack
+ })
+ }
+}
+
+/**
+ * Get the first non-whitespace character in a string.
+ *
+ * @param {string} str
+ * @return {string|undefined}
+ * @private
+ */
+
+function firstchar (str) {
+ var match = FIRST_CHAR_REGEXP.exec(str)
+
+ return match
+ ? match[1]
+ : undefined
+}
+
+/**
+ * Get the charset of a request.
+ *
+ * @param {object} req
+ * @api private
+ */
+
+function getCharset (req) {
+ try {
+ return (contentType.parse(req).parameters.charset || '').toLowerCase()
+ } catch (e) {
+ return undefined
+ }
+}
+
+/**
+ * Normalize a SyntaxError for JSON.parse.
+ *
+ * @param {SyntaxError} error
+ * @param {object} obj
+ * @return {SyntaxError}
+ */
+
+function normalizeJsonSyntaxError (error, obj) {
+ var keys = Object.getOwnPropertyNames(error)
+
+ for (var i = 0; i < keys.length; i++) {
+ var key = keys[i]
+ if (key !== 'stack' && key !== 'message') {
+ delete error[key]
+ }
+ }
+
+ // replace stack before message for Node.js 0.10 and below
+ error.stack = obj.stack.replace(error.message, obj.message)
+ error.message = obj.message
+
+ return error
+}
+
+/**
+ * Get the simple type checker.
+ *
+ * @param {string} type
+ * @return {function}
+ */
+
+function typeChecker (type) {
+ return function checkType (req) {
+ return Boolean(typeis(req, type))
+ }
+}
diff --git a/node_modules/body-parser/lib/types/raw.js b/node_modules/body-parser/lib/types/raw.js
new file mode 100644
index 00000000..f5d1b674
--- /dev/null
+++ b/node_modules/body-parser/lib/types/raw.js
@@ -0,0 +1,101 @@
+/*!
+ * body-parser
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict'
+
+/**
+ * Module dependencies.
+ */
+
+var bytes = require('bytes')
+var debug = require('debug')('body-parser:raw')
+var read = require('../read')
+var typeis = require('type-is')
+
+/**
+ * Module exports.
+ */
+
+module.exports = raw
+
+/**
+ * Create a middleware to parse raw bodies.
+ *
+ * @param {object} [options]
+ * @return {function}
+ * @api public
+ */
+
+function raw (options) {
+ var opts = options || {}
+
+ var inflate = opts.inflate !== false
+ var limit = typeof opts.limit !== 'number'
+ ? bytes.parse(opts.limit || '100kb')
+ : opts.limit
+ var type = opts.type || 'application/octet-stream'
+ var verify = opts.verify || false
+
+ if (verify !== false && typeof verify !== 'function') {
+ throw new TypeError('option verify must be function')
+ }
+
+ // create the appropriate type checking function
+ var shouldParse = typeof type !== 'function'
+ ? typeChecker(type)
+ : type
+
+ function parse (buf) {
+ return buf
+ }
+
+ return function rawParser (req, res, next) {
+ if (req._body) {
+ debug('body already parsed')
+ next()
+ return
+ }
+
+ req.body = req.body || {}
+
+ // skip requests without bodies
+ if (!typeis.hasBody(req)) {
+ debug('skip empty body')
+ next()
+ return
+ }
+
+ debug('content-type %j', req.headers['content-type'])
+
+ // determine if request should be parsed
+ if (!shouldParse(req)) {
+ debug('skip parsing')
+ next()
+ return
+ }
+
+ // read
+ read(req, res, next, parse, debug, {
+ encoding: null,
+ inflate: inflate,
+ limit: limit,
+ verify: verify
+ })
+ }
+}
+
+/**
+ * Get the simple type checker.
+ *
+ * @param {string} type
+ * @return {function}
+ */
+
+function typeChecker (type) {
+ return function checkType (req) {
+ return Boolean(typeis(req, type))
+ }
+}
diff --git a/node_modules/body-parser/lib/types/text.js b/node_modules/body-parser/lib/types/text.js
new file mode 100644
index 00000000..083a0090
--- /dev/null
+++ b/node_modules/body-parser/lib/types/text.js
@@ -0,0 +1,121 @@
+/*!
+ * body-parser
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict'
+
+/**
+ * Module dependencies.
+ */
+
+var bytes = require('bytes')
+var contentType = require('content-type')
+var debug = require('debug')('body-parser:text')
+var read = require('../read')
+var typeis = require('type-is')
+
+/**
+ * Module exports.
+ */
+
+module.exports = text
+
+/**
+ * Create a middleware to parse text bodies.
+ *
+ * @param {object} [options]
+ * @return {function}
+ * @api public
+ */
+
+function text (options) {
+ var opts = options || {}
+
+ var defaultCharset = opts.defaultCharset || 'utf-8'
+ var inflate = opts.inflate !== false
+ var limit = typeof opts.limit !== 'number'
+ ? bytes.parse(opts.limit || '100kb')
+ : opts.limit
+ var type = opts.type || 'text/plain'
+ var verify = opts.verify || false
+
+ if (verify !== false && typeof verify !== 'function') {
+ throw new TypeError('option verify must be function')
+ }
+
+ // create the appropriate type checking function
+ var shouldParse = typeof type !== 'function'
+ ? typeChecker(type)
+ : type
+
+ function parse (buf) {
+ return buf
+ }
+
+ return function textParser (req, res, next) {
+ if (req._body) {
+ debug('body already parsed')
+ next()
+ return
+ }
+
+ req.body = req.body || {}
+
+ // skip requests without bodies
+ if (!typeis.hasBody(req)) {
+ debug('skip empty body')
+ next()
+ return
+ }
+
+ debug('content-type %j', req.headers['content-type'])
+
+ // determine if request should be parsed
+ if (!shouldParse(req)) {
+ debug('skip parsing')
+ next()
+ return
+ }
+
+ // get charset
+ var charset = getCharset(req) || defaultCharset
+
+ // read
+ read(req, res, next, parse, debug, {
+ encoding: charset,
+ inflate: inflate,
+ limit: limit,
+ verify: verify
+ })
+ }
+}
+
+/**
+ * Get the charset of a request.
+ *
+ * @param {object} req
+ * @api private
+ */
+
+function getCharset (req) {
+ try {
+ return (contentType.parse(req).parameters.charset || '').toLowerCase()
+ } catch (e) {
+ return undefined
+ }
+}
+
+/**
+ * Get the simple type checker.
+ *
+ * @param {string} type
+ * @return {function}
+ */
+
+function typeChecker (type) {
+ return function checkType (req) {
+ return Boolean(typeis(req, type))
+ }
+}
diff --git a/node_modules/body-parser/lib/types/urlencoded.js b/node_modules/body-parser/lib/types/urlencoded.js
new file mode 100644
index 00000000..b2ca8f16
--- /dev/null
+++ b/node_modules/body-parser/lib/types/urlencoded.js
@@ -0,0 +1,284 @@
+/*!
+ * body-parser
+ * Copyright(c) 2014 Jonathan Ong
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict'
+
+/**
+ * Module dependencies.
+ * @private
+ */
+
+var bytes = require('bytes')
+var contentType = require('content-type')
+var createError = require('http-errors')
+var debug = require('debug')('body-parser:urlencoded')
+var deprecate = require('depd')('body-parser')
+var read = require('../read')
+var typeis = require('type-is')
+
+/**
+ * Module exports.
+ */
+
+module.exports = urlencoded
+
+/**
+ * Cache of parser modules.
+ */
+
+var parsers = Object.create(null)
+
+/**
+ * Create a middleware to parse urlencoded bodies.
+ *
+ * @param {object} [options]
+ * @return {function}
+ * @public
+ */
+
+function urlencoded (options) {
+ var opts = options || {}
+
+ // notice because option default will flip in next major
+ if (opts.extended === undefined) {
+ deprecate('undefined extended: provide extended option')
+ }
+
+ var extended = opts.extended !== false
+ var inflate = opts.inflate !== false
+ var limit = typeof opts.limit !== 'number'
+ ? bytes.parse(opts.limit || '100kb')
+ : opts.limit
+ var type = opts.type || 'application/x-www-form-urlencoded'
+ var verify = opts.verify || false
+
+ if (verify !== false && typeof verify !== 'function') {
+ throw new TypeError('option verify must be function')
+ }
+
+ // create the appropriate query parser
+ var queryparse = extended
+ ? extendedparser(opts)
+ : simpleparser(opts)
+
+ // create the appropriate type checking function
+ var shouldParse = typeof type !== 'function'
+ ? typeChecker(type)
+ : type
+
+ function parse (body) {
+ return body.length
+ ? queryparse(body)
+ : {}
+ }
+
+ return function urlencodedParser (req, res, next) {
+ if (req._body) {
+ debug('body already parsed')
+ next()
+ return
+ }
+
+ req.body = req.body || {}
+
+ // skip requests without bodies
+ if (!typeis.hasBody(req)) {
+ debug('skip empty body')
+ next()
+ return
+ }
+
+ debug('content-type %j', req.headers['content-type'])
+
+ // determine if request should be parsed
+ if (!shouldParse(req)) {
+ debug('skip parsing')
+ next()
+ return
+ }
+
+ // assert charset
+ var charset = getCharset(req) || 'utf-8'
+ if (charset !== 'utf-8') {
+ debug('invalid charset')
+ next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
+ charset: charset,
+ type: 'charset.unsupported'
+ }))
+ return
+ }
+
+ // read
+ read(req, res, next, parse, debug, {
+ debug: debug,
+ encoding: charset,
+ inflate: inflate,
+ limit: limit,
+ verify: verify
+ })
+ }
+}
+
+/**
+ * Get the extended query parser.
+ *
+ * @param {object} options
+ */
+
+function extendedparser (options) {
+ var parameterLimit = options.parameterLimit !== undefined
+ ? options.parameterLimit
+ : 1000
+ var parse = parser('qs')
+
+ if (isNaN(parameterLimit) || parameterLimit < 1) {
+ throw new TypeError('option parameterLimit must be a positive number')
+ }
+
+ if (isFinite(parameterLimit)) {
+ parameterLimit = parameterLimit | 0
+ }
+
+ return function queryparse (body) {
+ var paramCount = parameterCount(body, parameterLimit)
+
+ if (paramCount === undefined) {
+ debug('too many parameters')
+ throw createError(413, 'too many parameters', {
+ type: 'parameters.too.many'
+ })
+ }
+
+ var arrayLimit = Math.max(100, paramCount)
+
+ debug('parse extended urlencoding')
+ return parse(body, {
+ allowPrototypes: true,
+ arrayLimit: arrayLimit,
+ depth: Infinity,
+ parameterLimit: parameterLimit
+ })
+ }
+}
+
+/**
+ * Get the charset of a request.
+ *
+ * @param {object} req
+ * @api private
+ */
+
+function getCharset (req) {
+ try {
+ return (contentType.parse(req).parameters.charset || '').toLowerCase()
+ } catch (e) {
+ return undefined
+ }
+}
+
+/**
+ * Count the number of parameters, stopping once limit reached
+ *
+ * @param {string} body
+ * @param {number} limit
+ * @api private
+ */
+
+function parameterCount (body, limit) {
+ var count = 0
+ var index = 0
+
+ while ((index = body.indexOf('&', index)) !== -1) {
+ count++
+ index++
+
+ if (count === limit) {
+ return undefined
+ }
+ }
+
+ return count
+}
+
+/**
+ * Get parser for module name dynamically.
+ *
+ * @param {string} name
+ * @return {function}
+ * @api private
+ */
+
+function parser (name) {
+ var mod = parsers[name]
+
+ if (mod !== undefined) {
+ return mod.parse
+ }
+
+ // this uses a switch for static require analysis
+ switch (name) {
+ case 'qs':
+ mod = require('qs')
+ break
+ case 'querystring':
+ mod = require('querystring')
+ break
+ }
+
+ // store to prevent invoking require()
+ parsers[name] = mod
+
+ return mod.parse
+}
+
+/**
+ * Get the simple query parser.
+ *
+ * @param {object} options
+ */
+
+function simpleparser (options) {
+ var parameterLimit = options.parameterLimit !== undefined
+ ? options.parameterLimit
+ : 1000
+ var parse = parser('querystring')
+
+ if (isNaN(parameterLimit) || parameterLimit < 1) {
+ throw new TypeError('option parameterLimit must be a positive number')
+ }
+
+ if (isFinite(parameterLimit)) {
+ parameterLimit = parameterLimit | 0
+ }
+
+ return function queryparse (body) {
+ var paramCount = parameterCount(body, parameterLimit)
+
+ if (paramCount === undefined) {
+ debug('too many parameters')
+ throw createError(413, 'too many parameters', {
+ type: 'parameters.too.many'
+ })
+ }
+
+ debug('parse urlencoding')
+ return parse(body, undefined, undefined, { maxKeys: parameterLimit })
+ }
+}
+
+/**
+ * Get the simple type checker.
+ *
+ * @param {string} type
+ * @return {function}
+ */
+
+function typeChecker (type) {
+ return function checkType (req) {
+ return Boolean(typeis(req, type))
+ }
+}
diff --git a/node_modules/body-parser/node_modules/debug/.coveralls.yml b/node_modules/body-parser/node_modules/debug/.coveralls.yml
new file mode 100644
index 00000000..20a70685
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/.coveralls.yml
@@ -0,0 +1 @@
+repo_token: SIAeZjKYlHK74rbcFvNHMUzjRiMpflxve
diff --git a/node_modules/body-parser/node_modules/debug/.eslintrc b/node_modules/body-parser/node_modules/debug/.eslintrc
new file mode 100644
index 00000000..8a37ae2c
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/.eslintrc
@@ -0,0 +1,11 @@
+{
+ "env": {
+ "browser": true,
+ "node": true
+ },
+ "rules": {
+ "no-console": 0,
+ "no-empty": [1, { "allowEmptyCatch": true }]
+ },
+ "extends": "eslint:recommended"
+}
diff --git a/node_modules/body-parser/node_modules/debug/.npmignore b/node_modules/body-parser/node_modules/debug/.npmignore
new file mode 100644
index 00000000..5f60eecc
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/.npmignore
@@ -0,0 +1,9 @@
+support
+test
+examples
+example
+*.sock
+dist
+yarn.lock
+coverage
+bower.json
diff --git a/node_modules/body-parser/node_modules/debug/.travis.yml b/node_modules/body-parser/node_modules/debug/.travis.yml
new file mode 100644
index 00000000..6c6090c3
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/.travis.yml
@@ -0,0 +1,14 @@
+
+language: node_js
+node_js:
+ - "6"
+ - "5"
+ - "4"
+
+install:
+ - make node_modules
+
+script:
+ - make lint
+ - make test
+ - make coveralls
diff --git a/node_modules/body-parser/node_modules/debug/CHANGELOG.md b/node_modules/body-parser/node_modules/debug/CHANGELOG.md
new file mode 100644
index 00000000..eadaa189
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/CHANGELOG.md
@@ -0,0 +1,362 @@
+
+2.6.9 / 2017-09-22
+==================
+
+ * remove ReDoS regexp in %o formatter (#504)
+
+2.6.8 / 2017-05-18
+==================
+
+ * Fix: Check for undefined on browser globals (#462, @marbemac)
+
+2.6.7 / 2017-05-16
+==================
+
+ * Fix: Update ms to 2.0.0 to fix regular expression denial of service vulnerability (#458, @hubdotcom)
+ * Fix: Inline extend function in node implementation (#452, @dougwilson)
+ * Docs: Fix typo (#455, @msasad)
+
+2.6.5 / 2017-04-27
+==================
+
+ * Fix: null reference check on window.documentElement.style.WebkitAppearance (#447, @thebigredgeek)
+ * Misc: clean up browser reference checks (#447, @thebigredgeek)
+ * Misc: add npm-debug.log to .gitignore (@thebigredgeek)
+
+
+2.6.4 / 2017-04-20
+==================
+
+ * Fix: bug that would occur if process.env.DEBUG is a non-string value. (#444, @LucianBuzzo)
+ * Chore: ignore bower.json in npm installations. (#437, @joaovieira)
+ * Misc: update "ms" to v0.7.3 (@tootallnate)
+
+2.6.3 / 2017-03-13
+==================
+
+ * Fix: Electron reference to `process.env.DEBUG` (#431, @paulcbetts)
+ * Docs: Changelog fix (@thebigredgeek)
+
+2.6.2 / 2017-03-10
+==================
+
+ * Fix: DEBUG_MAX_ARRAY_LENGTH (#420, @slavaGanzin)
+ * Docs: Add backers and sponsors from Open Collective (#422, @piamancini)
+ * Docs: Add Slackin invite badge (@tootallnate)
+
+2.6.1 / 2017-02-10
+==================
+
+ * Fix: Module's `export default` syntax fix for IE8 `Expected identifier` error
+ * Fix: Whitelist DEBUG_FD for values 1 and 2 only (#415, @pi0)
+ * Fix: IE8 "Expected identifier" error (#414, @vgoma)
+ * Fix: Namespaces would not disable once enabled (#409, @musikov)
+
+2.6.0 / 2016-12-28
+==================
+
+ * Fix: added better null pointer checks for browser useColors (@thebigredgeek)
+ * Improvement: removed explicit `window.debug` export (#404, @tootallnate)
+ * Improvement: deprecated `DEBUG_FD` environment variable (#405, @tootallnate)
+
+2.5.2 / 2016-12-25
+==================
+
+ * Fix: reference error on window within webworkers (#393, @KlausTrainer)
+ * Docs: fixed README typo (#391, @lurch)
+ * Docs: added notice about v3 api discussion (@thebigredgeek)
+
+2.5.1 / 2016-12-20
+==================
+
+ * Fix: babel-core compatibility
+
+2.5.0 / 2016-12-20
+==================
+
+ * Fix: wrong reference in bower file (@thebigredgeek)
+ * Fix: webworker compatibility (@thebigredgeek)
+ * Fix: output formatting issue (#388, @kribblo)
+ * Fix: babel-loader compatibility (#383, @escwald)
+ * Misc: removed built asset from repo and publications (@thebigredgeek)
+ * Misc: moved source files to /src (#378, @yamikuronue)
+ * Test: added karma integration and replaced babel with browserify for browser tests (#378, @yamikuronue)
+ * Test: coveralls integration (#378, @yamikuronue)
+ * Docs: simplified language in the opening paragraph (#373, @yamikuronue)
+
+2.4.5 / 2016-12-17
+==================
+
+ * Fix: `navigator` undefined in Rhino (#376, @jochenberger)
+ * Fix: custom log function (#379, @hsiliev)
+ * Improvement: bit of cleanup + linting fixes (@thebigredgeek)
+ * Improvement: rm non-maintained `dist/` dir (#375, @freewil)
+ * Docs: simplified language in the opening paragraph. (#373, @yamikuronue)
+
+2.4.4 / 2016-12-14
+==================
+
+ * Fix: work around debug being loaded in preload scripts for electron (#368, @paulcbetts)
+
+2.4.3 / 2016-12-14
+==================
+
+ * Fix: navigation.userAgent error for react native (#364, @escwald)
+
+2.4.2 / 2016-12-14
+==================
+
+ * Fix: browser colors (#367, @tootallnate)
+ * Misc: travis ci integration (@thebigredgeek)
+ * Misc: added linting and testing boilerplate with sanity check (@thebigredgeek)
+
+2.4.1 / 2016-12-13
+==================
+
+ * Fix: typo that broke the package (#356)
+
+2.4.0 / 2016-12-13
+==================
+
+ * Fix: bower.json references unbuilt src entry point (#342, @justmatt)
+ * Fix: revert "handle regex special characters" (@tootallnate)
+ * Feature: configurable `util.inspect()` options for NodeJS (#327, @tootallnate)
+ * Feature: `%O` (big O) pretty-prints objects (#322, @tootallnate)
+ * Improvement: allow colors in workers (#335, @botverse)
+ * Improvement: use same color for same namespace. (#338, @lchenay)
+
+2.3.3 / 2016-11-09
+==================
+
+ * Fix: Catch `JSON.stringify()` errors (#195, Jovan Alleyne)
+ * Fix: Returning `localStorage` saved values (#331, Levi Thomason)
+ * Improvement: Don't create an empty object when no `process` (Nathan Rajlich)
+
+2.3.2 / 2016-11-09
+==================
+
+ * Fix: be super-safe in index.js as well (@TooTallNate)
+ * Fix: should check whether process exists (Tom Newby)
+
+2.3.1 / 2016-11-09
+==================
+
+ * Fix: Added electron compatibility (#324, @paulcbetts)
+ * Improvement: Added performance optimizations (@tootallnate)
+ * Readme: Corrected PowerShell environment variable example (#252, @gimre)
+ * Misc: Removed yarn lock file from source control (#321, @fengmk2)
+
+2.3.0 / 2016-11-07
+==================
+
+ * Fix: Consistent placement of ms diff at end of output (#215, @gorangajic)
+ * Fix: Escaping of regex special characters in namespace strings (#250, @zacronos)
+ * Fix: Fixed bug causing crash on react-native (#282, @vkarpov15)
+ * Feature: Enabled ES6+ compatible import via default export (#212 @bucaran)
+ * Feature: Added %O formatter to reflect Chrome's console.log capability (#279, @oncletom)
+ * Package: Update "ms" to 0.7.2 (#315, @DevSide)
+ * Package: removed superfluous version property from bower.json (#207 @kkirsche)
+ * Readme: fix USE_COLORS to DEBUG_COLORS
+ * Readme: Doc fixes for format string sugar (#269, @mlucool)
+ * Readme: Updated docs for DEBUG_FD and DEBUG_COLORS environment variables (#232, @mattlyons0)
+ * Readme: doc fixes for PowerShell (#271 #243, @exoticknight @unreadable)
+ * Readme: better docs for browser support (#224, @matthewmueller)
+ * Tooling: Added yarn integration for development (#317, @thebigredgeek)
+ * Misc: Renamed History.md to CHANGELOG.md (@thebigredgeek)
+ * Misc: Added license file (#226 #274, @CantemoInternal @sdaitzman)
+ * Misc: Updated contributors (@thebigredgeek)
+
+2.2.0 / 2015-05-09
+==================
+
+ * package: update "ms" to v0.7.1 (#202, @dougwilson)
+ * README: add logging to file example (#193, @DanielOchoa)
+ * README: fixed a typo (#191, @amir-s)
+ * browser: expose `storage` (#190, @stephenmathieson)
+ * Makefile: add a `distclean` target (#189, @stephenmathieson)
+
+2.1.3 / 2015-03-13
+==================
+
+ * Updated stdout/stderr example (#186)
+ * Updated example/stdout.js to match debug current behaviour
+ * Renamed example/stderr.js to stdout.js
+ * Update Readme.md (#184)
+ * replace high intensity foreground color for bold (#182, #183)
+
+2.1.2 / 2015-03-01
+==================
+
+ * dist: recompile
+ * update "ms" to v0.7.0
+ * package: update "browserify" to v9.0.3
+ * component: fix "ms.js" repo location
+ * changed bower package name
+ * updated documentation about using debug in a browser
+ * fix: security error on safari (#167, #168, @yields)
+
+2.1.1 / 2014-12-29
+==================
+
+ * browser: use `typeof` to check for `console` existence
+ * browser: check for `console.log` truthiness (fix IE 8/9)
+ * browser: add support for Chrome apps
+ * Readme: added Windows usage remarks
+ * Add `bower.json` to properly support bower install
+
+2.1.0 / 2014-10-15
+==================
+
+ * node: implement `DEBUG_FD` env variable support
+ * package: update "browserify" to v6.1.0
+ * package: add "license" field to package.json (#135, @panuhorsmalahti)
+
+2.0.0 / 2014-09-01
+==================
+
+ * package: update "browserify" to v5.11.0
+ * node: use stderr rather than stdout for logging (#29, @stephenmathieson)
+
+1.0.4 / 2014-07-15
+==================
+
+ * dist: recompile
+ * example: remove `console.info()` log usage
+ * example: add "Content-Type" UTF-8 header to browser example
+ * browser: place %c marker after the space character
+ * browser: reset the "content" color via `color: inherit`
+ * browser: add colors support for Firefox >= v31
+ * debug: prefer an instance `log()` function over the global one (#119)
+ * Readme: update documentation about styled console logs for FF v31 (#116, @wryk)
+
+1.0.3 / 2014-07-09
+==================
+
+ * Add support for multiple wildcards in namespaces (#122, @seegno)
+ * browser: fix lint
+
+1.0.2 / 2014-06-10
+==================
+
+ * browser: update color palette (#113, @gscottolson)
+ * common: make console logging function configurable (#108, @timoxley)
+ * node: fix %o colors on old node <= 0.8.x
+ * Makefile: find node path using shell/which (#109, @timoxley)
+
+1.0.1 / 2014-06-06
+==================
+
+ * browser: use `removeItem()` to clear localStorage
+ * browser, node: don't set DEBUG if namespaces is undefined (#107, @leedm777)
+ * package: add "contributors" section
+ * node: fix comment typo
+ * README: list authors
+
+1.0.0 / 2014-06-04
+==================
+
+ * make ms diff be global, not be scope
+ * debug: ignore empty strings in enable()
+ * node: make DEBUG_COLORS able to disable coloring
+ * *: export the `colors` array
+ * npmignore: don't publish the `dist` dir
+ * Makefile: refactor to use browserify
+ * package: add "browserify" as a dev dependency
+ * Readme: add Web Inspector Colors section
+ * node: reset terminal color for the debug content
+ * node: map "%o" to `util.inspect()`
+ * browser: map "%j" to `JSON.stringify()`
+ * debug: add custom "formatters"
+ * debug: use "ms" module for humanizing the diff
+ * Readme: add "bash" syntax highlighting
+ * browser: add Firebug color support
+ * browser: add colors for WebKit browsers
+ * node: apply log to `console`
+ * rewrite: abstract common logic for Node & browsers
+ * add .jshintrc file
+
+0.8.1 / 2014-04-14
+==================
+
+ * package: re-add the "component" section
+
+0.8.0 / 2014-03-30
+==================
+
+ * add `enable()` method for nodejs. Closes #27
+ * change from stderr to stdout
+ * remove unnecessary index.js file
+
+0.7.4 / 2013-11-13
+==================
+
+ * remove "browserify" key from package.json (fixes something in browserify)
+
+0.7.3 / 2013-10-30
+==================
+
+ * fix: catch localStorage security error when cookies are blocked (Chrome)
+ * add debug(err) support. Closes #46
+ * add .browser prop to package.json. Closes #42
+
+0.7.2 / 2013-02-06
+==================
+
+ * fix package.json
+ * fix: Mobile Safari (private mode) is broken with debug
+ * fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript
+
+0.7.1 / 2013-02-05
+==================
+
+ * add repository URL to package.json
+ * add DEBUG_COLORED to force colored output
+ * add browserify support
+ * fix component. Closes #24
+
+0.7.0 / 2012-05-04
+==================
+
+ * Added .component to package.json
+ * Added debug.component.js build
+
+0.6.0 / 2012-03-16
+==================
+
+ * Added support for "-" prefix in DEBUG [Vinay Pulim]
+ * Added `.enabled` flag to the node version [TooTallNate]
+
+0.5.0 / 2012-02-02
+==================
+
+ * Added: humanize diffs. Closes #8
+ * Added `debug.disable()` to the CS variant
+ * Removed padding. Closes #10
+ * Fixed: persist client-side variant again. Closes #9
+
+0.4.0 / 2012-02-01
+==================
+
+ * Added browser variant support for older browsers [TooTallNate]
+ * Added `debug.enable('project:*')` to browser variant [TooTallNate]
+ * Added padding to diff (moved it to the right)
+
+0.3.0 / 2012-01-26
+==================
+
+ * Added millisecond diff when isatty, otherwise UTC string
+
+0.2.0 / 2012-01-22
+==================
+
+ * Added wildcard support
+
+0.1.0 / 2011-12-02
+==================
+
+ * Added: remove colors unless stderr isatty [TooTallNate]
+
+0.0.1 / 2010-01-03
+==================
+
+ * Initial release
diff --git a/node_modules/body-parser/node_modules/debug/LICENSE b/node_modules/body-parser/node_modules/debug/LICENSE
new file mode 100644
index 00000000..658c933d
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/LICENSE
@@ -0,0 +1,19 @@
+(The MIT License)
+
+Copyright (c) 2014 TJ Holowaychuk
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the 'Software'), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial
+portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/node_modules/body-parser/node_modules/debug/Makefile b/node_modules/body-parser/node_modules/debug/Makefile
new file mode 100644
index 00000000..584da8bf
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/Makefile
@@ -0,0 +1,50 @@
+# get Makefile directory name: http://stackoverflow.com/a/5982798/376773
+THIS_MAKEFILE_PATH:=$(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST))
+THIS_DIR:=$(shell cd $(dir $(THIS_MAKEFILE_PATH));pwd)
+
+# BIN directory
+BIN := $(THIS_DIR)/node_modules/.bin
+
+# Path
+PATH := node_modules/.bin:$(PATH)
+SHELL := /bin/bash
+
+# applications
+NODE ?= $(shell which node)
+YARN ?= $(shell which yarn)
+PKG ?= $(if $(YARN),$(YARN),$(NODE) $(shell which npm))
+BROWSERIFY ?= $(NODE) $(BIN)/browserify
+
+.FORCE:
+
+install: node_modules
+
+node_modules: package.json
+ @NODE_ENV= $(PKG) install
+ @touch node_modules
+
+lint: .FORCE
+ eslint browser.js debug.js index.js node.js
+
+test-node: .FORCE
+ istanbul cover node_modules/mocha/bin/_mocha -- test/**.js
+
+test-browser: .FORCE
+ mkdir -p dist
+
+ @$(BROWSERIFY) \
+ --standalone debug \
+ . > dist/debug.js
+
+ karma start --single-run
+ rimraf dist
+
+test: .FORCE
+ concurrently \
+ "make test-node" \
+ "make test-browser"
+
+coveralls:
+ cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js
+
+.PHONY: all install clean distclean
diff --git a/node_modules/body-parser/node_modules/debug/README.md b/node_modules/body-parser/node_modules/debug/README.md
new file mode 100644
index 00000000..f67be6b3
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/README.md
@@ -0,0 +1,312 @@
+# debug
+[Build Status](https://travis-ci.org/visionmedia/debug) [Coverage Status](https://coveralls.io/github/visionmedia/debug?branch=master) [Slack](https://visionmedia-community-slackin.now.sh/) [Backers](#backers) [Sponsors](#sponsors)
+
+A tiny node.js debugging utility modelled after node core's debugging technique.
+
+**Discussion around the V3 API is under way [here](https://github.com/visionmedia/debug/issues/370)**
+
+## Installation
+
+```bash
+$ npm install debug
+```
+
+## Usage
+
+`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole.
+
+Example _app.js_:
+
+```js
+var debug = require('debug')('http')
+ , http = require('http')
+ , name = 'My App';
+
+// fake app
+
+debug('booting %s', name);
+
+http.createServer(function(req, res){
+ debug(req.method + ' ' + req.url);
+ res.end('hello\n');
+}).listen(3000, function(){
+ debug('listening');
+});
+
+// fake worker of some kind
+
+require('./worker');
+```
+
+Example _worker.js_:
+
+```js
+var debug = require('debug')('worker');
+
+setInterval(function(){
+ debug('doing some work');
+}, 1000);
+```
+
+ The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples:
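+
+ For instance, a minimal sketch using the _app.js_ and _worker.js_ files from the Usage section above (any script that calls `debug` behaves the same way):
+
+```bash
+# enable only the "http" debugger
+DEBUG=http node app.js
+
+# enable only the "worker" debugger
+DEBUG=worker node app.js
+
+# enable both, comma-delimited
+DEBUG=http,worker node app.js
+```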
+
+#### Windows note
+
+ On Windows the environment variable is set using the `set` command.
+
+ ```cmd
+ set DEBUG=*,-not_this
+ ```
+
+ Note that PowerShell uses different syntax to set environment variables.
+
+ ```cmd
+ $env:DEBUG = "*,-not_this"
+ ```
+
+Then, run the program to be debugged as usual.
+
+## Millisecond diff
+
+ When actively developing an application it can be useful to see how much time was spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and again after the response arrives; the "+NNNms" suffix will show you how much time was spent between the two calls.
+
+ _(screenshot: example output showing the millisecond diff)_
+
+ When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below:
+
+ _(screenshot: example output with UTC timestamps)_
+
+## Conventions
+
+ If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debugger you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser".
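+
+ As an illustration, a made-up library named `mylib` (the library and feature names below exist only for this sketch) might expose one debugger per feature:
+
+```js
+var debug = require('debug');
+
+// one debugger per feature, prefixed with the library name
+var requestDebug = debug('mylib:request');
+var cacheDebug = debug('mylib:cache');
+
+requestDebug('fetching %s', '/users');
+cacheDebug('cache miss for key %s', 'users');
+```
+
+ A user could then enable a single feature with `DEBUG=mylib:cache`, or the whole library with `DEBUG=mylib:*`.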
+
+## Wildcards
+
+ The `*` character may be used as a wildcard. Suppose for example your library has debuggers named "connect:bodyParser", "connect:compress", and "connect:session". Instead of listing all three with `DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do `DEBUG=connect:*`, or to enable everything that uses this module, use `DEBUG=*`.
+
+ You can also exclude specific debuggers by prefixing them with a "-" character. For example, `DEBUG=*,-connect:*` would include all debuggers except those starting with "connect:".
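+
+ For example, assuming a hypothetical `app.js` that registers the three "connect:" debuggers above:
+
+```bash
+# everything under the connect: prefix
+DEBUG='connect:*' node app.js
+
+# every debugger from every module that uses debug
+DEBUG='*' node app.js
+
+# everything except the session debugger
+DEBUG='*,-connect:session' node app.js
+```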
+
+## Environment Variables
+
+ When running through Node.js, you can set a few environment variables that will
+ change the behavior of the debug logging:
+
+| Name | Purpose |
+|-----------|-------------------------------------------------|
+| `DEBUG` | Enables/disables specific debugging namespaces. |
+| `DEBUG_COLORS`| Whether or not to use colors in the debug output. |
+| `DEBUG_DEPTH` | Object inspection depth. |
+| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. |
+
+
+ __Note:__ The environment variables beginning with `DEBUG_` end up being
+ converted into an Options object that gets used with `%o`/`%O` formatters.
+ See the Node.js documentation for
+ [`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options)
+ for the complete list.
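+
+ For instance, these variables can be combined on the command line; `app.js` below stands in for any script that uses `debug`:
+
+```bash
+# disable colored output
+DEBUG='*' DEBUG_COLORS=no node app.js
+
+# inspect objects 10 levels deep with %o / %O and include hidden properties
+DEBUG='*' DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=yes node app.js
+```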
+
+## Formatters
+
+
+ Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. Below are the officially supported formatters:
+
+| Formatter | Representation |
+|-----------|----------------|
+| `%O` | Pretty-print an Object on multiple lines. |
+| `%o` | Pretty-print an Object all on a single line. |
+| `%s` | String. |
+| `%d` | Number (both integer and float). |
+| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. |
+| `%%` | Single percent sign ('%'). This does not consume an argument. |
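+
+A small sketch combining several of these formatters (the `app:format` namespace is arbitrary); run it with `DEBUG=app:format` to see the output:
+
+```js
+var debug = require('debug')('app:format');
+
+debug('user %s logged in %d times', 'alice', 3);                // %s string, %d number
+debug('request headers %o', { accept: 'text/html' });           // %o compact object inspection
+debug('full request %O', { headers: { accept: 'text/html' } }); // %O pretty-printed object
+debug('payload as JSON: %j', { nested: { ok: true } });         // %j via JSON.stringify
+debug('upload 100%% complete');                                 // literal percent sign
+```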
+
+### Custom formatters
+
+ You can add custom formatters by extending the `debug.formatters` object. For example, if you wanted to add support for rendering a Buffer as hex with `%h`, you could do something like:
+
+```js
+const createDebug = require('debug')
+createDebug.formatters.h = (v) => {
+ return v.toString('hex')
+}
+
+// …elsewhere
+const debug = createDebug('foo')
+debug('this is hex: %h', new Buffer('hello world'))
+// foo this is hex: 68656c6c6f20776f726c64 +0ms
+```
+
+## Browser support
+ You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify),
+ or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest),
+ if you don't want to build it yourself.
+
+ Debug's enable state is currently persisted by `localStorage`.
+ Consider the situation shown below where you have `worker:a` and `worker:b`,
+ and wish to debug both. You can enable this using `localStorage.debug`:
+
+```js
+localStorage.debug = 'worker:*'
+```
+
+And then refresh the page.
+
+```js
+a = debug('worker:a');
+b = debug('worker:b');
+
+setInterval(function(){
+ a('doing some work');
+}, 1000);
+
+setInterval(function(){
+ b('doing some work');
+}, 1200);
+```
+
+#### Web Inspector Colors
+
+ Colors are also enabled on "Web Inspectors" that understand the `%c` formatting
+ option. These are WebKit web inspectors, Firefox ([since version
+ 31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/))
+ and the Firebug plugin for Firefox (any version).
+
+ Colored output looks something like:
+
+ _(screenshot: colored debug output)_
+
+
+## Output streams
+
+ By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method:
+
+Example _stdout.js_:
+
+```js
+var debug = require('debug');
+var error = debug('app:error');
+
+// by default stderr is used
+error('goes to stderr!');
+
+var log = debug('app:log');
+// set this namespace to log via console.log
+log.log = console.log.bind(console); // don't forget to bind to console!
+log('goes to stdout');
+error('still goes to stderr!');
+
+// set all output to go via console.info
+// overrides all per-namespace log settings
+debug.log = console.info.bind(console);
+error('now goes to stdout via console.info');
+log('still goes to stdout, but via console.info now');
+```
+
+
+## Authors
+
+ - TJ Holowaychuk
+ - Nathan Rajlich
+ - Andrew Rhyne
+
+## Backers
+
+Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)]
+
+## Sponsors
+
+Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)]
+
+## License
+
+(The MIT License)
+
+Copyright (c) 2014-2016 TJ Holowaychuk <tj@vision-media.ca>
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/body-parser/node_modules/debug/component.json b/node_modules/body-parser/node_modules/debug/component.json
new file mode 100644
index 00000000..9de26410
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/component.json
@@ -0,0 +1,19 @@
+{
+ "name": "debug",
+ "repo": "visionmedia/debug",
+ "description": "small debugging utility",
+ "version": "2.6.9",
+ "keywords": [
+ "debug",
+ "log",
+ "debugger"
+ ],
+ "main": "src/browser.js",
+ "scripts": [
+ "src/browser.js",
+ "src/debug.js"
+ ],
+ "dependencies": {
+ "rauchg/ms.js": "0.7.1"
+ }
+}
diff --git a/node_modules/body-parser/node_modules/debug/karma.conf.js b/node_modules/body-parser/node_modules/debug/karma.conf.js
new file mode 100644
index 00000000..103a82d1
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/karma.conf.js
@@ -0,0 +1,70 @@
+// Karma configuration
+// Generated on Fri Dec 16 2016 13:09:51 GMT+0000 (UTC)
+
+module.exports = function(config) {
+ config.set({
+
+ // base path that will be used to resolve all patterns (eg. files, exclude)
+ basePath: '',
+
+
+ // frameworks to use
+ // available frameworks: https://npmjs.org/browse/keyword/karma-adapter
+ frameworks: ['mocha', 'chai', 'sinon'],
+
+
+ // list of files / patterns to load in the browser
+ files: [
+ 'dist/debug.js',
+ 'test/*spec.js'
+ ],
+
+
+ // list of files to exclude
+ exclude: [
+ 'src/node.js'
+ ],
+
+
+ // preprocess matching files before serving them to the browser
+ // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
+ preprocessors: {
+ },
+
+ // test results reporter to use
+ // possible values: 'dots', 'progress'
+ // available reporters: https://npmjs.org/browse/keyword/karma-reporter
+ reporters: ['progress'],
+
+
+ // web server port
+ port: 9876,
+
+
+ // enable / disable colors in the output (reporters and logs)
+ colors: true,
+
+
+ // level of logging
+ // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
+ logLevel: config.LOG_INFO,
+
+
+ // enable / disable watching file and executing tests whenever any file changes
+ autoWatch: true,
+
+
+ // start these browsers
+ // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
+ browsers: ['PhantomJS'],
+
+
+ // Continuous Integration mode
+ // if true, Karma captures browsers, runs the tests and exits
+ singleRun: false,
+
+ // Concurrency level
+ // how many browser should be started simultaneous
+ concurrency: Infinity
+ })
+}
diff --git a/node_modules/body-parser/node_modules/debug/node.js b/node_modules/body-parser/node_modules/debug/node.js
new file mode 100644
index 00000000..7fc36fe6
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/node.js
@@ -0,0 +1 @@
+module.exports = require('./src/node');
diff --git a/node_modules/body-parser/node_modules/debug/package.json b/node_modules/body-parser/node_modules/debug/package.json
new file mode 100644
index 00000000..dc787ba7
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/package.json
@@ -0,0 +1,49 @@
+{
+ "name": "debug",
+ "version": "2.6.9",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/visionmedia/debug.git"
+ },
+ "description": "small debugging utility",
+ "keywords": [
+ "debug",
+ "log",
+ "debugger"
+ ],
+ "author": "TJ Holowaychuk ",
+ "contributors": [
+ "Nathan Rajlich (http://n8.io)",
+ "Andrew Rhyne "
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "ms": "2.0.0"
+ },
+ "devDependencies": {
+ "browserify": "9.0.3",
+ "chai": "^3.5.0",
+ "concurrently": "^3.1.0",
+ "coveralls": "^2.11.15",
+ "eslint": "^3.12.1",
+ "istanbul": "^0.4.5",
+ "karma": "^1.3.0",
+ "karma-chai": "^0.1.0",
+ "karma-mocha": "^1.3.0",
+ "karma-phantomjs-launcher": "^1.0.2",
+ "karma-sinon": "^1.0.5",
+ "mocha": "^3.2.0",
+ "mocha-lcov-reporter": "^1.2.0",
+ "rimraf": "^2.5.4",
+ "sinon": "^1.17.6",
+ "sinon-chai": "^2.8.0"
+ },
+ "main": "./src/index.js",
+ "browser": "./src/browser.js",
+ "component": {
+ "scripts": {
+ "debug/index.js": "browser.js",
+ "debug/debug.js": "debug.js"
+ }
+ }
+}
diff --git a/node_modules/body-parser/node_modules/debug/src/browser.js b/node_modules/body-parser/node_modules/debug/src/browser.js
new file mode 100644
index 00000000..71069249
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/src/browser.js
@@ -0,0 +1,185 @@
+/**
+ * This is the web browser implementation of `debug()`.
+ *
+ * Expose `debug()` as the module.
+ */
+
+exports = module.exports = require('./debug');
+exports.log = log;
+exports.formatArgs = formatArgs;
+exports.save = save;
+exports.load = load;
+exports.useColors = useColors;
+exports.storage = 'undefined' != typeof chrome
+ && 'undefined' != typeof chrome.storage
+ ? chrome.storage.local
+ : localstorage();
+
+/**
+ * Colors.
+ */
+
+exports.colors = [
+ 'lightseagreen',
+ 'forestgreen',
+ 'goldenrod',
+ 'dodgerblue',
+ 'darkorchid',
+ 'crimson'
+];
+
+/**
+ * Currently only WebKit-based Web Inspectors, Firefox >= v31,
+ * and the Firebug extension (any Firefox version) are known
+ * to support "%c" CSS customizations.
+ *
+ * TODO: add a `localStorage` variable to explicitly enable/disable colors
+ */
+
+function useColors() {
+ // NB: In an Electron preload script, document will be defined but not fully
+ // initialized. Since we know we're in Chrome, we'll just detect this case
+ // explicitly
+ if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') {
+ return true;
+ }
+
+ // is webkit? http://stackoverflow.com/a/16459606/376773
+ // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
+ return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
+ // is firebug? http://stackoverflow.com/a/398120/376773
+ (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
+ // is firefox >= v31?
+ // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
+ (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||
+ // double check webkit in userAgent just in case we are in a worker
+ (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
+}
+
+/**
+ * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
+ */
+
+exports.formatters.j = function(v) {
+ try {
+ return JSON.stringify(v);
+ } catch (err) {
+ return '[UnexpectedJSONParseError]: ' + err.message;
+ }
+};
+
+
+/**
+ * Colorize log arguments if enabled.
+ *
+ * @api public
+ */
+
+function formatArgs(args) {
+ var useColors = this.useColors;
+
+ args[0] = (useColors ? '%c' : '')
+ + this.namespace
+ + (useColors ? ' %c' : ' ')
+ + args[0]
+ + (useColors ? '%c ' : ' ')
+ + '+' + exports.humanize(this.diff);
+
+ if (!useColors) return;
+
+ var c = 'color: ' + this.color;
+ args.splice(1, 0, c, 'color: inherit')
+
+ // the final "%c" is somewhat tricky, because there could be other
+ // arguments passed either before or after the %c, so we need to
+ // figure out the correct index to insert the CSS into
+ var index = 0;
+ var lastC = 0;
+ args[0].replace(/%[a-zA-Z%]/g, function(match) {
+ if ('%%' === match) return;
+ index++;
+ if ('%c' === match) {
+ // we only are interested in the *last* %c
+ // (the user may have provided their own)
+ lastC = index;
+ }
+ });
+
+ args.splice(lastC, 0, c);
+}
+
+/**
+ * Invokes `console.log()` when available.
+ * No-op when `console.log` is not a "function".
+ *
+ * @api public
+ */
+
+function log() {
+ // this hackery is required for IE8/9, where
+ // the `console.log` function doesn't have 'apply'
+ return 'object' === typeof console
+ && console.log
+ && Function.prototype.apply.call(console.log, console, arguments);
+}
+
+/**
+ * Save `namespaces`.
+ *
+ * @param {String} namespaces
+ * @api private
+ */
+
+function save(namespaces) {
+ try {
+ if (null == namespaces) {
+ exports.storage.removeItem('debug');
+ } else {
+ exports.storage.debug = namespaces;
+ }
+ } catch(e) {}
+}
+
+/**
+ * Load `namespaces`.
+ *
+ * @return {String} returns the previously persisted debug modes
+ * @api private
+ */
+
+function load() {
+ var r;
+ try {
+ r = exports.storage.debug;
+ } catch(e) {}
+
+ // If debug isn't set in LS, and we're in Electron, try to load $DEBUG
+ if (!r && typeof process !== 'undefined' && 'env' in process) {
+ r = process.env.DEBUG;
+ }
+
+ return r;
+}
+
+/**
+ * Enable namespaces listed in `localStorage.debug` initially.
+ */
+
+exports.enable(load());
+
+/**
+ * Localstorage attempts to return the localstorage.
+ *
+ * This is necessary because safari throws
+ * when a user disables cookies/localstorage
+ * and you attempt to access it.
+ *
+ * @return {LocalStorage}
+ * @api private
+ */
+
+function localstorage() {
+ try {
+ return window.localStorage;
+ } catch (e) {}
+}
diff --git a/node_modules/body-parser/node_modules/debug/src/debug.js b/node_modules/body-parser/node_modules/debug/src/debug.js
new file mode 100644
index 00000000..6a5e3fc9
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/src/debug.js
@@ -0,0 +1,202 @@
+
+/**
+ * This is the common logic for both the Node.js and web browser
+ * implementations of `debug()`.
+ *
+ * Expose `debug()` as the module.
+ */
+
+exports = module.exports = createDebug.debug = createDebug['default'] = createDebug;
+exports.coerce = coerce;
+exports.disable = disable;
+exports.enable = enable;
+exports.enabled = enabled;
+exports.humanize = require('ms');
+
+/**
+ * The currently active debug mode names, and names to skip.
+ */
+
+exports.names = [];
+exports.skips = [];
+
+/**
+ * Map of special "%n" handling functions, for the debug "format" argument.
+ *
+ * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
+ */
+
+exports.formatters = {};
+
+/**
+ * Previous log timestamp.
+ */
+
+var prevTime;
+
+/**
+ * Select a color.
+ * @param {String} namespace
+ * @return {Number}
+ * @api private
+ */
+
+function selectColor(namespace) {
+ var hash = 0, i;
+
+ for (i in namespace) {
+ hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
+ hash |= 0; // Convert to 32bit integer
+ }
+
+ return exports.colors[Math.abs(hash) % exports.colors.length];
+}
+
+/**
+ * Create a debugger with the given `namespace`.
+ *
+ * @param {String} namespace
+ * @return {Function}
+ * @api public
+ */
+
+function createDebug(namespace) {
+
+ function debug() {
+ // disabled?
+ if (!debug.enabled) return;
+
+ var self = debug;
+
+ // set `diff` timestamp
+ var curr = +new Date();
+ var ms = curr - (prevTime || curr);
+ self.diff = ms;
+ self.prev = prevTime;
+ self.curr = curr;
+ prevTime = curr;
+
+ // turn the `arguments` into a proper Array
+ var args = new Array(arguments.length);
+ for (var i = 0; i < args.length; i++) {
+ args[i] = arguments[i];
+ }
+
+ args[0] = exports.coerce(args[0]);
+
+ if ('string' !== typeof args[0]) {
+ // anything else let's inspect with %O
+ args.unshift('%O');
+ }
+
+ // apply any `formatters` transformations
+ var index = 0;
+ args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) {
+ // if we encounter an escaped % then don't increase the array index
+ if (match === '%%') return match;
+ index++;
+ var formatter = exports.formatters[format];
+ if ('function' === typeof formatter) {
+ var val = args[index];
+ match = formatter.call(self, val);
+
+ // now we need to remove `args[index]` since it's inlined in the `format`
+ args.splice(index, 1);
+ index--;
+ }
+ return match;
+ });
+
+ // apply env-specific formatting (colors, etc.)
+ exports.formatArgs.call(self, args);
+
+ var logFn = debug.log || exports.log || console.log.bind(console);
+ logFn.apply(self, args);
+ }
+
+ debug.namespace = namespace;
+ debug.enabled = exports.enabled(namespace);
+ debug.useColors = exports.useColors();
+ debug.color = selectColor(namespace);
+
+ // env-specific initialization logic for debug instances
+ if ('function' === typeof exports.init) {
+ exports.init(debug);
+ }
+
+ return debug;
+}
+
+/**
+ * Enables a debug mode by namespaces. A namespace may contain
+ * colon-separated components and `*` wildcards; multiple namespaces are
+ * separated by commas or whitespace, and a leading `-` excludes one.
+ *
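+ * For example, `enable('app:*,-app:verbose')` (hypothetical namespace names)
+ * turns on every `app:`-prefixed debugger except `app:verbose`.
+ *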
+ * @param {String} namespaces
+ * @api public
+ */
+
+function enable(namespaces) {
+ exports.save(namespaces);
+
+ exports.names = [];
+ exports.skips = [];
+
+ var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
+ var len = split.length;
+
+ for (var i = 0; i < len; i++) {
+ if (!split[i]) continue; // ignore empty strings
+ namespaces = split[i].replace(/\*/g, '.*?');
+ if (namespaces[0] === '-') {
+ exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));
+ } else {
+ exports.names.push(new RegExp('^' + namespaces + '$'));
+ }
+ }
+}
+
+/**
+ * Disable debug output.
+ *
+ * @api public
+ */
+
+function disable() {
+ exports.enable('');
+}
+
+/**
+ * Returns true if the given mode name is enabled, false otherwise.
+ *
+ * @param {String} name
+ * @return {Boolean}
+ * @api public
+ */
+
+function enabled(name) {
+ var i, len;
+ for (i = 0, len = exports.skips.length; i < len; i++) {
+ if (exports.skips[i].test(name)) {
+ return false;
+ }
+ }
+ for (i = 0, len = exports.names.length; i < len; i++) {
+ if (exports.names[i].test(name)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/**
+ * Coerce `val`.
+ *
+ * @param {Mixed} val
+ * @return {Mixed}
+ * @api private
+ */
+
+function coerce(val) {
+ if (val instanceof Error) return val.stack || val.message;
+ return val;
+}
diff --git a/node_modules/body-parser/node_modules/debug/src/index.js b/node_modules/body-parser/node_modules/debug/src/index.js
new file mode 100644
index 00000000..e12cf4d5
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/src/index.js
@@ -0,0 +1,10 @@
+/**
+ * Detect Electron renderer process, which is node, but we should
+ * treat as a browser.
+ */
+
+if (typeof process !== 'undefined' && process.type === 'renderer') {
+ module.exports = require('./browser.js');
+} else {
+ module.exports = require('./node.js');
+}
diff --git a/node_modules/body-parser/node_modules/debug/src/inspector-log.js b/node_modules/body-parser/node_modules/debug/src/inspector-log.js
new file mode 100644
index 00000000..60ea6c04
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/src/inspector-log.js
@@ -0,0 +1,15 @@
+module.exports = inspectorLog;
+
+// black hole
+const nullStream = new (require('stream').Writable)();
+nullStream._write = () => {};
+
+/**
+ * Outputs a `console.log()` to the Node.js Inspector console *only*.
+ */
+function inspectorLog() {
+ const stdout = console._stdout;
+ console._stdout = nullStream;
+ console.log.apply(console, arguments);
+ console._stdout = stdout;
+}
diff --git a/node_modules/body-parser/node_modules/debug/src/node.js b/node_modules/body-parser/node_modules/debug/src/node.js
new file mode 100644
index 00000000..b15109c9
--- /dev/null
+++ b/node_modules/body-parser/node_modules/debug/src/node.js
@@ -0,0 +1,248 @@
+/**
+ * Module dependencies.
+ */
+
+var tty = require('tty');
+var util = require('util');
+
+/**
+ * This is the Node.js implementation of `debug()`.
+ *
+ * Expose `debug()` as the module.
+ */
+
+exports = module.exports = require('./debug');
+exports.init = init;
+exports.log = log;
+exports.formatArgs = formatArgs;
+exports.save = save;
+exports.load = load;
+exports.useColors = useColors;
+
+/**
+ * Colors.
+ */
+
+exports.colors = [6, 2, 3, 4, 5, 1];
+
+/**
+ * Build up the default `inspectOpts` object from the environment variables.
+ *
+ * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
+ */
+
+exports.inspectOpts = Object.keys(process.env).filter(function (key) {
+ return /^debug_/i.test(key);
+}).reduce(function (obj, key) {
+ // camel-case
+ var prop = key
+ .substring(6)
+ .toLowerCase()
+ .replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() });
+
+ // coerce string value into JS value
+ var val = process.env[key];
+ if (/^(yes|on|true|enabled)$/i.test(val)) val = true;
+ else if (/^(no|off|false|disabled)$/i.test(val)) val = false;
+ else if (val === 'null') val = null;
+ else val = Number(val);
+
+ obj[prop] = val;
+ return obj;
+}, {});
+
+/**
+ * The file descriptor to write the `debug()` calls to.
+ * Set the `DEBUG_FD` env variable to override with another value. i.e.:
+ *
+ * $ DEBUG_FD=3 node script.js 3>debug.log
+ */
+
+var fd = parseInt(process.env.DEBUG_FD, 10) || 2;
+
+if (1 !== fd && 2 !== fd) {
+ util.deprecate(function(){}, 'except for stderr(2) and stdout(1), any other usage of DEBUG_FD is deprecated. Override debug.log if you want to use a different log function (https://git.io/debug_fd)')()
+}
+
+var stream = 1 === fd ? process.stdout :
+ 2 === fd ? process.stderr :
+ createWritableStdioStream(fd);
+
+/**
+ * Is the log destination a TTY? Colored output is enabled when `true`.
+ */
+
+function useColors() {
+ return 'colors' in exports.inspectOpts
+ ? Boolean(exports.inspectOpts.colors)
+ : tty.isatty(fd);
+}
+
+/**
+ * Map %o to `util.inspect()`, all on a single line.
+ */
+
+exports.formatters.o = function(v) {
+ this.inspectOpts.colors = this.useColors;
+ return util.inspect(v, this.inspectOpts)
+ .split('\n').map(function(str) {
+ return str.trim()
+ }).join(' ');
+};
+
+/**
+ * Map %o to `util.inspect()`, allowing multiple lines if needed.
+ */
+
+exports.formatters.O = function(v) {
+ this.inspectOpts.colors = this.useColors;
+ return util.inspect(v, this.inspectOpts);
+};
+
+/**
+ * Adds ANSI color escape codes if enabled.
+ *
+ * @api public
+ */
+
+function formatArgs(args) {
+ var name = this.namespace;
+ var useColors = this.useColors;
+
+ if (useColors) {
+ var c = this.color;
+ var prefix = ' \u001b[3' + c + ';1m' + name + ' ' + '\u001b[0m';
+
+ args[0] = prefix + args[0].split('\n').join('\n' + prefix);
+ args.push('\u001b[3' + c + 'm+' + exports.humanize(this.diff) + '\u001b[0m');
+ } else {
+ args[0] = new Date().toUTCString()
+ + ' ' + name + ' ' + args[0];
+ }
+}
+
+/**
+ * Invokes `util.format()` with the specified arguments and writes to `stream`.
+ */
+
+function log() {
+ return stream.write(util.format.apply(util, arguments) + '\n');
+}
+
+/**
+ * Save `namespaces`.
+ *
+ * @param {String} namespaces
+ * @api private
+ */
+
+function save(namespaces) {
+ if (null == namespaces) {
+ // If you set a process.env field to null or undefined, it gets cast to the
+ // string 'null' or 'undefined'. Just delete instead.
+ delete process.env.DEBUG;
+ } else {
+ process.env.DEBUG = namespaces;
+ }
+}
+
+/**
+ * Load `namespaces`.
+ *
+ * @return {String} returns the previously persisted debug modes
+ * @api private
+ */
+
+function load() {
+ return process.env.DEBUG;
+}
+
+/**
+ * Copied from `node/src/node.js`.
+ *
+ * XXX: It's lame that node doesn't expose this API out-of-the-box. It also
+ * relies on the undocumented `tty_wrap.guessHandleType()` which is also lame.
+ */
+
+function createWritableStdioStream (fd) {
+ var stream;
+ var tty_wrap = process.binding('tty_wrap');
+
+ // Note stream._type is used for test-module-load-list.js
+
+ switch (tty_wrap.guessHandleType(fd)) {
+ case 'TTY':
+ stream = new tty.WriteStream(fd);
+ stream._type = 'tty';
+
+ // Hack to have stream not keep the event loop alive.
+ // See https://github.com/joyent/node/issues/1726
+ if (stream._handle && stream._handle.unref) {
+ stream._handle.unref();
+ }
+ break;
+
+ case 'FILE':
+ var fs = require('fs');
+ stream = new fs.SyncWriteStream(fd, { autoClose: false });
+ stream._type = 'fs';
+ break;
+
+ case 'PIPE':
+ case 'TCP':
+ var net = require('net');
+ stream = new net.Socket({
+ fd: fd,
+ readable: false,
+ writable: true
+ });
+
+ // FIXME Should probably have an option in net.Socket to create a
+ // stream from an existing fd which is writable only. But for now
+ // we'll just add this hack and set the `readable` member to false.
+ // Test: ./node test/fixtures/echo.js < /etc/passwd
+ stream.readable = false;
+ stream.read = null;
+ stream._type = 'pipe';
+
+ // FIXME Hack to have stream not keep the event loop alive.
+ // See https://github.com/joyent/node/issues/1726
+ if (stream._handle && stream._handle.unref) {
+ stream._handle.unref();
+ }
+ break;
+
+ default:
+ // Probably an error in uv_guess_handle()
+ throw new Error('Implement me. Unknown stream file type!');
+ }
+
+ // For supporting legacy API we put the FD here.
+ stream.fd = fd;
+
+ stream._isStdio = true;
+
+ return stream;
+}
+
+/**
+ * Init logic for `debug` instances.
+ *
+ * Create a new `inspectOpts` object in case `useColors` is set
+ * differently for a particular `debug` instance.
+ */
+
+function init (debug) {
+ debug.inspectOpts = {};
+
+ var keys = Object.keys(exports.inspectOpts);
+ for (var i = 0; i < keys.length; i++) {
+ debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
+ }
+}
+
+/**
+ * Enable namespaces listed in `process.env.DEBUG` initially.
+ */
+
+exports.enable(load());
diff --git a/node_modules/body-parser/node_modules/ms/index.js b/node_modules/body-parser/node_modules/ms/index.js
new file mode 100644
index 00000000..6a522b16
--- /dev/null
+++ b/node_modules/body-parser/node_modules/ms/index.js
@@ -0,0 +1,152 @@
+/**
+ * Helpers.
+ */
+
+var s = 1000;
+var m = s * 60;
+var h = m * 60;
+var d = h * 24;
+var y = d * 365.25;
+
+/**
+ * Parse or format the given `val`.
+ *
+ * Options:
+ *
+ * - `long` verbose formatting [false]
+ *
+ * @param {String|Number} val
+ * @param {Object} [options]
+ * @throws {Error} throws an error if `val` is not a non-empty string or a number
+ * @return {String|Number}
+ * @api public
+ */
+
+module.exports = function(val, options) {
+ options = options || {};
+ var type = typeof val;
+ if (type === 'string' && val.length > 0) {
+ return parse(val);
+ } else if (type === 'number' && isNaN(val) === false) {
+ return options.long ? fmtLong(val) : fmtShort(val);
+ }
+ throw new Error(
+ 'val is not a non-empty string or a valid number. val=' +
+ JSON.stringify(val)
+ );
+};
+
+/**
+ * Parse the given `str` and return milliseconds.
+ *
+ * @param {String} str
+ * @return {Number}
+ * @api private
+ */
+
+function parse(str) {
+ str = String(str);
+ if (str.length > 100) {
+ return;
+ }
+ var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(
+ str
+ );
+ if (!match) {
+ return;
+ }
+ var n = parseFloat(match[1]);
+ var type = (match[2] || 'ms').toLowerCase();
+ switch (type) {
+ case 'years':
+ case 'year':
+ case 'yrs':
+ case 'yr':
+ case 'y':
+ return n * y;
+ case 'days':
+ case 'day':
+ case 'd':
+ return n * d;
+ case 'hours':
+ case 'hour':
+ case 'hrs':
+ case 'hr':
+ case 'h':
+ return n * h;
+ case 'minutes':
+ case 'minute':
+ case 'mins':
+ case 'min':
+ case 'm':
+ return n * m;
+ case 'seconds':
+ case 'second':
+ case 'secs':
+ case 'sec':
+ case 's':
+ return n * s;
+ case 'milliseconds':
+ case 'millisecond':
+ case 'msecs':
+ case 'msec':
+ case 'ms':
+ return n;
+ default:
+ return undefined;
+ }
+}
+
+/**
+ * Short format for `ms`.
+ *
+ * @param {Number} ms
+ * @return {String}
+ * @api private
+ */
+
+function fmtShort(ms) {
+ if (ms >= d) {
+ return Math.round(ms / d) + 'd';
+ }
+ if (ms >= h) {
+ return Math.round(ms / h) + 'h';
+ }
+ if (ms >= m) {
+ return Math.round(ms / m) + 'm';
+ }
+ if (ms >= s) {
+ return Math.round(ms / s) + 's';
+ }
+ return ms + 'ms';
+}
+
+/**
+ * Long format for `ms`.
+ *
+ * @param {Number} ms
+ * @return {String}
+ * @api private
+ */
+
+function fmtLong(ms) {
+ return plural(ms, d, 'day') ||
+ plural(ms, h, 'hour') ||
+ plural(ms, m, 'minute') ||
+ plural(ms, s, 'second') ||
+ ms + ' ms';
+}
+
+/**
+ * Pluralization helper.
+ */
+
+function plural(ms, n, name) {
+ if (ms < n) {
+ return;
+ }
+ if (ms < n * 1.5) {
+ return Math.floor(ms / n) + ' ' + name;
+ }
+ return Math.ceil(ms / n) + ' ' + name + 's';
+}
diff --git a/node_modules/body-parser/node_modules/ms/license.md b/node_modules/body-parser/node_modules/ms/license.md
new file mode 100644
index 00000000..69b61253
--- /dev/null
+++ b/node_modules/body-parser/node_modules/ms/license.md
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Zeit, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/body-parser/node_modules/ms/package.json b/node_modules/body-parser/node_modules/ms/package.json
new file mode 100644
index 00000000..6a31c81f
--- /dev/null
+++ b/node_modules/body-parser/node_modules/ms/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "ms",
+ "version": "2.0.0",
+ "description": "Tiny milisecond conversion utility",
+ "repository": "zeit/ms",
+ "main": "./index",
+ "files": [
+ "index.js"
+ ],
+ "scripts": {
+ "precommit": "lint-staged",
+ "lint": "eslint lib/* bin/*",
+ "test": "mocha tests.js"
+ },
+ "eslintConfig": {
+ "extends": "eslint:recommended",
+ "env": {
+ "node": true,
+ "es6": true
+ }
+ },
+ "lint-staged": {
+ "*.js": [
+ "npm run lint",
+ "prettier --single-quote --write",
+ "git add"
+ ]
+ },
+ "license": "MIT",
+ "devDependencies": {
+ "eslint": "3.19.0",
+ "expect.js": "0.3.1",
+ "husky": "0.13.3",
+ "lint-staged": "3.4.1",
+ "mocha": "3.4.1"
+ }
+}
diff --git a/node_modules/body-parser/node_modules/ms/readme.md b/node_modules/body-parser/node_modules/ms/readme.md
new file mode 100644
index 00000000..84a9974c
--- /dev/null
+++ b/node_modules/body-parser/node_modules/ms/readme.md
@@ -0,0 +1,51 @@
+# ms
+
+Use this package to easily convert various time formats to milliseconds.
+
+## Examples
+
+```js
+ms('2 days') // 172800000
+ms('1d') // 86400000
+ms('10h') // 36000000
+ms('2.5 hrs') // 9000000
+ms('2h') // 7200000
+ms('1m') // 60000
+ms('5s') // 5000
+ms('1y') // 31557600000
+ms('100') // 100
+```
+
+### Convert from milliseconds
+
+```js
+ms(60000) // "1m"
+ms(2 * 60000) // "2m"
+ms(ms('10 hours')) // "10h"
+```
+
+### Time format written-out
+
+```js
+ms(60000, { long: true }) // "1 minute"
+ms(2 * 60000, { long: true }) // "2 minutes"
+ms(ms('10 hours'), { long: true }) // "10 hours"
+```
+
+## Features
+
+- Works both in [node](https://nodejs.org) and in the browser.
+- If a number is supplied to `ms`, a string with a unit is returned.
+- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`).
+- If you pass a string with a number and a valid unit, the number of equivalent ms is returned.
+
+## Caught a bug?
+
+1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device
+2. Link the package to the global module directory: `npm link`
+3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, node will now use your clone of ms!
+
+As always, you can run the tests using: `npm test`
diff --git a/node_modules/body-parser/package.json b/node_modules/body-parser/package.json
new file mode 100644
index 00000000..46373043
--- /dev/null
+++ b/node_modules/body-parser/package.json
@@ -0,0 +1,56 @@
+{
+ "name": "body-parser",
+ "description": "Node.js body parsing middleware",
+ "version": "1.20.2",
+ "contributors": [
+ "Douglas Christopher Wilson ",
+ "Jonathan Ong (http://jongleberry.com)"
+ ],
+ "license": "MIT",
+ "repository": "expressjs/body-parser",
+ "dependencies": {
+ "bytes": "3.1.2",
+ "content-type": "~1.0.5",
+ "debug": "2.6.9",
+ "depd": "2.0.0",
+ "destroy": "1.2.0",
+ "http-errors": "2.0.0",
+ "iconv-lite": "0.4.24",
+ "on-finished": "2.4.1",
+ "qs": "6.11.0",
+ "raw-body": "2.5.2",
+ "type-is": "~1.6.18",
+ "unpipe": "1.0.0"
+ },
+ "devDependencies": {
+ "eslint": "8.34.0",
+ "eslint-config-standard": "14.1.1",
+ "eslint-plugin-import": "2.27.5",
+ "eslint-plugin-markdown": "3.0.0",
+ "eslint-plugin-node": "11.1.0",
+ "eslint-plugin-promise": "6.1.1",
+ "eslint-plugin-standard": "4.1.0",
+ "methods": "1.1.2",
+ "mocha": "10.2.0",
+ "nyc": "15.1.0",
+ "safe-buffer": "5.2.1",
+ "supertest": "6.3.3"
+ },
+ "files": [
+ "lib/",
+ "LICENSE",
+ "HISTORY.md",
+ "SECURITY.md",
+ "index.js"
+ ],
+ "engines": {
+ "node": ">= 0.8",
+ "npm": "1.2.8000 || >= 1.4.16"
+ },
+ "scripts": {
+ "lint": "eslint .",
+ "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/",
+ "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+ "test-cov": "nyc --reporter=html --reporter=text npm test"
+ }
+}
diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE
new file mode 100644
index 00000000..de322667
--- /dev/null
+++ b/node_modules/brace-expansion/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2013 Julian Gruber
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md
new file mode 100644
index 00000000..6b4e0e16
--- /dev/null
+++ b/node_modules/brace-expansion/README.md
@@ -0,0 +1,129 @@
+# brace-expansion
+
+[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
+as known from sh/bash, in JavaScript.
+
+## Example
+
+```js
+var expand = require('brace-expansion');
+
+expand('file-{a,b,c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('-v{,,}')
+// => ['-v', '-v', '-v']
+
+expand('file{0..2}.jpg')
+// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
+
+expand('file-{a..c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('file{2..0}.jpg')
+// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
+
+expand('file{0..4..2}.jpg')
+// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
+
+expand('file-{a..e..2}.jpg')
+// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
+
+expand('file{00..10..5}.jpg')
+// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
+
+expand('{{A..C},{a..c}}')
+// => ['A', 'B', 'C', 'a', 'b', 'c']
+
+expand('ppp{,config,oe{,conf}}')
+// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
+```
+
+## API
+
+```js
+var expand = require('brace-expansion');
+```
+
+### var expanded = expand(str)
+
+Return an array of all possible and valid expansions of `str`. If none are
+found, `[str]` is returned.
+
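+For instance, a pattern with no braces comes back as a single-element array
+(a small sketch; the result follows from `expandTop()` in `index.js`):
+
+```js
+var expand = require('brace-expansion');
+
+expand('abc');
+// => ['abc']
+```
+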
+Valid expansions are:
+
+```js
+/^(.*,)+(.+)?$/
+// {a,b,...}
+```
+
+A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
+
+```js
+/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+A numeric sequence from `x` to `y` inclusive, with optional increment.
+If `x` or `y` start with a leading `0`, all the numbers will be padded
+to have equal length. Negative numbers and backwards iteration work too.
+
+```js
+/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+An alphabetic sequence from `x` to `y` inclusive, with optional increment.
+`x` and `y` must be exactly one character, and if given, `incr` must be a
+number.
+
+For compatibility reasons, the string `${` is not eligible for brace expansion.
+
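+Here is a short sketch of that rule (the `/\$$/` guard in `index.js` returns
+such input untouched), contrasted with a normal pattern:
+
+```js
+var expand = require('brace-expansion');
+
+expand('${a,b}');
+// => ['${a,b}']
+
+expand('x{a,b}');
+// => ['xa', 'xb']
+```
+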
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install brace-expansion
+```
+
+## Contributors
+
+- [Julian Gruber](https://github.com/juliangruber)
+- [Isaac Z. Schlueter](https://github.com/isaacs)
+
+## Sponsors
+
+This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
+
+Do you want to support modules like this, improve their quality and stability, and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how many of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js
new file mode 100644
index 00000000..0478be81
--- /dev/null
+++ b/node_modules/brace-expansion/index.js
@@ -0,0 +1,201 @@
+var concatMap = require('concat-map');
+var balanced = require('balanced-match');
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
+
diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json
new file mode 100644
index 00000000..a18faa8f
--- /dev/null
+++ b/node_modules/brace-expansion/package.json
@@ -0,0 +1,47 @@
+{
+ "name": "brace-expansion",
+ "description": "Brace expansion as known from sh/bash",
+ "version": "1.1.11",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/brace-expansion.git"
+ },
+ "homepage": "https://github.com/juliangruber/brace-expansion",
+ "main": "index.js",
+ "scripts": {
+ "test": "tape test/*.js",
+ "gentest": "bash test/generate.sh",
+ "bench": "matcha test/perf/bench.js"
+ },
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ },
+ "devDependencies": {
+ "matcha": "^0.7.0",
+ "tape": "^4.6.0"
+ },
+ "keywords": [],
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "license": "MIT",
+ "testling": {
+ "files": "test/*.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/20..latest",
+ "firefox/nightly",
+ "chrome/25..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
+ }
+}
diff --git a/node_modules/braces/CHANGELOG.md b/node_modules/braces/CHANGELOG.md
new file mode 100644
index 00000000..36f798b0
--- /dev/null
+++ b/node_modules/braces/CHANGELOG.md
@@ -0,0 +1,184 @@
+# Release history
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
+and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
+
+
+**Guiding Principles**
+
+- Changelogs are for humans, not machines.
+- There should be an entry for every single version.
+- The same types of changes should be grouped.
+- Versions and sections should be linkable.
+- The latest version comes first.
+- The release date of each version is displayed.
+- Mention whether you follow Semantic Versioning.
+
+
+
+
+**Types of changes**
+
+Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/))_:
+
+- `Added` for new features.
+- `Changed` for changes in existing functionality.
+- `Deprecated` for soon-to-be removed features.
+- `Removed` for now removed features.
+- `Fixed` for any bug fixes.
+- `Security` in case of vulnerabilities.
+
+
+
+## [3.0.0] - 2018-04-08
+
+v3.0 is a complete refactor, resulting in a faster, smaller codebase, with fewer deps, and a more accurate parser and compiler.
+
+**Breaking Changes**
+
+- The undocumented `.makeRe` method was removed
+
+**Non-breaking changes**
+
+- Caching was removed
+
+## [2.3.2] - 2018-04-08
+
+- start refactoring
+- cover sets
+- better range handling
+
+## [2.3.1] - 2018-02-17
+
+- Remove unnecessary escape in Regex. (#14)
+
+## [2.3.0] - 2017-10-19
+
+- minor code reorganization
+- optimize regex
+- expose `maxLength` option
+
+## [2.2.1] - 2017-05-30
+
+- don't condense when braces contain extglobs
+
+## [2.2.0] - 2017-05-28
+
+- ensure word boundaries are preserved
+- fixes edge case where extglob characters precede a brace pattern
+
+## [2.1.1] - 2017-04-27
+
+- use snapdragon-node
+- handle edge case
+- optimizations, lint
+
+## [2.0.4] - 2017-04-11
+
+- pass opts to compiler
+- minor optimization in create method
+- re-write parser handlers to remove negation regex
+
+## [2.0.3] - 2016-12-10
+
+- use split-string
+- clear queue at the end
+- adds sequences example
+- add unit tests
+
+## [2.0.2] - 2016-10-21
+
+- fix comma handling in nested extglobs
+
+## [2.0.1] - 2016-10-20
+
+- add comments
+- more tests, ensure quotes are stripped
+
+## [2.0.0] - 2016-10-19
+
+- don't expand braces inside character classes
+- add quantifier pattern
+
+## [1.8.5] - 2016-05-21
+
+- Refactor (#10)
+
+## [1.8.4] - 2016-04-20
+
+- fixes https://github.com/jonschlinkert/micromatch/issues/66
+
+## [1.8.0] - 2015-03-18
+
+- adds exponent examples, tests
+- fixes the first example in https://github.com/jonschlinkert/micromatch/issues/38
+
+## [1.6.0] - 2015-01-30
+
+- optimizations, `bash` mode:
+- improve path escaping
+
+## [1.5.0] - 2015-01-28
+
+- Merge pull request #5 from eush77/lib-files
+
+## [1.4.0] - 2015-01-24
+
+- add extglob tests
+- externalize exponent function
+- better whitespace handling
+
+## [1.3.0] - 2015-01-24
+
+- make regex patterns explicit
+
+## [1.1.0] - 2015-01-11
+
+- don't create a match group with `makeRe`
+
+## [1.0.0] - 2014-12-23
+
+- Merge commit '97b05f5544f8348736a8efaecf5c32bbe3e2ad6e'
+- support empty brace syntax
+- better bash coverage
+- better support for regex strings
+
+## [0.1.4] - 2014-11-14
+
+- improve recognition of bad args, recognize mismatched argument types
+- support escaping
+- remove pathname-expansion
+- support whitespace in patterns
+
+## [0.1.0]
+
+- first commit
+
+[2.3.2]: https://github.com/micromatch/braces/compare/2.3.1...2.3.2
+[2.3.1]: https://github.com/micromatch/braces/compare/2.3.0...2.3.1
+[2.3.0]: https://github.com/micromatch/braces/compare/2.2.1...2.3.0
+[2.2.1]: https://github.com/micromatch/braces/compare/2.2.0...2.2.1
+[2.2.0]: https://github.com/micromatch/braces/compare/2.1.1...2.2.0
+[2.1.1]: https://github.com/micromatch/braces/compare/2.1.0...2.1.1
+[2.1.0]: https://github.com/micromatch/braces/compare/2.0.4...2.1.0
+[2.0.4]: https://github.com/micromatch/braces/compare/2.0.3...2.0.4
+[2.0.3]: https://github.com/micromatch/braces/compare/2.0.2...2.0.3
+[2.0.2]: https://github.com/micromatch/braces/compare/2.0.1...2.0.2
+[2.0.1]: https://github.com/micromatch/braces/compare/2.0.0...2.0.1
+[2.0.0]: https://github.com/micromatch/braces/compare/1.8.5...2.0.0
+[1.8.5]: https://github.com/micromatch/braces/compare/1.8.4...1.8.5
+[1.8.4]: https://github.com/micromatch/braces/compare/1.8.0...1.8.4
+[1.8.0]: https://github.com/micromatch/braces/compare/1.6.0...1.8.0
+[1.6.0]: https://github.com/micromatch/braces/compare/1.5.0...1.6.0
+[1.5.0]: https://github.com/micromatch/braces/compare/1.4.0...1.5.0
+[1.4.0]: https://github.com/micromatch/braces/compare/1.3.0...1.4.0
+[1.3.0]: https://github.com/micromatch/braces/compare/1.2.0...1.3.0
+[1.2.0]: https://github.com/micromatch/braces/compare/1.1.0...1.2.0
+[1.1.0]: https://github.com/micromatch/braces/compare/1.0.0...1.1.0
+[1.0.0]: https://github.com/micromatch/braces/compare/0.1.4...1.0.0
+[0.1.4]: https://github.com/micromatch/braces/compare/0.1.0...0.1.4
+
+[Unreleased]: https://github.com/micromatch/braces/compare/0.1.0...HEAD
+[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog
\ No newline at end of file
diff --git a/node_modules/braces/LICENSE b/node_modules/braces/LICENSE
new file mode 100644
index 00000000..d32ab442
--- /dev/null
+++ b/node_modules/braces/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2018, Jon Schlinkert.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/braces/README.md b/node_modules/braces/README.md
new file mode 100644
index 00000000..cba2f600
--- /dev/null
+++ b/node_modules/braces/README.md
@@ -0,0 +1,593 @@
+# braces
+
+> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.
+
+Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
+
+## Install
+
+Install with [npm](https://www.npmjs.com/):
+
+```sh
+$ npm install --save braces
+```
+
+## v3.0.0 Released!!
+
+See the [changelog](CHANGELOG.md) for details.
+
+## Why use braces?
+
+Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters.
+
+* **Accurate** - complete support for the [Bash 4.3 Brace Expansion](https://www.gnu.org/software/bash/) specification (passes all of the Bash braces tests)
+* **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity.
+* **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up.
+* **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written).
+* **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)).
+* [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']`
+* [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']`
+* [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']`
+* [Supports escaping](#escaping) - To prevent evaluation of special characters.
+
+## Usage
+
+The main export is a function that takes one or more brace `patterns` and `options`.
+
+```js
+const braces = require('braces');
+// braces(patterns[, options]);
+
+console.log(braces(['{01..05}', '{a..e}']));
+//=> ['(0[1-5])', '([a-e])']
+
+console.log(braces(['{01..05}', '{a..e}'], { expand: true }));
+//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e']
+```
+
+### Brace Expansion vs. Compilation
+
+By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching.
+
+**Compiled**
+
+```js
+console.log(braces('a/{x,y,z}/b'));
+//=> ['a/(x|y|z)/b']
+console.log(braces(['a/{01..20}/b', 'a/{1..5}/b']));
+//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ]
+```
+
+**Expanded**
+
+Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)):
+
+```js
+console.log(braces('a/{x,y,z}/b', { expand: true }));
+//=> ['a/x/b', 'a/y/b', 'a/z/b']
+
+console.log(braces.expand('{01..10}'));
+//=> ['01','02','03','04','05','06','07','08','09','10']
+```
+
+### Lists
+
+Expand lists (like Bash "sets"):
+
+```js
+console.log(braces('a/{foo,bar,baz}/*.js'));
+//=> ['a/(foo|bar|baz)/*.js']
+
+console.log(braces.expand('a/{foo,bar,baz}/*.js'));
+//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js']
+```
+
+### Sequences
+
+Expand ranges of characters (like Bash "sequences"):
+
+```js
+console.log(braces.expand('{1..3}')); // ['1', '2', '3']
+console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b']
+console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c']
+console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c']
+
+// supports zero-padded ranges
+console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b']
+console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b']
+```
+
+See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options.
+
+### Stepped ranges
+
+Steps, or increments, may be used with ranges:
+
+```js
+console.log(braces.expand('{2..10..2}'));
+//=> ['2', '4', '6', '8', '10']
+
+console.log(braces('{2..10..2}'));
+//=> ['(2|4|6|8|10)']
+```
+
+When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion.
+
+### Nesting
+
+Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved.
+
+**"Expanded" braces**
+
+```js
+console.log(braces.expand('a{b,c,/{x,y}}/e'));
+//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e']
+
+console.log(braces.expand('a/{x,{1..5},y}/c'));
+//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c']
+```
+
+**"Optimized" braces**
+
+```js
+console.log(braces('a{b,c,/{x,y}}/e'));
+//=> ['a(b|c|/(x|y))/e']
+
+console.log(braces('a/{x,{1..5},y}/c'));
+//=> ['a/(x|([1-5])|y)/c']
+```
+
+### Escaping
+
+**Escaping braces**
+
+A brace pattern will not be expanded or evaluated if _either the opening or closing brace is escaped_:
+
+```js
+console.log(braces.expand('a\\{d,c,b}e'));
+//=> ['a{d,c,b}e']
+
+console.log(braces.expand('a{d,c,b\\}e'));
+//=> ['a{d,c,b}e']
+```
+
+**Escaping commas**
+
+Commas inside braces may also be escaped:
+
+```js
+console.log(braces.expand('a{b\\,c}d'));
+//=> ['a{b,c}d']
+
+console.log(braces.expand('a{d\\,c,b}e'));
+//=> ['ad,ce', 'abe']
+```
+
+**Single items**
+
+Following bash conventions, a brace pattern is also not expanded when it contains a single character:
+
+```js
+console.log(braces.expand('a{b}c'));
+//=> ['a{b}c']
+```
+
+## Options
+
+### options.maxLength
+
+**Type**: `Number`
+
+**Default**: `65,536`
+
+**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera.
+
+```js
+console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error
+```
+
+### options.expand
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing).
+
+```js
+console.log(braces('a/{b,c}/d', { expand: true }));
+//=> [ 'a/b/d', 'a/c/d' ]
+```
+
+### options.nodupes
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+**Description**: Remove duplicates from the returned array.
+
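+A minimal sketch (the output shown is what the dedupe step in `index.js`
+implies; note that this step only runs when `expand` is also true):
+
+```js
+console.log(braces('a/{b,b,c}/d', { expand: true, nodupes: true }));
+//=> ['a/b/d', 'a/c/d']
+```
+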
+### options.rangeLimit
+
+**Type**: `Number`
+
+**Default**: `1000`
+
+**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`.
+
+You can customize `options.rangeLimit` or set it to `Infinity` to disable this altogether.
+
+**Examples**
+
+```js
+// pattern exceeds the "rangeLimit", so it's optimized automatically
+console.log(braces.expand('{1..1000}'));
+//=> ['([1-9]|[1-9][0-9]{1,2}|1000)']
+
+// pattern does not exceed "rangeLimit", so it's NOT optimized
+console.log(braces.expand('{1..100}'));
+//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100']
+```
+
+### options.transform
+
+**Type**: `Function`
+
+**Default**: `undefined`
+
+**Description**: Customize range expansion.
+
+**Example: Transforming non-numeric values**
+
+```js
+const alpha = braces.expand('x/{a..e}/y', {
+ transform(value, index) {
+ // When non-numeric values are passed, "value" is a character code.
+ return 'foo/' + String.fromCharCode(value) + '-' + index;
+ }
+});
+console.log(alpha);
+//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ]
+```
+
+**Example: Transforming numeric values**
+
+```js
+const numeric = braces.expand('{1..5}', {
+ transform(value) {
+ // when numeric values are passed, "value" is a number
+ return 'foo/' + value * 2;
+ }
+});
+console.log(numeric);
+//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ]
+```
+
+### options.quantifiers
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+**Description**: In regular expressions, quantifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times.
+
+Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists).
+
+The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists.
+
+**Examples**
+
+```js
+const braces = require('braces');
+console.log(braces('a/b{1,3}/{x,y,z}'));
+//=> [ 'a/b(1|3)/(x|y|z)' ]
+console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true}));
+//=> [ 'a/b{1,3}/(x|y|z)' ]
+console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true, expand: true}));
+//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ]
+```
+
+### options.unescape
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+**Description**: Strip backslashes that were used for escaping from the result.
+
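+A quick way to see the effect is to expand the same escaped pattern with and
+without the option (outputs omitted here; see the [escaping](#escaping)
+examples above for how escaped braces behave):
+
+```js
+console.log(braces.expand('a\\{b,c\\}d'));
+console.log(braces.expand('a\\{b,c\\}d', { unescape: true }));
+```
+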
+## What is "brace expansion"?
+
+Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs).
+
+In addition to "expansion", braces are also used for matching. In other words:
+
+* [brace expansion](#brace-expansion) is for generating new lists
+* [brace matching](#brace-matching) is for filtering existing lists
+
+
+**More about brace expansion**
+
+There are two main types of brace expansion:
+
+1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}`
+2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges".
+
+Here are some example brace patterns to illustrate how they work:
+
+**Sets**
+
+```
+{a,b,c} => a b c
+{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2
+```
+
+**Sequences**
+
+```
+{1..9} => 1 2 3 4 5 6 7 8 9
+{4..-4} => 4 3 2 1 0 -1 -2 -3 -4
+{1..20..3} => 1 4 7 10 13 16 19
+{a..j} => a b c d e f g h i j
+{j..a} => j i h g f e d c b a
+{a..z..3} => a d g j m p s v y
+```
+
+**Combination**
+
+Sets and sequences can be mixed together or used along with any other strings.
+
+```
+{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3
+foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar
+```
+
+The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases.
+
+## Brace matching
+
+In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching.
+
+For example, the pattern `foo/{1..3}/bar` would match any of following strings:
+
+```
+foo/1/bar
+foo/2/bar
+foo/3/bar
+```
+
+But not:
+
+```
+baz/1/qux
+baz/2/qux
+baz/3/qux
+```
+
+Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings:
+
+```
+foo/1/bar
+foo/2/bar
+foo/3/bar
+baz/1/qux
+baz/2/qux
+baz/3/qux
+```
+
+## Brace matching pitfalls
+
+Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of.
+
+### tldr
+
+**"brace bombs"**
+
+* brace expansion can eat up a huge amount of processing resources
+* as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially
+* users can accidentally (or intentionally) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!)
+
+For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section.
+
+### The solution
+
+Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries.
+
+### Geometric complexity
+
+At minimum, brace patterns with sets limited to two elements have quadratic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`.
+
+For example, the following sets demonstrate quadratic (`O(n^2)`) complexity:
+
+```
+{1,2}{3,4} => (2X2) => 13 14 23 24
+{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246
+```
+
+But add an element to a set, and we get an n-fold Cartesian product with `O(n^c)` complexity:
+
+```
+{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248
+ 249 257 258 259 267 268 269 347 348 349 357
+ 358 359 367 368 369
+```
+
+Now, imagine how this complexity grows given that each element is an n-tuple:
+
+```
+{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB)
+{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB)
+```
+
+Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control.
+
+**More information**
+
+Interested in learning more about brace expansion?
+
+* [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion)
+* [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion)
+* [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product)
+
+
+
+## Performance
+
+Braces is not only screaming fast, it's also more accurate than other brace expansion libraries.
+
+### Better algorithms
+
+Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_.
+
+Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently.
+
+**The proof is in the numbers**
+
+Minimatch gets exponentially slower as patterns increase in complexity; braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively.
+
+| **Pattern** | **braces** | **[minimatch][]** |
+| --- | --- | --- |
+| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs)| N/A (freezes) |
+| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) |
+| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) |
+| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) |
+| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) |
+| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) |
+| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) |
+| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) |
+| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) |
+| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) |
+| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) |
+| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) |
+| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) |
+| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) |
+| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) |
+| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) |
+| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) |
+
+### Faster algorithms
+
+When you need expansion, braces is still much faster.
+
+_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_
+
+| **Pattern** | **braces** | **[minimatch][]** |
+| --- | --- | --- |
+| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) |
+| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) |
+| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) |
+| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) |
+| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) |
+| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) |
+| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) |
+| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) |
+
+If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js).
+
+## Benchmarks
+
+### Running benchmarks
+
+Install dev dependencies:
+
+```bash
+npm i -d && npm run benchmark
+```
+
+### Latest results
+
+Braces is more accurate, without sacrificing performance.
+
+```bash
+# range (expanded)
+ braces x 29,040 ops/sec ±3.69% (91 runs sampled)
+ minimatch x 4,735 ops/sec ±1.28% (90 runs sampled)
+
+# range (optimized for regex)
+ braces x 382,878 ops/sec ±0.56% (94 runs sampled)
+ minimatch x 1,040 ops/sec ±0.44% (93 runs sampled)
+
+# nested ranges (expanded)
+ braces x 19,744 ops/sec ±2.27% (92 runs sampled)
+ minimatch x 4,579 ops/sec ±0.50% (93 runs sampled)
+
+# nested ranges (optimized for regex)
+ braces x 246,019 ops/sec ±2.02% (93 runs sampled)
+ minimatch x 1,028 ops/sec ±0.39% (94 runs sampled)
+
+# set (expanded)
+ braces x 138,641 ops/sec ±0.53% (95 runs sampled)
+ minimatch x 219,582 ops/sec ±0.98% (94 runs sampled)
+
+# set (optimized for regex)
+ braces x 388,408 ops/sec ±0.41% (95 runs sampled)
+ minimatch x 44,724 ops/sec ±0.91% (89 runs sampled)
+
+# nested sets (expanded)
+ braces x 84,966 ops/sec ±0.48% (94 runs sampled)
+ minimatch x 140,720 ops/sec ±0.37% (95 runs sampled)
+
+# nested sets (optimized for regex)
+ braces x 263,340 ops/sec ±2.06% (92 runs sampled)
+ minimatch x 28,714 ops/sec ±0.40% (90 runs sampled)
+```
+
+## About
+
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+
+
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+
+
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+
+
+### Contributors
+
+| **Commits** | **Contributor** |
+| --- | --- |
+| 197 | [jonschlinkert](https://github.com/jonschlinkert) |
+| 4 | [doowb](https://github.com/doowb) |
+| 1 | [es128](https://github.com/es128) |
+| 1 | [eush77](https://github.com/eush77) |
+| 1 | [hemanth](https://github.com/hemanth) |
+| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
+
+### Author
+
+**Jon Schlinkert**
+
+* [GitHub Profile](https://github.com/jonschlinkert)
+* [Twitter Profile](https://twitter.com/jonschlinkert)
+* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
+
+### License
+
+Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
+Released under the [MIT License](LICENSE).
+
+***
+
+_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._
\ No newline at end of file
diff --git a/node_modules/braces/index.js b/node_modules/braces/index.js
new file mode 100644
index 00000000..0eee0f56
--- /dev/null
+++ b/node_modules/braces/index.js
@@ -0,0 +1,170 @@
+'use strict';
+
+const stringify = require('./lib/stringify');
+const compile = require('./lib/compile');
+const expand = require('./lib/expand');
+const parse = require('./lib/parse');
+
+/**
+ * Expand the given pattern or create a regex-compatible string.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces('{a,b,c}')); //=> ['(a|b|c)']
+ * console.log(braces('{a,b,c}', { expand: true })); //=> ['a', 'b', 'c']
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {String}
+ * @api public
+ */
+
+const braces = (input, options = {}) => {
+ let output = [];
+
+ if (Array.isArray(input)) {
+ for (let pattern of input) {
+ let result = braces.create(pattern, options);
+ if (Array.isArray(result)) {
+ output.push(...result);
+ } else {
+ output.push(result);
+ }
+ }
+ } else {
+ output = [].concat(braces.create(input, options));
+ }
+
+ if (options && options.expand === true && options.nodupes === true) {
+ output = [...new Set(output)];
+ }
+ return output;
+};
+
+/**
+ * Parse the given `str` with the given `options`.
+ *
+ * ```js
+ * // braces.parse(pattern, [, options]);
+ * const ast = braces.parse('a/{b,c}/d');
+ * console.log(ast);
+ * ```
+ * @param {String} pattern Brace pattern to parse
+ * @param {Object} options
+ * @return {Object} Returns an AST
+ * @api public
+ */
+
+braces.parse = (input, options = {}) => parse(input, options);
+
+/**
+ * Creates a braces string from an AST, or an AST node.
+ *
+ * ```js
+ * const braces = require('braces');
+ * let ast = braces.parse('foo/{a,b}/bar');
+ * console.log(braces.stringify(ast.nodes[2])); //=> '{a,b}'
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {String} Returns a brace pattern string.
+ * @api public
+ */
+
+braces.stringify = (input, options = {}) => {
+ if (typeof input === 'string') {
+ return stringify(braces.parse(input, options), options);
+ }
+ return stringify(input, options);
+};
+
+/**
+ * Compiles a brace pattern into a regex-compatible, optimized string.
+ * This method is called by the main [braces](#braces) function by default.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.compile('a/{b,c}/d'));
+ * //=> 'a/(b|c)/d'
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {String} Returns a regex-compatible string.
+ * @api public
+ */
+
+braces.compile = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+ return compile(input, options);
+};
+
+/**
+ * Expands a brace pattern into an array. This method is called by the
+ * main [braces](#braces) function when `options.expand` is true. Before
+ * using this method it's recommended that you read the [performance notes](#performance)
+ * and consider the advantages of using [.compile](#compile) instead.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.expand('a/{b,c}/d'));
+ * //=> ['a/b/d', 'a/c/d'];
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.expand = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+
+ let result = expand(input, options);
+
+ // filter out empty strings if specified
+ if (options.noempty === true) {
+ result = result.filter(Boolean);
+ }
+
+ // filter out duplicates if specified
+ if (options.nodupes === true) {
+ result = [...new Set(result)];
+ }
+
+ return result;
+};
+
+/**
+ * Processes a brace pattern and returns either an expanded array
+ * (if `options.expand` is true) or a highly optimized regex-compatible string.
+ * This method is called by the main [braces](#braces) function.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
+ * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.create = (input, options = {}) => {
+ if (input === '' || input.length < 3) {
+ return [input];
+ }
+
+ return options.expand !== true
+ ? braces.compile(input, options)
+ : braces.expand(input, options);
+};
+
+/**
+ * Expose "braces"
+ */
+
+module.exports = braces;
diff --git a/node_modules/braces/lib/compile.js b/node_modules/braces/lib/compile.js
new file mode 100644
index 00000000..3e984a4b
--- /dev/null
+++ b/node_modules/braces/lib/compile.js
@@ -0,0 +1,57 @@
+'use strict';
+
+const fill = require('fill-range');
+const utils = require('./utils');
+
+const compile = (ast, options = {}) => {
+ let walk = (node, parent = {}) => {
+ let invalidBlock = utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let invalid = invalidBlock === true || invalidNode === true;
+ let prefix = options.escapeInvalid === true ? '\\' : '';
+ let output = '';
+
+ if (node.isOpen === true) {
+ return prefix + node.value;
+ }
+ if (node.isClose === true) {
+ return prefix + node.value;
+ }
+
+ if (node.type === 'open') {
+ return invalid ? (prefix + node.value) : '(';
+ }
+
+ if (node.type === 'close') {
+ return invalid ? (prefix + node.value) : ')';
+ }
+
+ if (node.type === 'comma') {
+ return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
+ }
+
+ if (node.value) {
+ return node.value;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+ let range = fill(...args, { ...options, wrap: false, toRegex: true });
+
+ if (range.length !== 0) {
+ return args.length > 1 && range.length > 1 ? `(${range})` : range;
+ }
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += walk(child, node);
+ }
+ }
+ return output;
+ };
+
+ return walk(ast);
+};
+
+module.exports = compile;
diff --git a/node_modules/braces/lib/constants.js b/node_modules/braces/lib/constants.js
new file mode 100644
index 00000000..a9379436
--- /dev/null
+++ b/node_modules/braces/lib/constants.js
@@ -0,0 +1,57 @@
+'use strict';
+
+module.exports = {
+ MAX_LENGTH: 1024 * 64,
+
+ // Digits
+ CHAR_0: '0', /* 0 */
+ CHAR_9: '9', /* 9 */
+
+ // Alphabet chars.
+ CHAR_UPPERCASE_A: 'A', /* A */
+ CHAR_LOWERCASE_A: 'a', /* a */
+ CHAR_UPPERCASE_Z: 'Z', /* Z */
+ CHAR_LOWERCASE_Z: 'z', /* z */
+
+ CHAR_LEFT_PARENTHESES: '(', /* ( */
+ CHAR_RIGHT_PARENTHESES: ')', /* ) */
+
+ CHAR_ASTERISK: '*', /* * */
+
+ // Non-alphabetic chars.
+ CHAR_AMPERSAND: '&', /* & */
+ CHAR_AT: '@', /* @ */
+ CHAR_BACKSLASH: '\\', /* \ */
+ CHAR_BACKTICK: '`', /* ` */
+ CHAR_CARRIAGE_RETURN: '\r', /* \r */
+ CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
+ CHAR_COLON: ':', /* : */
+ CHAR_COMMA: ',', /* , */
+  CHAR_DOLLAR: '$', /* $ */
+ CHAR_DOT: '.', /* . */
+ CHAR_DOUBLE_QUOTE: '"', /* " */
+ CHAR_EQUAL: '=', /* = */
+ CHAR_EXCLAMATION_MARK: '!', /* ! */
+ CHAR_FORM_FEED: '\f', /* \f */
+ CHAR_FORWARD_SLASH: '/', /* / */
+ CHAR_HASH: '#', /* # */
+ CHAR_HYPHEN_MINUS: '-', /* - */
+ CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
+ CHAR_LEFT_CURLY_BRACE: '{', /* { */
+ CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
+ CHAR_LINE_FEED: '\n', /* \n */
+ CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
+ CHAR_PERCENT: '%', /* % */
+ CHAR_PLUS: '+', /* + */
+ CHAR_QUESTION_MARK: '?', /* ? */
+ CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
+ CHAR_RIGHT_CURLY_BRACE: '}', /* } */
+ CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
+ CHAR_SEMICOLON: ';', /* ; */
+ CHAR_SINGLE_QUOTE: '\'', /* ' */
+ CHAR_SPACE: ' ', /* */
+ CHAR_TAB: '\t', /* \t */
+ CHAR_UNDERSCORE: '_', /* _ */
+ CHAR_VERTICAL_LINE: '|', /* | */
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
+};
diff --git a/node_modules/braces/lib/expand.js b/node_modules/braces/lib/expand.js
new file mode 100644
index 00000000..376c748a
--- /dev/null
+++ b/node_modules/braces/lib/expand.js
@@ -0,0 +1,113 @@
+'use strict';
+
+const fill = require('fill-range');
+const stringify = require('./stringify');
+const utils = require('./utils');
+
+const append = (queue = '', stash = '', enclose = false) => {
+ let result = [];
+
+ queue = [].concat(queue);
+ stash = [].concat(stash);
+
+ if (!stash.length) return queue;
+ if (!queue.length) {
+ return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
+ }
+
+ for (let item of queue) {
+ if (Array.isArray(item)) {
+ for (let value of item) {
+ result.push(append(value, stash, enclose));
+ }
+ } else {
+ for (let ele of stash) {
+ if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
+ result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));
+ }
+ }
+ }
+ return utils.flatten(result);
+};
+
+const expand = (ast, options = {}) => {
+ let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;
+
+ let walk = (node, parent = {}) => {
+ node.queue = [];
+
+ let p = parent;
+ let q = parent.queue;
+
+ while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
+ p = p.parent;
+ q = p.queue;
+ }
+
+ if (node.invalid || node.dollar) {
+ q.push(append(q.pop(), stringify(node, options)));
+ return;
+ }
+
+ if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
+ q.push(append(q.pop(), ['{}']));
+ return;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+
+ if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
+ throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
+ }
+
+ let range = fill(...args, options);
+ if (range.length === 0) {
+ range = stringify(node, options);
+ }
+
+ q.push(append(q.pop(), range));
+ node.nodes = [];
+ return;
+ }
+
+ let enclose = utils.encloseBrace(node);
+ let queue = node.queue;
+ let block = node;
+
+ while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
+ block = block.parent;
+ queue = block.queue;
+ }
+
+ for (let i = 0; i < node.nodes.length; i++) {
+ let child = node.nodes[i];
+
+ if (child.type === 'comma' && node.type === 'brace') {
+ if (i === 1) queue.push('');
+ queue.push('');
+ continue;
+ }
+
+ if (child.type === 'close') {
+ q.push(append(q.pop(), queue, enclose));
+ continue;
+ }
+
+ if (child.value && child.type !== 'open') {
+ queue.push(append(queue.pop(), child.value));
+ continue;
+ }
+
+ if (child.nodes) {
+ walk(child, node);
+ }
+ }
+
+ return queue;
+ };
+
+ return utils.flatten(walk(ast));
+};
+
+module.exports = expand;
diff --git a/node_modules/braces/lib/parse.js b/node_modules/braces/lib/parse.js
new file mode 100644
index 00000000..145ea264
--- /dev/null
+++ b/node_modules/braces/lib/parse.js
@@ -0,0 +1,333 @@
+'use strict';
+
+const stringify = require('./stringify');
+
+/**
+ * Constants
+ */
+
+const {
+ MAX_LENGTH,
+ CHAR_BACKSLASH, /* \ */
+ CHAR_BACKTICK, /* ` */
+ CHAR_COMMA, /* , */
+ CHAR_DOT, /* . */
+ CHAR_LEFT_PARENTHESES, /* ( */
+ CHAR_RIGHT_PARENTHESES, /* ) */
+ CHAR_LEFT_CURLY_BRACE, /* { */
+ CHAR_RIGHT_CURLY_BRACE, /* } */
+ CHAR_LEFT_SQUARE_BRACKET, /* [ */
+ CHAR_RIGHT_SQUARE_BRACKET, /* ] */
+ CHAR_DOUBLE_QUOTE, /* " */
+ CHAR_SINGLE_QUOTE, /* ' */
+ CHAR_NO_BREAK_SPACE,
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE
+} = require('./constants');
+
+/**
+ * parse
+ */
+
+const parse = (input, options = {}) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ let opts = options || {};
+ let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+ if (input.length > max) {
+ throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
+ }
+
+ let ast = { type: 'root', input, nodes: [] };
+ let stack = [ast];
+ let block = ast;
+ let prev = ast;
+ let brackets = 0;
+ let length = input.length;
+ let index = 0;
+ let depth = 0;
+ let value;
+ let memo = {};
+
+ /**
+ * Helpers
+ */
+
+ const advance = () => input[index++];
+ const push = node => {
+ if (node.type === 'text' && prev.type === 'dot') {
+ prev.type = 'text';
+ }
+
+ if (prev && prev.type === 'text' && node.type === 'text') {
+ prev.value += node.value;
+ return;
+ }
+
+ block.nodes.push(node);
+ node.parent = block;
+ node.prev = prev;
+ prev = node;
+ return node;
+ };
+
+ push({ type: 'bos' });
+
+ while (index < length) {
+ block = stack[stack.length - 1];
+ value = advance();
+
+ /**
+ * Invalid chars
+ */
+
+ if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
+ continue;
+ }
+
+ /**
+ * Escaped chars
+ */
+
+ if (value === CHAR_BACKSLASH) {
+ push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
+ continue;
+ }
+
+ /**
+ * Right square bracket (literal): ']'
+ */
+
+ if (value === CHAR_RIGHT_SQUARE_BRACKET) {
+ push({ type: 'text', value: '\\' + value });
+ continue;
+ }
+
+ /**
+ * Left square bracket: '['
+ */
+
+ if (value === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+
+ let closed = true;
+ let next;
+
+ while (index < length && (next = advance())) {
+ value += next;
+
+ if (next === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+ continue;
+ }
+
+ if (next === CHAR_BACKSLASH) {
+ value += advance();
+ continue;
+ }
+
+ if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+ brackets--;
+
+ if (brackets === 0) {
+ break;
+ }
+ }
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Parentheses
+ */
+
+ if (value === CHAR_LEFT_PARENTHESES) {
+ block = push({ type: 'paren', nodes: [] });
+ stack.push(block);
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (value === CHAR_RIGHT_PARENTHESES) {
+ if (block.type !== 'paren') {
+ push({ type: 'text', value });
+ continue;
+ }
+ block = stack.pop();
+ push({ type: 'text', value });
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Quotes: '|"|`
+ */
+
+ if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
+ let open = value;
+ let next;
+
+ if (options.keepQuotes !== true) {
+ value = '';
+ }
+
+ while (index < length && (next = advance())) {
+ if (next === CHAR_BACKSLASH) {
+ value += next + advance();
+ continue;
+ }
+
+ if (next === open) {
+ if (options.keepQuotes === true) value += next;
+ break;
+ }
+
+ value += next;
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Left curly brace: '{'
+ */
+
+ if (value === CHAR_LEFT_CURLY_BRACE) {
+ depth++;
+
+ let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
+ let brace = {
+ type: 'brace',
+ open: true,
+ close: false,
+ dollar,
+ depth,
+ commas: 0,
+ ranges: 0,
+ nodes: []
+ };
+
+ block = push(brace);
+ stack.push(block);
+ push({ type: 'open', value });
+ continue;
+ }
+
+ /**
+ * Right curly brace: '}'
+ */
+
+ if (value === CHAR_RIGHT_CURLY_BRACE) {
+ if (block.type !== 'brace') {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ let type = 'close';
+ block = stack.pop();
+ block.close = true;
+
+ push({ type, value });
+ depth--;
+
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Comma: ','
+ */
+
+ if (value === CHAR_COMMA && depth > 0) {
+ if (block.ranges > 0) {
+ block.ranges = 0;
+ let open = block.nodes.shift();
+ block.nodes = [open, { type: 'text', value: stringify(block) }];
+ }
+
+ push({ type: 'comma', value });
+ block.commas++;
+ continue;
+ }
+
+ /**
+ * Dot: '.'
+ */
+
+ if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
+ let siblings = block.nodes;
+
+ if (depth === 0 || siblings.length === 0) {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (prev.type === 'dot') {
+ block.range = [];
+ prev.value += value;
+ prev.type = 'range';
+
+ if (block.nodes.length !== 3 && block.nodes.length !== 5) {
+ block.invalid = true;
+ block.ranges = 0;
+ prev.type = 'text';
+ continue;
+ }
+
+ block.ranges++;
+ block.args = [];
+ continue;
+ }
+
+ if (prev.type === 'range') {
+ siblings.pop();
+
+ let before = siblings[siblings.length - 1];
+ before.value += prev.value + value;
+ prev = before;
+ block.ranges--;
+ continue;
+ }
+
+ push({ type: 'dot', value });
+ continue;
+ }
+
+ /**
+ * Text
+ */
+
+ push({ type: 'text', value });
+ }
+
+ // Mark imbalanced braces and brackets as invalid
+ do {
+ block = stack.pop();
+
+ if (block.type !== 'root') {
+ block.nodes.forEach(node => {
+ if (!node.nodes) {
+ if (node.type === 'open') node.isOpen = true;
+ if (node.type === 'close') node.isClose = true;
+ if (!node.nodes) node.type = 'text';
+ node.invalid = true;
+ }
+ });
+
+ // get the location of the block on parent.nodes (block's siblings)
+ let parent = stack[stack.length - 1];
+ let index = parent.nodes.indexOf(block);
+      // replace the (invalid) block with its nodes
+ parent.nodes.splice(index, 1, ...block.nodes);
+ }
+ } while (stack.length > 0);
+
+ push({ type: 'eos' });
+ return ast;
+};
+
+module.exports = parse;
diff --git a/node_modules/braces/lib/stringify.js b/node_modules/braces/lib/stringify.js
new file mode 100644
index 00000000..414b7bcc
--- /dev/null
+++ b/node_modules/braces/lib/stringify.js
@@ -0,0 +1,32 @@
+'use strict';
+
+const utils = require('./utils');
+
+module.exports = (ast, options = {}) => {
+ let stringify = (node, parent = {}) => {
+ let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let output = '';
+
+ if (node.value) {
+ if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
+ return '\\' + node.value;
+ }
+ return node.value;
+ }
+
+ if (node.value) {
+ return node.value;
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += stringify(child);
+ }
+ }
+ return output;
+ };
+
+ return stringify(ast);
+};
+
diff --git a/node_modules/braces/lib/utils.js b/node_modules/braces/lib/utils.js
new file mode 100644
index 00000000..e3551a67
--- /dev/null
+++ b/node_modules/braces/lib/utils.js
@@ -0,0 +1,112 @@
+'use strict';
+
+exports.isInteger = num => {
+ if (typeof num === 'number') {
+ return Number.isInteger(num);
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isInteger(Number(num));
+ }
+ return false;
+};
+
+/**
+ * Find a node of the given type
+ */
+
+exports.find = (node, type) => node.nodes.find(node => node.type === type);
+
+/**
+ * Returns true if the expanded range would exceed the given limit
+ */
+
+exports.exceedsLimit = (min, max, step = 1, limit) => {
+ if (limit === false) return false;
+ if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
+ return ((Number(max) - Number(min)) / Number(step)) >= limit;
+};
+
+/**
+ * Escape the given node with '\\' before node.value
+ */
+
+exports.escapeNode = (block, n = 0, type) => {
+ let node = block.nodes[n];
+ if (!node) return;
+
+ if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
+ if (node.escaped !== true) {
+ node.value = '\\' + node.value;
+ node.escaped = true;
+ }
+ }
+};
+
+/**
+ * Returns true if the given brace node should be enclosed in literal braces
+ */
+
+exports.encloseBrace = node => {
+ if (node.type !== 'brace') return false;
+ if ((node.commas >> 0 + node.ranges >> 0) === 0) {
+ node.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a brace node is invalid.
+ */
+
+exports.isInvalidBrace = block => {
+ if (block.type !== 'brace') return false;
+ if (block.invalid === true || block.dollar) return true;
+ if ((block.commas >> 0 + block.ranges >> 0) === 0) {
+ block.invalid = true;
+ return true;
+ }
+ if (block.open !== true || block.close !== true) {
+ block.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a node is an open or close node
+ */
+
+exports.isOpenOrClose = node => {
+ if (node.type === 'open' || node.type === 'close') {
+ return true;
+ }
+ return node.open === true || node.close === true;
+};
+
+/**
+ * Reduce an array of text nodes.
+ */
+
+exports.reduce = nodes => nodes.reduce((acc, node) => {
+ if (node.type === 'text') acc.push(node.value);
+ if (node.type === 'range') node.type = 'text';
+ return acc;
+}, []);
+
+/**
+ * Flatten an array
+ */
+
+exports.flatten = (...args) => {
+ const result = [];
+ const flat = arr => {
+ for (let i = 0; i < arr.length; i++) {
+ let ele = arr[i];
+ Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);
+ }
+ return result;
+ };
+ flat(args);
+ return result;
+};
diff --git a/node_modules/braces/package.json b/node_modules/braces/package.json
new file mode 100644
index 00000000..3f52e346
--- /dev/null
+++ b/node_modules/braces/package.json
@@ -0,0 +1,77 @@
+{
+ "name": "braces",
+ "description": "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.",
+ "version": "3.0.2",
+ "homepage": "https://github.com/micromatch/braces",
+ "author": "Jon Schlinkert (https://github.com/jonschlinkert)",
+ "contributors": [
+ "Brian Woodward (https://twitter.com/doowb)",
+ "Elan Shanker (https://github.com/es128)",
+ "Eugene Sharygin (https://github.com/eush77)",
+ "hemanth.hm (http://h3manth.com)",
+ "Jon Schlinkert (http://twitter.com/jonschlinkert)"
+ ],
+ "repository": "micromatch/braces",
+ "bugs": {
+ "url": "https://github.com/micromatch/braces/issues"
+ },
+ "license": "MIT",
+ "files": [
+ "index.js",
+ "lib"
+ ],
+ "main": "index.js",
+ "engines": {
+ "node": ">=8"
+ },
+ "scripts": {
+ "test": "mocha",
+ "benchmark": "node benchmark"
+ },
+ "dependencies": {
+ "fill-range": "^7.0.1"
+ },
+ "devDependencies": {
+ "ansi-colors": "^3.2.4",
+ "bash-path": "^2.0.1",
+ "gulp-format-md": "^2.0.0",
+ "mocha": "^6.1.1"
+ },
+ "keywords": [
+ "alpha",
+ "alphabetical",
+ "bash",
+ "brace",
+ "braces",
+ "expand",
+ "expansion",
+ "filepath",
+ "fill",
+ "fs",
+ "glob",
+ "globbing",
+ "letter",
+ "match",
+ "matches",
+ "matching",
+ "number",
+ "numerical",
+ "path",
+ "range",
+ "ranges",
+ "sh"
+ ],
+ "verb": {
+ "toc": false,
+ "layout": "default",
+ "tasks": [
+ "readme"
+ ],
+ "lint": {
+ "reflinks": true
+ },
+ "plugins": [
+ "gulp-format-md"
+ ]
+ }
+}
diff --git a/node_modules/buffer-alloc-unsafe/index.js b/node_modules/buffer-alloc-unsafe/index.js
new file mode 100644
index 00000000..0bd335ff
--- /dev/null
+++ b/node_modules/buffer-alloc-unsafe/index.js
@@ -0,0 +1,17 @@
+function allocUnsafe (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('"size" argument must be a number')
+ }
+
+ if (size < 0) {
+ throw new RangeError('"size" argument must not be negative')
+ }
+
+ if (Buffer.allocUnsafe) {
+ return Buffer.allocUnsafe(size)
+ } else {
+ return new Buffer(size)
+ }
+}
+
+module.exports = allocUnsafe
diff --git a/node_modules/buffer-alloc-unsafe/package.json b/node_modules/buffer-alloc-unsafe/package.json
new file mode 100644
index 00000000..c2ab9042
--- /dev/null
+++ b/node_modules/buffer-alloc-unsafe/package.json
@@ -0,0 +1,24 @@
+{
+ "name": "buffer-alloc-unsafe",
+ "version": "1.1.0",
+ "license": "MIT",
+ "repository": "LinusU/buffer-alloc-unsafe",
+ "files": [
+ "index.js"
+ ],
+ "scripts": {
+ "test": "standard && node test"
+ },
+ "devDependencies": {
+ "standard": "^7.1.2"
+ },
+ "keywords": [
+ "allocUnsafe",
+ "allocate",
+ "buffer allocUnsafe",
+ "buffer unsafe allocate",
+ "buffer",
+ "ponyfill",
+ "unsafe allocate"
+ ]
+}
diff --git a/node_modules/buffer-alloc-unsafe/readme.md b/node_modules/buffer-alloc-unsafe/readme.md
new file mode 100644
index 00000000..8725ecf6
--- /dev/null
+++ b/node_modules/buffer-alloc-unsafe/readme.md
@@ -0,0 +1,46 @@
+# Buffer Alloc Unsafe
+
+A [ponyfill](https://ponyfill.com) for `Buffer.allocUnsafe`.
+
+Works as Node.js: `v7.0.0`
+Works on Node.js: `v0.10.0`
+
+## Installation
+
+```sh
+npm install --save buffer-alloc-unsafe
+```
+
+## Usage
+
+```js
+const allocUnsafe = require('buffer-alloc-unsafe')
+
+console.log(allocUnsafe(10))
+//=>
+
+console.log(allocUnsafe(10))
+//=>
+
+console.log(allocUnsafe(10))
+//=>
+
+allocUnsafe(-10)
+//=> RangeError: "size" argument must not be negative
+```
+
+## API
+
+### allocUnsafe(size)
+
+- `size` <Integer> The desired length of the new `Buffer`
+
+Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must be
+less than or equal to the value of `buffer.kMaxLength` and greater than or equal
+to zero. Otherwise, a `RangeError` is thrown.
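+
+Because the returned memory is not zeroed, a common pattern (a sketch, not part of this readme's original text) is to fully overwrite the buffer before handing it to other code:
+
+```js
+const allocUnsafe = require('buffer-alloc-unsafe')
+
+const buf = allocUnsafe(8) // contents are whatever happened to be in memory
+buf.fill(0)                // overwrite every byte before use
+console.log(buf)
+//=> <Buffer 00 00 00 00 00 00 00 00>
+```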
+
+## See also
+
+- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc`
+- [buffer-fill](https://github.com/LinusU/buffer-fill) A ponyfill for `Buffer.fill`
+- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from`
diff --git a/node_modules/buffer-alloc/index.js b/node_modules/buffer-alloc/index.js
new file mode 100644
index 00000000..fe658606
--- /dev/null
+++ b/node_modules/buffer-alloc/index.js
@@ -0,0 +1,32 @@
+var bufferFill = require('buffer-fill')
+var allocUnsafe = require('buffer-alloc-unsafe')
+
+module.exports = function alloc (size, fill, encoding) {
+ if (typeof size !== 'number') {
+ throw new TypeError('"size" argument must be a number')
+ }
+
+ if (size < 0) {
+ throw new RangeError('"size" argument must not be negative')
+ }
+
+ if (Buffer.alloc) {
+ return Buffer.alloc(size, fill, encoding)
+ }
+
+ var buffer = allocUnsafe(size)
+
+ if (size === 0) {
+ return buffer
+ }
+
+ if (fill === undefined) {
+ return bufferFill(buffer, 0)
+ }
+
+ if (typeof encoding !== 'string') {
+ encoding = undefined
+ }
+
+ return bufferFill(buffer, fill, encoding)
+}
diff --git a/node_modules/buffer-alloc/package.json b/node_modules/buffer-alloc/package.json
new file mode 100644
index 00000000..a8a3c3ac
--- /dev/null
+++ b/node_modules/buffer-alloc/package.json
@@ -0,0 +1,26 @@
+{
+ "name": "buffer-alloc",
+ "version": "1.2.0",
+ "license": "MIT",
+ "repository": "LinusU/buffer-alloc",
+ "files": [
+ "index.js"
+ ],
+ "scripts": {
+ "test": "standard && node test"
+ },
+ "dependencies": {
+ "buffer-alloc-unsafe": "^1.1.0",
+ "buffer-fill": "^1.0.0"
+ },
+ "devDependencies": {
+ "standard": "^7.1.2"
+ },
+ "keywords": [
+ "alloc",
+ "allocate",
+ "buffer alloc",
+ "buffer allocate",
+ "buffer"
+ ]
+}
diff --git a/node_modules/buffer-alloc/readme.md b/node_modules/buffer-alloc/readme.md
new file mode 100644
index 00000000..80c7d7bb
--- /dev/null
+++ b/node_modules/buffer-alloc/readme.md
@@ -0,0 +1,43 @@
+# Buffer Alloc
+
+A [ponyfill](https://ponyfill.com) for `Buffer.alloc`.
+
+Works as Node.js: `v7.0.0`
+Works on Node.js: `v0.10.0`
+
+## Installation
+
+```sh
+npm install --save buffer-alloc
+```
+
+## Usage
+
+```js
+const alloc = require('buffer-alloc')
+
+console.log(alloc(4))
+//=> <Buffer 00 00 00 00>
+
+console.log(alloc(6, 0x41))
+//=> <Buffer 41 41 41 41 41 41>
+
+console.log(alloc(10, 'linus', 'utf8'))
+//=> <Buffer 6c 69 6e 75 73 6c 69 6e 75 73>
+```
+
+## API
+
+### alloc(size[, fill[, encoding]])
+
+- `size` <Integer> The desired length of the new `Buffer`
+- `fill` <String> | <Buffer> | <Integer> A value to pre-fill the new `Buffer` with. **Default:** `0`
+- `encoding` <String> If `fill` is a string, this is its encoding. **Default:** `'utf8'`
+
+Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the `Buffer` will be zero-filled.
+
+## See also
+
+- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe`
+- [buffer-fill](https://github.com/LinusU/buffer-fill) A ponyfill for `Buffer.fill`
+- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from`
diff --git a/node_modules/buffer-crc32/LICENSE b/node_modules/buffer-crc32/LICENSE
new file mode 100644
index 00000000..4cef10eb
--- /dev/null
+++ b/node_modules/buffer-crc32/LICENSE
@@ -0,0 +1,19 @@
+The MIT License
+
+Copyright (c) 2013 Brian J. Brennan
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
+Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/buffer-crc32/README.md b/node_modules/buffer-crc32/README.md
new file mode 100644
index 00000000..0d9d8b83
--- /dev/null
+++ b/node_modules/buffer-crc32/README.md
@@ -0,0 +1,47 @@
+# buffer-crc32
+
+crc32 that works with binary data and fancy character sets, outputs a
+buffer, signed or unsigned data, and has tests.
+
+Derived from the sample CRC implementation in the PNG specification: http://www.w3.org/TR/PNG/#D-CRCAppendix
+
+# install
+```
+npm install buffer-crc32
+```
+
+# example
+```js
+var crc32 = require('buffer-crc32');
+// works with buffers
+var buf = Buffer([0x00, 0x73, 0x75, 0x70, 0x20, 0x62, 0x72, 0x6f, 0x00])
+crc32(buf) // -> <Buffer 94 5a ab 4a>
+
+// has convenience methods for getting signed or unsigned ints
+crc32.signed(buf) // -> -1805997238
+crc32.unsigned(buf) // -> 2488970058
+
+// will cast to buffer if given a string, so you can
+// directly use foreign characters safely
+crc32('自動販売機') // ->
+
+// and works in append mode too
+var partialCrc = crc32('hey');
+var partialCrc = crc32(' ', partialCrc);
+var partialCrc = crc32('sup', partialCrc);
+var partialCrc = crc32(' ', partialCrc);
+var finalCrc = crc32('bros', partialCrc); // ->
+```
+
+# tests
+This was tested against the output of zlib's crc32 method. You can run
+the tests with `npm test` (requires tap).
+
+# see also
+https://github.com/alexgorbatchev/node-crc, `crc.buffer.crc32` also
+supports buffer inputs and returns unsigned ints (thanks @tjholowaychuk).
+
+# license
+MIT/X11
diff --git a/node_modules/buffer-crc32/index.js b/node_modules/buffer-crc32/index.js
new file mode 100644
index 00000000..6727dd39
--- /dev/null
+++ b/node_modules/buffer-crc32/index.js
@@ -0,0 +1,111 @@
+var Buffer = require('buffer').Buffer;
+
+var CRC_TABLE = [
+ 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
+ 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
+ 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
+ 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
+ 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
+ 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
+ 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
+ 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
+ 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
+ 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
+ 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
+ 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
+ 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
+ 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
+ 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
+ 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
+ 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
+ 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
+ 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
+ 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
+ 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
+ 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
+ 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
+ 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
+ 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
+ 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
+ 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
+ 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
+ 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
+ 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
+ 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
+ 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
+ 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
+ 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
+ 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
+ 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
+ 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
+ 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
+ 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
+ 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
+ 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
+ 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
+ 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
+ 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
+ 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
+ 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
+ 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
+ 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
+ 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
+ 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
+ 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
+ 0x2d02ef8d
+];
+
+if (typeof Int32Array !== 'undefined') {
+ CRC_TABLE = new Int32Array(CRC_TABLE);
+}
+
+function ensureBuffer(input) {
+ if (Buffer.isBuffer(input)) {
+ return input;
+ }
+
+ var hasNewBufferAPI =
+ typeof Buffer.alloc === "function" &&
+ typeof Buffer.from === "function";
+
+ if (typeof input === "number") {
+ return hasNewBufferAPI ? Buffer.alloc(input) : new Buffer(input);
+ }
+ else if (typeof input === "string") {
+ return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input);
+ }
+ else {
+ throw new Error("input must be buffer, number, or string, received " +
+ typeof input);
+ }
+}
+
+function bufferizeInt(num) {
+ var tmp = ensureBuffer(4);
+ tmp.writeInt32BE(num, 0);
+ return tmp;
+}
+
+function _crc32(buf, previous) {
+ buf = ensureBuffer(buf);
+ if (Buffer.isBuffer(previous)) {
+ previous = previous.readUInt32BE(0);
+ }
+ var crc = ~~previous ^ -1;
+ for (var n = 0; n < buf.length; n++) {
+ crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8);
+ }
+ return (crc ^ -1);
+}
+
+function crc32() {
+ return bufferizeInt(_crc32.apply(null, arguments));
+}
+crc32.signed = function () {
+ return _crc32.apply(null, arguments);
+};
+crc32.unsigned = function () {
+ return _crc32.apply(null, arguments) >>> 0;
+};
+
+module.exports = crc32;
diff --git a/node_modules/buffer-crc32/package.json b/node_modules/buffer-crc32/package.json
new file mode 100644
index 00000000..e896bec5
--- /dev/null
+++ b/node_modules/buffer-crc32/package.json
@@ -0,0 +1,39 @@
+{
+ "author": "Brian J. Brennan ",
+ "name": "buffer-crc32",
+ "description": "A pure javascript CRC32 algorithm that plays nice with binary data",
+ "version": "0.2.13",
+ "licenses": [
+ {
+ "type": "MIT",
+ "url": "https://github.com/brianloveswords/buffer-crc32/raw/master/LICENSE"
+ }
+ ],
+ "contributors": [
+ {
+ "name": "Vladimir Kuznetsov",
+ "github": "mistakster"
+ }
+ ],
+ "homepage": "https://github.com/brianloveswords/buffer-crc32",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/brianloveswords/buffer-crc32.git"
+ },
+ "main": "index.js",
+ "scripts": {
+ "test": "./node_modules/.bin/tap tests/*.test.js"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "~0.2.5"
+ },
+ "optionalDependencies": {},
+ "engines": {
+ "node": "*"
+ },
+ "license": "MIT",
+ "files": [
+ "index.js"
+ ]
+}
diff --git a/node_modules/buffer-fill/index.js b/node_modules/buffer-fill/index.js
new file mode 100644
index 00000000..428a9e1f
--- /dev/null
+++ b/node_modules/buffer-fill/index.js
@@ -0,0 +1,113 @@
+/* Node.js 6.4.0 and up has full support */
+var hasFullSupport = (function () {
+ try {
+ if (!Buffer.isEncoding('latin1')) {
+ return false
+ }
+
+ var buf = Buffer.alloc ? Buffer.alloc(4) : new Buffer(4)
+
+ buf.fill('ab', 'ucs2')
+
+ return (buf.toString('hex') === '61006200')
+ } catch (_) {
+ return false
+ }
+}())
+
+function isSingleByte (val) {
+ return (val.length === 1 && val.charCodeAt(0) < 256)
+}
+
+function fillWithNumber (buffer, val, start, end) {
+ if (start < 0 || end > buffer.length) {
+ throw new RangeError('Out of range index')
+ }
+
+ start = start >>> 0
+ end = end === undefined ? buffer.length : end >>> 0
+
+ if (end > start) {
+ buffer.fill(val, start, end)
+ }
+
+ return buffer
+}
+
+function fillWithBuffer (buffer, val, start, end) {
+ if (start < 0 || end > buffer.length) {
+ throw new RangeError('Out of range index')
+ }
+
+ if (end <= start) {
+ return buffer
+ }
+
+ start = start >>> 0
+ end = end === undefined ? buffer.length : end >>> 0
+
+ var pos = start
+ var len = val.length
+ while (pos <= (end - len)) {
+ val.copy(buffer, pos)
+ pos += len
+ }
+
+ if (pos !== end) {
+ val.copy(buffer, pos, 0, end - pos)
+ }
+
+ return buffer
+}
+
+function fill (buffer, val, start, end, encoding) {
+ if (hasFullSupport) {
+ return buffer.fill(val, start, end, encoding)
+ }
+
+ if (typeof val === 'number') {
+ return fillWithNumber(buffer, val, start, end)
+ }
+
+ if (typeof val === 'string') {
+ if (typeof start === 'string') {
+ encoding = start
+ start = 0
+ end = buffer.length
+ } else if (typeof end === 'string') {
+ encoding = end
+ end = buffer.length
+ }
+
+ if (encoding !== undefined && typeof encoding !== 'string') {
+ throw new TypeError('encoding must be a string')
+ }
+
+ if (encoding === 'latin1') {
+ encoding = 'binary'
+ }
+
+ if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
+ throw new TypeError('Unknown encoding: ' + encoding)
+ }
+
+ if (val === '') {
+ return fillWithNumber(buffer, 0, start, end)
+ }
+
+ if (isSingleByte(val)) {
+ return fillWithNumber(buffer, val.charCodeAt(0), start, end)
+ }
+
+ val = new Buffer(val, encoding)
+ }
+
+ if (Buffer.isBuffer(val)) {
+ return fillWithBuffer(buffer, val, start, end)
+ }
+
+  // Other values (e.g. undefined, boolean, object) result in zero-fill
+ return fillWithNumber(buffer, 0, start, end)
+}
+
+module.exports = fill
diff --git a/node_modules/buffer-fill/package.json b/node_modules/buffer-fill/package.json
new file mode 100644
index 00000000..b8f67c58
--- /dev/null
+++ b/node_modules/buffer-fill/package.json
@@ -0,0 +1,16 @@
+{
+ "name": "buffer-fill",
+ "version": "1.0.0",
+ "license": "MIT",
+ "repository": "LinusU/buffer-fill",
+ "files": [
+ "index.js"
+ ],
+ "scripts": {
+ "test": "standard && node test"
+ },
+ "devDependencies": {
+ "buffer-alloc-unsafe": "^1.1.0",
+ "standard": "^7.1.2"
+ }
+}
diff --git a/node_modules/buffer-fill/readme.md b/node_modules/buffer-fill/readme.md
new file mode 100644
index 00000000..ac307383
--- /dev/null
+++ b/node_modules/buffer-fill/readme.md
@@ -0,0 +1,54 @@
+# Buffer Fill
+
+A [ponyfill](https://ponyfill.com) for `Buffer.fill`.
+
+Works as Node.js: `v6.4.0`
+Works on Node.js: `v0.10.0`
+
+## Installation
+
+```sh
+npm install --save buffer-fill
+```
+
+## Usage
+
+```js
+const fill = require('buffer-fill')
+const buf = Buffer.allocUnsafe(5)
+
+console.log(buf.fill(8))
+//=> <Buffer 08 08 08 08 08>
+
+console.log(buf.fill(9, 2, 4))
+//=> <Buffer 08 08 09 09 08>
+
+console.log(buf.fill('linus', 'latin1'))
+//=> <Buffer 6c 69 6e 75 73>
+
+console.log(buf.fill('\u0222'))
+//=> <Buffer c8 a2 c8 a2 c8>
+```
+
+## API
+
+### fill(buf, value[, offset[, end]][, encoding])
+
+- `value` <String> | <Buffer> | <Integer> The value to fill `buf` with
+- `offset` <Integer> Where to start filling `buf`. **Default:** `0`
+- `end` <Integer> Where to stop filling `buf` (not inclusive). **Default:** `buf.length`
+- `encoding` <String> If `value` is a string, this is its encoding. **Default:** `'utf8'`
+- Return: <Buffer> A reference to `buf`
+
+Fills `buf` with the specified `value`. If the `offset` and `end` are not given,
+the entire `buf` will be filled. This is meant to be a small simplification to
+allow the creation and filling of a `Buffer` to be done on a single line.
+
+If the final write of a `fill()` operation falls on a multi-byte character, then
+only the first bytes of that character that fit into `buf` are written.
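+
+A short sketch of the ponyfill's own call form (rather than the native `buf.fill` used in the usage section above), showing the multi-byte truncation described here:
+
+```js
+const fill = require('buffer-fill')
+const buf = Buffer.allocUnsafe(5)
+
+// '\u0222' encodes to the two UTF-8 bytes c8 a2; the fifth slot only has
+// room for the first byte of the final character, so it is truncated.
+console.log(fill(buf, '\u0222'))
+//=> <Buffer c8 a2 c8 a2 c8>
+```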
+
+## See also
+
+- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe`
+- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc`
+- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from`
diff --git a/node_modules/buffer/AUTHORS.md b/node_modules/buffer/AUTHORS.md
new file mode 100644
index 00000000..22eb1712
--- /dev/null
+++ b/node_modules/buffer/AUTHORS.md
@@ -0,0 +1,70 @@
+# Authors
+
+#### Ordered by first contribution.
+
+- Romain Beauxis (toots@rastageeks.org)
+- Tobias Koppers (tobias.koppers@googlemail.com)
+- Janus (ysangkok@gmail.com)
+- Rainer Dreyer (rdrey1@gmail.com)
+- Tõnis Tiigi (tonistiigi@gmail.com)
+- James Halliday (mail@substack.net)
+- Michael Williamson (mike@zwobble.org)
+- elliottcable (github@elliottcable.name)
+- rafael (rvalle@livelens.net)
+- Andrew Kelley (superjoe30@gmail.com)
+- Andreas Madsen (amwebdk@gmail.com)
+- Mike Brevoort (mike.brevoort@pearson.com)
+- Brian White (mscdex@mscdex.net)
+- Feross Aboukhadijeh (feross@feross.org)
+- Ruben Verborgh (ruben@verborgh.org)
+- eliang (eliang.cs@gmail.com)
+- Jesse Tane (jesse.tane@gmail.com)
+- Alfonso Boza (alfonso@cloud.com)
+- Mathias Buus (mathiasbuus@gmail.com)
+- Devon Govett (devongovett@gmail.com)
+- Daniel Cousens (github@dcousens.com)
+- Joseph Dykstra (josephdykstra@gmail.com)
+- Parsha Pourkhomami (parshap+git@gmail.com)
+- Damjan Košir (damjan.kosir@gmail.com)
+- daverayment (dave.rayment@gmail.com)
+- kawanet (u-suke@kawa.net)
+- Linus Unnebäck (linus@folkdatorn.se)
+- Nolan Lawson (nolan.lawson@gmail.com)
+- Calvin Metcalf (calvin.metcalf@gmail.com)
+- Koki Takahashi (hakatasiloving@gmail.com)
+- Guy Bedford (guybedford@gmail.com)
+- Jan Schär (jscissr@gmail.com)
+- RaulTsc (tomescu.raul@gmail.com)
+- Matthieu Monsch (monsch@alum.mit.edu)
+- Dan Ehrenberg (littledan@chromium.org)
+- Kirill Fomichev (fanatid@ya.ru)
+- Yusuke Kawasaki (u-suke@kawa.net)
+- DC (dcposch@dcpos.ch)
+- John-David Dalton (john.david.dalton@gmail.com)
+- adventure-yunfei (adventure030@gmail.com)
+- Emil Bay (github@tixz.dk)
+- Sam Sudar (sudar.sam@gmail.com)
+- Volker Mische (volker.mische@gmail.com)
+- David Walton (support@geekstocks.com)
+- Сковорода Никита Андреевич (chalkerx@gmail.com)
+- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com)
+- ukstv (sergey.ukustov@machinomy.com)
+- Renée Kooi (renee@kooi.me)
+- ranbochen (ranbochen@qq.com)
+- Vladimir Borovik (bobahbdb@gmail.com)
+- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com)
+- kumavis (aaron@kumavis.me)
+- Sergey Ukustov (sergey.ukustov@machinomy.com)
+- Fei Liu (liu.feiwood@gmail.com)
+- Blaine Bublitz (blaine.bublitz@gmail.com)
+- clement (clement@seald.io)
+- Koushik Dutta (koushd@gmail.com)
+- Jordan Harband (ljharb@gmail.com)
+- Niklas Mischkulnig (mischnic@users.noreply.github.com)
+- Nikolai Vavilov (vvnicholas@gmail.com)
+- Fedor Nezhivoi (gyzerok@users.noreply.github.com)
+- Peter Newman (peternewman@users.noreply.github.com)
+- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com)
+- jkkang (jkkang@smartauth.kr)
+
+#### Generated by bin/update-authors.sh.
diff --git a/node_modules/buffer/LICENSE b/node_modules/buffer/LICENSE
new file mode 100644
index 00000000..d6bf75dc
--- /dev/null
+++ b/node_modules/buffer/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Feross Aboukhadijeh, and other contributors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/buffer/README.md b/node_modules/buffer/README.md
new file mode 100644
index 00000000..9a23d7cf
--- /dev/null
+++ b/node_modules/buffer/README.md
@@ -0,0 +1,410 @@
+# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
+
+[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg
+[travis-url]: https://travis-ci.org/feross/buffer
+[npm-image]: https://img.shields.io/npm/v/buffer.svg
+[npm-url]: https://npmjs.org/package/buffer
+[downloads-image]: https://img.shields.io/npm/dm/buffer.svg
+[downloads-url]: https://npmjs.org/package/buffer
+[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
+[standard-url]: https://standardjs.com
+
+#### The buffer module from [node.js](https://nodejs.org/), for the browser.
+
+[![saucelabs][saucelabs-image]][saucelabs-url]
+
+[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg
+[saucelabs-url]: https://saucelabs.com/u/buffer
+
+With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module.
+
+The goal is to provide an API that is 100% identical to
+[node's Buffer API](https://nodejs.org/api/buffer.html). Read the
+[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
+instance methods, and class methods that are supported.
+
+## features
+
+- Manipulate binary data like a boss, in all browsers!
+- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`)
+- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments)
+- Excellent browser support (Chrome, Firefox, Edge, Safari 9+, IE 11, iOS 9+, Android, etc.)
+- Preserves Node API exactly, with one minor difference (see below)
+- Square-bracket `buf[4]` notation works!
+- Does not modify any browser prototypes or put anything on `window`
+- Comprehensive test suite (including all buffer tests from node.js core)
+
+## install
+
+To use this module directly (without browserify), install it:
+
+```bash
+npm install buffer
+```
+
+This module was previously called **native-buffer-browserify**, but please use **buffer**
+from now on.
+
+If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer).
+
+## usage
+
+The module's API is identical to node's `Buffer` API. Read the
+[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
+instance methods, and class methods that are supported.
+
+As mentioned above, `require('buffer')` or use the `Buffer` global with
+[browserify](http://browserify.org) and this module will automatically be included
+in your bundle. Almost any npm module will work in the browser, even if it assumes that
+the node `Buffer` API will be available.
+
+To depend on this module explicitly (without browserify), require it like this:
+
+```js
+var Buffer = require('buffer/').Buffer // note: the trailing slash is important!
+```
+
+To require this module explicitly, use `require('buffer/')` which tells the node.js module
+lookup algorithm (also used by browserify) to use the **npm module** named `buffer`
+instead of the **node.js core** module named `buffer`!
+
+
+## how does it work?
+
+The Buffer constructor returns instances of `Uint8Array` that have their prototype
+changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`,
+so the returned instances will have all the node `Buffer` methods and the
+`Uint8Array` methods. Square bracket notation works as expected -- it returns a
+single octet.
+
+The `Uint8Array` prototype remains unmodified.
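+
+A small sketch of what this means in practice (these are standard `Buffer`/`Uint8Array` behaviors, not APIs specific to this package):
+
+```js
+var Buffer = require('buffer/').Buffer
+
+var buf = Buffer.from([1, 2, 3])
+console.log(buf instanceof Uint8Array)                //=> true
+console.log(buf[1])                                   //=> 2 (a single octet)
+console.log(typeof Uint8Array.prototype.readUInt32LE) //=> 'undefined' (Uint8Array is untouched)
+```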
+
+
+## tracking the latest node api
+
+This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer
+API is considered **stable** in the
+[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index),
+so it is unlikely that there will ever be breaking changes.
+Nonetheless, when/if the Buffer API changes in node, this module's API will change
+accordingly.
+
+## related packages
+
+- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer
+- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer
+- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package
+
+## conversion packages
+
+### convert typed array to buffer
+
+Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast.
+
+### convert buffer to typed array
+
+`Buffer` is a subclass of `Uint8Array` (which is a typed array). So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`.
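+
+For example, a buffer can be passed directly to code that expects a typed array (a minimal sketch):
+
+```js
+var buf = Buffer.from([104, 105])
+console.log(buf instanceof Uint8Array) //=> true
+console.log(Array.from(buf))           //=> [ 104, 105 ]
+```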
+
+### convert blob to buffer
+
+Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`.
+
+### convert buffer to blob
+
+To convert a `Buffer` to a `Blob`, use the `Blob` constructor:
+
+```js
+var blob = new Blob([ buffer ])
+```
+
+Optionally, specify a mimetype:
+
+```js
+var blob = new Blob([ buffer ], { type: 'text/html' })
+```
+
+### convert arraybuffer to buffer
+
+To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast.
+
+```js
+var buffer = Buffer.from(arrayBuffer)
+```
+
+### convert buffer to arraybuffer
+
+To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects):
+
+```js
+var arrayBuffer = buffer.buffer.slice(
+ buffer.byteOffset, buffer.byteOffset + buffer.byteLength
+)
+```
+
+Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module.
+
+## performance
+
+See perf tests in `/perf`.
+
+`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a
+sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will
+always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module,
+which is included to compare against.
+
+NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README.
+
+### Chrome 38
+
+| Method | Operations | Accuracy | Sampled | Fastest |
+|:-------|:-----------|:---------|:--------|:-------:|
+| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ |
+| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | |
+| | | | |
+| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | |
+| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | |
+| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | |
+| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ |
+| | | | |
+| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | |
+| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ |
+| | | | |
+| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | |
+| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ |
+| | | | |
+| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ |
+| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | |
+| | | | |
+| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ |
+| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | |
+| | | | |
+| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ |
+| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | |
+| | | | |
+| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | |
+| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ |
+| | | | |
+| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | |
+| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ |
+
+
+### Firefox 33
+
+| Method | Operations | Accuracy | Sampled | Fastest |
+|:-------|:-----------|:---------|:--------|:-------:|
+| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | |
+| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ |
+| | | | |
+| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | |
+| Uint8Array#concat | 1,255,674 ops/sec | ±8.65% | 52 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | |
+| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | |
+| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ |
+| | | | |
+| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | |
+| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ |
+| | | | |
+| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | |
+| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ |
+| | | | |
+| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | |
+| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ |
+| | | | |
+| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | |
+| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ |
+| | | | |
+| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ |
+| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | |
+| | | | |
+| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | |
+| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ |
+| | | | |
+| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | |
+| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ |
+
+### Safari 8
+
+| Method | Operations | Accuracy | Sampled | Fastest |
+|:-------|:-----------|:---------|:--------|:-------:|
+| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ |
+| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | |
+| | | | |
+| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | |
+| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | |
+| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | |
+| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ |
+| | | | |
+| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | |
+| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ |
+| | | | |
+| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | |
+| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ |
+| | | | |
+| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | |
+| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ |
+| | | | |
+| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | |
+| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ |
+| | | | |
+| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | |
+| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ |
+| | | | |
+| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | |
+| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ |
+| | | | |
+| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | |
+| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ |
+
+
+### Node 0.11.14
+
+| Method | Operations | Accuracy | Sampled | Fastest |
+|:-------|:-----------|:---------|:--------|:-------:|
+| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | |
+| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ |
+| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | |
+| | | | |
+| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | |
+| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ |
+| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | |
+| | | | |
+| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | |
+| Uint8Array#copy(16000) | 1,348,517 ops/sec | ±0.84% | 89 | ✓ |
+| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | |
+| | | | |
+| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | |
+| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ |
+| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | |
+| | | | |
+| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | |
+| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | |
+| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ |
+| | | | |
+| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | |
+| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ |
+| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | |
+| | | | |
+| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ |
+| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | |
+| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | |
+| | | | |
+| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ |
+| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | |
+| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | |
+| | | | |
+| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | |
+| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | |
+| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ |
+| | | | |
+| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | |
+| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ |
+| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | |
+| | | | |
+| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | |
+| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ |
+| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | |
+
+### iojs 1.8.1
+
+| Method | Operations | Accuracy | Sampled | Fastest |
+|:-------|:-----------|:---------|:--------|:-------:|
+| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | |
+| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | |
+| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ |
+| | | | |
+| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | |
+| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | |
+| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | |
+| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | |
+| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ |
+| | | | |
+| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | |
+| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ |
+| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | |
+| | | | |
+| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | |
+| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | |
+| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ |
+| | | | |
+| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | |
+| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ |
+| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | |
+| | | | |
+| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ |
+| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | |
+| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | |
+| | | | |
+| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ |
+| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | |
+| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | |
+| | | | |
+| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | |
+| DataView#getUint32 | 137,592 ops/sec | ±0.64% | 90 | |
+| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ |
+| | | | |
+| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | |
+| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | |
+| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ |
+| | | | |
+| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | |
+| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ |
+| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | |
+
+## Testing the project
+
+First, install the project:
+
+ npm install
+
+Then, to run tests in Node.js, run:
+
+ npm run test-node
+
+To test locally in a browser, you can run:
+
+ npm run test-browser-es5-local # For ES5 browsers that don't support ES6
+ npm run test-browser-es6-local # For ES6 compliant browsers
+
+This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap).
+
+To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run:
+
+ npm test
+
+This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files.
+
+## JavaScript Standard Style
+
+This module uses [JavaScript Standard Style](https://github.com/feross/standard).
+
+To test that the code conforms to the style, `npm install` and run:
+
+ ./node_modules/.bin/standard
+
+## credit
+
+This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify).
+
+## Security Policies and Procedures
+
+The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues.
+
+## license
+
+MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis.
diff --git a/node_modules/buffer/index.d.ts b/node_modules/buffer/index.d.ts
new file mode 100644
index 00000000..5d1a804e
--- /dev/null
+++ b/node_modules/buffer/index.d.ts
@@ -0,0 +1,186 @@
+export class Buffer extends Uint8Array {
+ length: number
+ write(string: string, offset?: number, length?: number, encoding?: string): number;
+ toString(encoding?: string, start?: number, end?: number): string;
+ toJSON(): { type: 'Buffer', data: any[] };
+ equals(otherBuffer: Buffer): boolean;
+ compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
+ copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
+ slice(start?: number, end?: number): Buffer;
+ writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
+ readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
+ readUInt8(offset: number, noAssert?: boolean): number;
+ readUInt16LE(offset: number, noAssert?: boolean): number;
+ readUInt16BE(offset: number, noAssert?: boolean): number;
+ readUInt32LE(offset: number, noAssert?: boolean): number;
+ readUInt32BE(offset: number, noAssert?: boolean): number;
+ readInt8(offset: number, noAssert?: boolean): number;
+ readInt16LE(offset: number, noAssert?: boolean): number;
+ readInt16BE(offset: number, noAssert?: boolean): number;
+ readInt32LE(offset: number, noAssert?: boolean): number;
+ readInt32BE(offset: number, noAssert?: boolean): number;
+ readFloatLE(offset: number, noAssert?: boolean): number;
+ readFloatBE(offset: number, noAssert?: boolean): number;
+ readDoubleLE(offset: number, noAssert?: boolean): number;
+ readDoubleBE(offset: number, noAssert?: boolean): number;
+ reverse(): this;
+ swap16(): Buffer;
+ swap32(): Buffer;
+ swap64(): Buffer;
+ writeUInt8(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
+ writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt8(value: number, offset: number, noAssert?: boolean): number;
+ writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
+ writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
+ writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
+ writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
+ writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
+ writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
+ fill(value: any, offset?: number, end?: number): this;
+ indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
+ lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
+ includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
+
+ /**
+ * Allocates a new buffer containing the given {str}.
+ *
+ * @param str String to store in buffer.
+ * @param encoding encoding to use, optional. Default is 'utf8'
+ */
+ constructor (str: string, encoding?: string);
+ /**
+ * Allocates a new buffer of {size} octets.
+ *
+ * @param size count of octets to allocate.
+ */
+ constructor (size: number);
+ /**
+ * Allocates a new buffer containing the given {array} of octets.
+ *
+ * @param array The octets to store.
+ */
+ constructor (array: Uint8Array);
+ /**
+ * Produces a Buffer backed by the same allocated memory as
+ * the given {ArrayBuffer}.
+ *
+ *
+ * @param arrayBuffer The ArrayBuffer with which to share memory.
+ */
+ constructor (arrayBuffer: ArrayBuffer);
+ /**
+ * Allocates a new buffer containing the given {array} of octets.
+ *
+ * @param array The octets to store.
+ */
+ constructor (array: any[]);
+ /**
+ * Copies the passed {buffer} data onto a new {Buffer} instance.
+ *
+ * @param buffer The buffer to copy.
+ */
+ constructor (buffer: Buffer);
+ prototype: Buffer;
+ /**
+ * Allocates a new Buffer using an {array} of octets.
+ *
+ * @param array
+ */
+ static from(array: any[]): Buffer;
+ /**
+ * When passed a reference to the .buffer property of a TypedArray instance,
+ * the newly created Buffer will share the same allocated memory as the TypedArray.
+ * The optional {byteOffset} and {length} arguments specify a memory range
+ * within the {arrayBuffer} that will be shared by the Buffer.
+ *
+ * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
+ * @param byteOffset
+ * @param length
+ */
+ static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
+ /**
+ * Copies the passed {buffer} data onto a new Buffer instance.
+ *
+ * @param buffer
+ */
+ static from(buffer: Buffer | Uint8Array): Buffer;
+ /**
+ * Creates a new Buffer containing the given JavaScript string {str}.
+ * If provided, the {encoding} parameter identifies the character encoding.
+ * If not provided, {encoding} defaults to 'utf8'.
+ *
+ * @param str
+ */
+ static from(str: string, encoding?: string): Buffer;
+ /**
+ * Returns true if {obj} is a Buffer
+ *
+ * @param obj object to test.
+ */
+ static isBuffer(obj: any): obj is Buffer;
+ /**
+ * Returns true if {encoding} is a valid encoding argument.
+ * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
+ *
+ * @param encoding string to test.
+ */
+ static isEncoding(encoding: string): boolean;
+ /**
+ * Gives the actual byte length of a string. encoding defaults to 'utf8'.
+ * This is not the same as String.prototype.length since that returns the number of characters in a string.
+ *
+ * @param string string to test.
+ * @param encoding encoding used to evaluate (defaults to 'utf8')
+ */
+ static byteLength(string: string, encoding?: string): number;
+ /**
+ * Returns a buffer which is the result of concatenating all the buffers in the list together.
+ *
+ * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
+ * If the list has exactly one item, then the first item of the list is returned.
+ * If the list has more than one item, then a new Buffer is created.
+ *
+ * @param list An array of Buffer objects to concatenate
+ * @param totalLength Total length of the buffers when concatenated.
+ * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
+ */
+ static concat(list: Buffer[], totalLength?: number): Buffer;
+ /**
+ * The same as buf1.compare(buf2).
+ */
+ static compare(buf1: Buffer, buf2: Buffer): number;
+ /**
+ * Allocates a new buffer of {size} octets.
+ *
+ * @param size count of octets to allocate.
+ * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
+ * If parameter is omitted, buffer will be filled with zeros.
+ * @param encoding encoding used for call to buf.fill while initializing
+ */
+ static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
+ /**
+ * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
+ * of the newly created Buffer are unknown and may contain sensitive data.
+ *
+ * @param size count of octets to allocate
+ */
+ static allocUnsafe(size: number): Buffer;
+ /**
+ * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
+ * of the newly created Buffer are unknown and may contain sensitive data.
+ *
+ * @param size count of octets to allocate
+ */
+ static allocUnsafeSlow(size: number): Buffer;
+}
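+
+// Illustrative usage sketch (not part of the upstream typings): with the
+// declarations above, typical calls type-check as follows.
+//
+//   const buf = Buffer.from('hello', 'utf8')            // Buffer
+//   buf.readUInt8(0)                                     // number
+//   Buffer.alloc(4).equals(Buffer.from([0, 0, 0, 0]))    // boolean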
diff --git a/node_modules/buffer/index.js b/node_modules/buffer/index.js
new file mode 100644
index 00000000..609cf311
--- /dev/null
+++ b/node_modules/buffer/index.js
@@ -0,0 +1,1817 @@
+/*!
+ * The buffer module from node.js, for the browser.
+ *
+ * @author   Feross Aboukhadijeh <https://feross.org>
+ * @license MIT
+ */
+/* eslint-disable no-proto */
+
+'use strict'
+
+var base64 = require('base64-js')
+var ieee754 = require('ieee754')
+var customInspectSymbol =
+ (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation
+ ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation
+ : null
+
+exports.Buffer = Buffer
+exports.SlowBuffer = SlowBuffer
+exports.INSPECT_MAX_BYTES = 50
+
+var K_MAX_LENGTH = 0x7fffffff
+exports.kMaxLength = K_MAX_LENGTH
+
+/**
+ * If `Buffer.TYPED_ARRAY_SUPPORT`:
+ * === true Use Uint8Array implementation (fastest)
+ * === false Print warning and recommend using `buffer` v4.x which has an Object
+ * implementation (most compatible, even IE6)
+ *
+ * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
+ * Opera 11.6+, iOS 4.2+.
+ *
+ * We report that the browser does not support typed arrays if they are not subclassable
+ * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array`
+ * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support
+ * for __proto__ and has a buggy typed array implementation.
+ */
+Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport()
+
+if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' &&
+ typeof console.error === 'function') {
+ console.error(
+ 'This browser lacks typed array (Uint8Array) support which is required by ' +
+ '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.'
+ )
+}
+
+function typedArraySupport () {
+  // Can typed array instances be augmented?
+ try {
+ var arr = new Uint8Array(1)
+ var proto = { foo: function () { return 42 } }
+ Object.setPrototypeOf(proto, Uint8Array.prototype)
+ Object.setPrototypeOf(arr, proto)
+ return arr.foo() === 42
+ } catch (e) {
+ return false
+ }
+}
+
+Object.defineProperty(Buffer.prototype, 'parent', {
+ enumerable: true,
+ get: function () {
+ if (!Buffer.isBuffer(this)) return undefined
+ return this.buffer
+ }
+})
+
+Object.defineProperty(Buffer.prototype, 'offset', {
+ enumerable: true,
+ get: function () {
+ if (!Buffer.isBuffer(this)) return undefined
+ return this.byteOffset
+ }
+})
+
+function createBuffer (length) {
+ if (length > K_MAX_LENGTH) {
+ throw new RangeError('The value "' + length + '" is invalid for option "size"')
+ }
+ // Return an augmented `Uint8Array` instance
+ var buf = new Uint8Array(length)
+ Object.setPrototypeOf(buf, Buffer.prototype)
+ return buf
+}
+
+/**
+ * The Buffer constructor returns instances of `Uint8Array` that have their
+ * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
+ * `Uint8Array`, so the returned instances will have all the node `Buffer` methods
+ * and the `Uint8Array` methods. Square bracket notation works as expected -- it
+ * returns a single octet.
+ *
+ * The `Uint8Array` prototype remains unmodified.
+ */
+
+function Buffer (arg, encodingOrOffset, length) {
+ // Common case.
+ if (typeof arg === 'number') {
+ if (typeof encodingOrOffset === 'string') {
+ throw new TypeError(
+ 'The "string" argument must be of type string. Received type number'
+ )
+ }
+ return allocUnsafe(arg)
+ }
+ return from(arg, encodingOrOffset, length)
+}
+
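+// Illustrative sketch of the behaviour described above: instances are
+// augmented Uint8Array views, so bracket notation reads a single octet.
+//
+//   var buf = Buffer.from('abc')
+//   buf[0]                      // => 0x61
+//   buf instanceof Uint8Array   // => true
+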
+Buffer.poolSize = 8192 // not used by this implementation
+
+function from (value, encodingOrOffset, length) {
+ if (typeof value === 'string') {
+ return fromString(value, encodingOrOffset)
+ }
+
+ if (ArrayBuffer.isView(value)) {
+ return fromArrayView(value)
+ }
+
+ if (value == null) {
+ throw new TypeError(
+ 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
+ 'or Array-like Object. Received type ' + (typeof value)
+ )
+ }
+
+ if (isInstance(value, ArrayBuffer) ||
+ (value && isInstance(value.buffer, ArrayBuffer))) {
+ return fromArrayBuffer(value, encodingOrOffset, length)
+ }
+
+ if (typeof SharedArrayBuffer !== 'undefined' &&
+ (isInstance(value, SharedArrayBuffer) ||
+ (value && isInstance(value.buffer, SharedArrayBuffer)))) {
+ return fromArrayBuffer(value, encodingOrOffset, length)
+ }
+
+ if (typeof value === 'number') {
+ throw new TypeError(
+ 'The "value" argument must not be of type number. Received type number'
+ )
+ }
+
+ var valueOf = value.valueOf && value.valueOf()
+ if (valueOf != null && valueOf !== value) {
+ return Buffer.from(valueOf, encodingOrOffset, length)
+ }
+
+ var b = fromObject(value)
+ if (b) return b
+
+ if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null &&
+ typeof value[Symbol.toPrimitive] === 'function') {
+ return Buffer.from(
+ value[Symbol.toPrimitive]('string'), encodingOrOffset, length
+ )
+ }
+
+ throw new TypeError(
+ 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +
+ 'or Array-like Object. Received type ' + (typeof value)
+ )
+}
+
+/**
+ * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
+ * if value is a number.
+ * Buffer.from(str[, encoding])
+ * Buffer.from(array)
+ * Buffer.from(buffer)
+ * Buffer.from(arrayBuffer[, byteOffset[, length]])
+ **/
+Buffer.from = function (value, encodingOrOffset, length) {
+ return from(value, encodingOrOffset, length)
+}
+
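+// Usage sketch for the call forms listed above (values are illustrative):
+//
+//   Buffer.from('abc')                     // from a string (utf8 by default)
+//   Buffer.from('616263', 'hex')           // from a string, explicit encoding
+//   Buffer.from([1, 2, 3])                 // from an array of octets
+//   Buffer.from(new ArrayBuffer(8), 2, 4)  // view onto part of an ArrayBuffer
+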
+// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:
+// https://github.com/feross/buffer/pull/148
+Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype)
+Object.setPrototypeOf(Buffer, Uint8Array)
+
+function assertSize (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('"size" argument must be of type number')
+ } else if (size < 0) {
+ throw new RangeError('The value "' + size + '" is invalid for option "size"')
+ }
+}
+
+function alloc (size, fill, encoding) {
+ assertSize(size)
+ if (size <= 0) {
+ return createBuffer(size)
+ }
+ if (fill !== undefined) {
+ // Only pay attention to encoding if it's a string. This
+ // prevents accidentally sending in a number that would
+ // be interpreted as a start offset.
+ return typeof encoding === 'string'
+ ? createBuffer(size).fill(fill, encoding)
+ : createBuffer(size).fill(fill)
+ }
+ return createBuffer(size)
+}
+
+/**
+ * Creates a new filled Buffer instance.
+ * alloc(size[, fill[, encoding]])
+ **/
+Buffer.alloc = function (size, fill, encoding) {
+ return alloc(size, fill, encoding)
+}
+
+function allocUnsafe (size) {
+ assertSize(size)
+ return createBuffer(size < 0 ? 0 : checked(size) | 0)
+}
+
+/**
+ * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
+ * */
+Buffer.allocUnsafe = function (size) {
+ return allocUnsafe(size)
+}
+/**
+ * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
+ */
+Buffer.allocUnsafeSlow = function (size) {
+ return allocUnsafe(size)
+}
+
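+// Sketch of the difference between the allocators above (illustrative):
+//
+//   Buffer.alloc(4)          // zero-filled: <Buffer 00 00 00 00>
+//   Buffer.alloc(4, 0x61)    // filled with a value: <Buffer 61 61 61 61>
+//   Buffer.allocUnsafe(4)    // contents are not guaranteed to be zeroed
+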
+function fromString (string, encoding) {
+ if (typeof encoding !== 'string' || encoding === '') {
+ encoding = 'utf8'
+ }
+
+ if (!Buffer.isEncoding(encoding)) {
+ throw new TypeError('Unknown encoding: ' + encoding)
+ }
+
+ var length = byteLength(string, encoding) | 0
+ var buf = createBuffer(length)
+
+ var actual = buf.write(string, encoding)
+
+ if (actual !== length) {
+ // Writing a hex string, for example, that contains invalid characters will
+ // cause everything after the first invalid character to be ignored. (e.g.
+ // 'abxxcd' will be treated as 'ab')
+ buf = buf.slice(0, actual)
+ }
+
+ return buf
+}
+
+function fromArrayLike (array) {
+ var length = array.length < 0 ? 0 : checked(array.length) | 0
+ var buf = createBuffer(length)
+ for (var i = 0; i < length; i += 1) {
+ buf[i] = array[i] & 255
+ }
+ return buf
+}
+
+function fromArrayView (arrayView) {
+ if (isInstance(arrayView, Uint8Array)) {
+ var copy = new Uint8Array(arrayView)
+ return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength)
+ }
+ return fromArrayLike(arrayView)
+}
+
+function fromArrayBuffer (array, byteOffset, length) {
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
+ throw new RangeError('"offset" is outside of buffer bounds')
+ }
+
+ if (array.byteLength < byteOffset + (length || 0)) {
+ throw new RangeError('"length" is outside of buffer bounds')
+ }
+
+ var buf
+ if (byteOffset === undefined && length === undefined) {
+ buf = new Uint8Array(array)
+ } else if (length === undefined) {
+ buf = new Uint8Array(array, byteOffset)
+ } else {
+ buf = new Uint8Array(array, byteOffset, length)
+ }
+
+ // Return an augmented `Uint8Array` instance
+ Object.setPrototypeOf(buf, Buffer.prototype)
+
+ return buf
+}
+
+function fromObject (obj) {
+ if (Buffer.isBuffer(obj)) {
+ var len = checked(obj.length) | 0
+ var buf = createBuffer(len)
+
+ if (buf.length === 0) {
+ return buf
+ }
+
+ obj.copy(buf, 0, 0, len)
+ return buf
+ }
+
+ if (obj.length !== undefined) {
+ if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) {
+ return createBuffer(0)
+ }
+ return fromArrayLike(obj)
+ }
+
+ if (obj.type === 'Buffer' && Array.isArray(obj.data)) {
+ return fromArrayLike(obj.data)
+ }
+}
+
+function checked (length) {
+ // Note: cannot use `length < K_MAX_LENGTH` here because that fails when
+ // length is NaN (which is otherwise coerced to zero.)
+ if (length >= K_MAX_LENGTH) {
+ throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
+ 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes')
+ }
+ return length | 0
+}
+
+function SlowBuffer (length) {
+ if (+length != length) { // eslint-disable-line eqeqeq
+ length = 0
+ }
+ return Buffer.alloc(+length)
+}
+
+Buffer.isBuffer = function isBuffer (b) {
+ return b != null && b._isBuffer === true &&
+ b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false
+}
+
+Buffer.compare = function compare (a, b) {
+ if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength)
+ if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength)
+ if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
+ throw new TypeError(
+ 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array'
+ )
+ }
+
+ if (a === b) return 0
+
+ var x = a.length
+ var y = b.length
+
+ for (var i = 0, len = Math.min(x, y); i < len; ++i) {
+ if (a[i] !== b[i]) {
+ x = a[i]
+ y = b[i]
+ break
+ }
+ }
+
+ if (x < y) return -1
+ if (y < x) return 1
+ return 0
+}
+
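+// Illustrative results for Buffer.compare:
+//
+//   Buffer.compare(Buffer.from('a'), Buffer.from('b'))   // => -1
+//   Buffer.compare(Buffer.from('b'), Buffer.from('a'))   // => 1
+//   Buffer.compare(Buffer.from('a'), Buffer.from('a'))   // => 0
+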
+Buffer.isEncoding = function isEncoding (encoding) {
+ switch (String(encoding).toLowerCase()) {
+ case 'hex':
+ case 'utf8':
+ case 'utf-8':
+ case 'ascii':
+ case 'latin1':
+ case 'binary':
+ case 'base64':
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return true
+ default:
+ return false
+ }
+}
+
+Buffer.concat = function concat (list, length) {
+ if (!Array.isArray(list)) {
+ throw new TypeError('"list" argument must be an Array of Buffers')
+ }
+
+ if (list.length === 0) {
+ return Buffer.alloc(0)
+ }
+
+ var i
+ if (length === undefined) {
+ length = 0
+ for (i = 0; i < list.length; ++i) {
+ length += list[i].length
+ }
+ }
+
+ var buffer = Buffer.allocUnsafe(length)
+ var pos = 0
+ for (i = 0; i < list.length; ++i) {
+ var buf = list[i]
+ if (isInstance(buf, Uint8Array)) {
+ if (pos + buf.length > buffer.length) {
+ Buffer.from(buf).copy(buffer, pos)
+ } else {
+ Uint8Array.prototype.set.call(
+ buffer,
+ buf,
+ pos
+ )
+ }
+ } else if (!Buffer.isBuffer(buf)) {
+ throw new TypeError('"list" argument must be an Array of Buffers')
+ } else {
+ buf.copy(buffer, pos)
+ }
+ pos += buf.length
+ }
+ return buffer
+}
+
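+// Usage sketch for Buffer.concat (illustrative): passing totalLength skips
+// the extra length-summing loop above.
+//
+//   var joined = Buffer.concat([Buffer.from([1, 2]), Buffer.from([3])], 3)
+//   joined.length   // => 3, contents <Buffer 01 02 03>
+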
+function byteLength (string, encoding) {
+ if (Buffer.isBuffer(string)) {
+ return string.length
+ }
+ if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) {
+ return string.byteLength
+ }
+ if (typeof string !== 'string') {
+ throw new TypeError(
+ 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' +
+ 'Received type ' + typeof string
+ )
+ }
+
+ var len = string.length
+ var mustMatch = (arguments.length > 2 && arguments[2] === true)
+ if (!mustMatch && len === 0) return 0
+
+ // Use a for loop to avoid recursion
+ var loweredCase = false
+ for (;;) {
+ switch (encoding) {
+ case 'ascii':
+ case 'latin1':
+ case 'binary':
+ return len
+ case 'utf8':
+ case 'utf-8':
+ return utf8ToBytes(string).length
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return len * 2
+ case 'hex':
+ return len >>> 1
+ case 'base64':
+ return base64ToBytes(string).length
+ default:
+ if (loweredCase) {
+ return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8
+ }
+ encoding = ('' + encoding).toLowerCase()
+ loweredCase = true
+ }
+ }
+}
+Buffer.byteLength = byteLength
+
+function slowToString (encoding, start, end) {
+ var loweredCase = false
+
+ // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
+ // property of a typed array.
+
+ // This behaves neither like String nor Uint8Array in that we set start/end
+ // to their upper/lower bounds if the value passed is out of range.
+ // undefined is handled specially as per ECMA-262 6th Edition,
+ // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
+ if (start === undefined || start < 0) {
+ start = 0
+ }
+ // Return early if start > this.length. Done here to prevent potential uint32
+ // coercion fail below.
+ if (start > this.length) {
+ return ''
+ }
+
+ if (end === undefined || end > this.length) {
+ end = this.length
+ }
+
+ if (end <= 0) {
+ return ''
+ }
+
+ // Force coercion to uint32. This will also coerce falsey/NaN values to 0.
+ end >>>= 0
+ start >>>= 0
+
+ if (end <= start) {
+ return ''
+ }
+
+ if (!encoding) encoding = 'utf8'
+
+ while (true) {
+ switch (encoding) {
+ case 'hex':
+ return hexSlice(this, start, end)
+
+ case 'utf8':
+ case 'utf-8':
+ return utf8Slice(this, start, end)
+
+ case 'ascii':
+ return asciiSlice(this, start, end)
+
+ case 'latin1':
+ case 'binary':
+ return latin1Slice(this, start, end)
+
+ case 'base64':
+ return base64Slice(this, start, end)
+
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return utf16leSlice(this, start, end)
+
+ default:
+ if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
+ encoding = (encoding + '').toLowerCase()
+ loweredCase = true
+ }
+ }
+}
+
+// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)
+// to detect a Buffer instance. It's not possible to use `instanceof Buffer`
+// reliably in a browserify context because there could be multiple different
+// copies of the 'buffer' package in use. This method works even for Buffer
+// instances that were created from another copy of the `buffer` package.
+// See: https://github.com/feross/buffer/issues/154
+Buffer.prototype._isBuffer = true
+
+function swap (b, n, m) {
+ var i = b[n]
+ b[n] = b[m]
+ b[m] = i
+}
+
+Buffer.prototype.swap16 = function swap16 () {
+ var len = this.length
+ if (len % 2 !== 0) {
+ throw new RangeError('Buffer size must be a multiple of 16-bits')
+ }
+ for (var i = 0; i < len; i += 2) {
+ swap(this, i, i + 1)
+ }
+ return this
+}
+
+Buffer.prototype.swap32 = function swap32 () {
+ var len = this.length
+ if (len % 4 !== 0) {
+ throw new RangeError('Buffer size must be a multiple of 32-bits')
+ }
+ for (var i = 0; i < len; i += 4) {
+ swap(this, i, i + 3)
+ swap(this, i + 1, i + 2)
+ }
+ return this
+}
+
+Buffer.prototype.swap64 = function swap64 () {
+ var len = this.length
+ if (len % 8 !== 0) {
+ throw new RangeError('Buffer size must be a multiple of 64-bits')
+ }
+ for (var i = 0; i < len; i += 8) {
+ swap(this, i, i + 7)
+ swap(this, i + 1, i + 6)
+ swap(this, i + 2, i + 5)
+ swap(this, i + 3, i + 4)
+ }
+ return this
+}
+
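+// Byte-order sketch for the swap helpers above (illustrative):
+//
+//   Buffer.from([0x01, 0x02, 0x03, 0x04]).swap16()   // => <Buffer 02 01 04 03>
+//   Buffer.from([0x01, 0x02, 0x03, 0x04]).swap32()   // => <Buffer 04 03 02 01>
+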
+Buffer.prototype.toString = function toString () {
+ var length = this.length
+ if (length === 0) return ''
+ if (arguments.length === 0) return utf8Slice(this, 0, length)
+ return slowToString.apply(this, arguments)
+}
+
+Buffer.prototype.toLocaleString = Buffer.prototype.toString
+
+Buffer.prototype.equals = function equals (b) {
+ if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
+ if (this === b) return true
+ return Buffer.compare(this, b) === 0
+}
+
+Buffer.prototype.inspect = function inspect () {
+ var str = ''
+ var max = exports.INSPECT_MAX_BYTES
+ str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim()
+ if (this.length > max) str += ' ... '
+  return '<Buffer ' + str + '>'
+}
+if (customInspectSymbol) {
+ Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect
+}
+
+Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
+ if (isInstance(target, Uint8Array)) {
+ target = Buffer.from(target, target.offset, target.byteLength)
+ }
+ if (!Buffer.isBuffer(target)) {
+ throw new TypeError(
+ 'The "target" argument must be one of type Buffer or Uint8Array. ' +
+ 'Received type ' + (typeof target)
+ )
+ }
+
+ if (start === undefined) {
+ start = 0
+ }
+ if (end === undefined) {
+ end = target ? target.length : 0
+ }
+ if (thisStart === undefined) {
+ thisStart = 0
+ }
+ if (thisEnd === undefined) {
+ thisEnd = this.length
+ }
+
+ if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
+ throw new RangeError('out of range index')
+ }
+
+ if (thisStart >= thisEnd && start >= end) {
+ return 0
+ }
+ if (thisStart >= thisEnd) {
+ return -1
+ }
+ if (start >= end) {
+ return 1
+ }
+
+ start >>>= 0
+ end >>>= 0
+ thisStart >>>= 0
+ thisEnd >>>= 0
+
+ if (this === target) return 0
+
+ var x = thisEnd - thisStart
+ var y = end - start
+ var len = Math.min(x, y)
+
+ var thisCopy = this.slice(thisStart, thisEnd)
+ var targetCopy = target.slice(start, end)
+
+ for (var i = 0; i < len; ++i) {
+ if (thisCopy[i] !== targetCopy[i]) {
+ x = thisCopy[i]
+ y = targetCopy[i]
+ break
+ }
+ }
+
+ if (x < y) return -1
+ if (y < x) return 1
+ return 0
+}
+
+// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
+// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
+//
+// Arguments:
+// - buffer - a Buffer to search
+// - val - a string, Buffer, or number
+// - byteOffset - an index into `buffer`; will be clamped to an int32
+// - encoding - an optional encoding, relevant if val is a string
+// - dir - true for indexOf, false for lastIndexOf
+function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
+ // Empty buffer means no match
+ if (buffer.length === 0) return -1
+
+ // Normalize byteOffset
+ if (typeof byteOffset === 'string') {
+ encoding = byteOffset
+ byteOffset = 0
+ } else if (byteOffset > 0x7fffffff) {
+ byteOffset = 0x7fffffff
+ } else if (byteOffset < -0x80000000) {
+ byteOffset = -0x80000000
+ }
+ byteOffset = +byteOffset // Coerce to Number.
+ if (numberIsNaN(byteOffset)) {
+    // byteOffset: if it's undefined, null, NaN, "foo", etc, search whole buffer
+ byteOffset = dir ? 0 : (buffer.length - 1)
+ }
+
+ // Normalize byteOffset: negative offsets start from the end of the buffer
+ if (byteOffset < 0) byteOffset = buffer.length + byteOffset
+ if (byteOffset >= buffer.length) {
+ if (dir) return -1
+ else byteOffset = buffer.length - 1
+ } else if (byteOffset < 0) {
+ if (dir) byteOffset = 0
+ else return -1
+ }
+
+ // Normalize val
+ if (typeof val === 'string') {
+ val = Buffer.from(val, encoding)
+ }
+
+ // Finally, search either indexOf (if dir is true) or lastIndexOf
+ if (Buffer.isBuffer(val)) {
+ // Special case: looking for empty string/buffer always fails
+ if (val.length === 0) {
+ return -1
+ }
+ return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
+ } else if (typeof val === 'number') {
+ val = val & 0xFF // Search for a byte value [0-255]
+ if (typeof Uint8Array.prototype.indexOf === 'function') {
+ if (dir) {
+ return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
+ } else {
+ return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
+ }
+ }
+ return arrayIndexOf(buffer, [val], byteOffset, encoding, dir)
+ }
+
+ throw new TypeError('val must be string, number or Buffer')
+}
+
+function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
+ var indexSize = 1
+ var arrLength = arr.length
+ var valLength = val.length
+
+ if (encoding !== undefined) {
+ encoding = String(encoding).toLowerCase()
+ if (encoding === 'ucs2' || encoding === 'ucs-2' ||
+ encoding === 'utf16le' || encoding === 'utf-16le') {
+ if (arr.length < 2 || val.length < 2) {
+ return -1
+ }
+ indexSize = 2
+ arrLength /= 2
+ valLength /= 2
+ byteOffset /= 2
+ }
+ }
+
+ function read (buf, i) {
+ if (indexSize === 1) {
+ return buf[i]
+ } else {
+ return buf.readUInt16BE(i * indexSize)
+ }
+ }
+
+ var i
+ if (dir) {
+ var foundIndex = -1
+ for (i = byteOffset; i < arrLength; i++) {
+ if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
+ if (foundIndex === -1) foundIndex = i
+ if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
+ } else {
+ if (foundIndex !== -1) i -= i - foundIndex
+ foundIndex = -1
+ }
+ }
+ } else {
+ if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
+ for (i = byteOffset; i >= 0; i--) {
+ var found = true
+ for (var j = 0; j < valLength; j++) {
+ if (read(arr, i + j) !== read(val, j)) {
+ found = false
+ break
+ }
+ }
+ if (found) return i
+ }
+ }
+
+ return -1
+}
+
+Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
+ return this.indexOf(val, byteOffset, encoding) !== -1
+}
+
+Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
+ return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
+}
+
+Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
+ return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
+}
+
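+// Search sketch for indexOf/lastIndexOf/includes (illustrative):
+//
+//   var haystack = Buffer.from('abcabc')
+//   haystack.indexOf('b')       // => 1
+//   haystack.lastIndexOf('b')   // => 4
+//   haystack.includes(0x63)     // => true (0x63 is 'c')
+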
+function hexWrite (buf, string, offset, length) {
+ offset = Number(offset) || 0
+ var remaining = buf.length - offset
+ if (!length) {
+ length = remaining
+ } else {
+ length = Number(length)
+ if (length > remaining) {
+ length = remaining
+ }
+ }
+
+ var strLen = string.length
+
+ if (length > strLen / 2) {
+ length = strLen / 2
+ }
+ for (var i = 0; i < length; ++i) {
+ var parsed = parseInt(string.substr(i * 2, 2), 16)
+ if (numberIsNaN(parsed)) return i
+ buf[offset + i] = parsed
+ }
+ return i
+}
+
+function utf8Write (buf, string, offset, length) {
+ return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
+}
+
+function asciiWrite (buf, string, offset, length) {
+ return blitBuffer(asciiToBytes(string), buf, offset, length)
+}
+
+function base64Write (buf, string, offset, length) {
+ return blitBuffer(base64ToBytes(string), buf, offset, length)
+}
+
+function ucs2Write (buf, string, offset, length) {
+ return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
+}
+
+Buffer.prototype.write = function write (string, offset, length, encoding) {
+ // Buffer#write(string)
+ if (offset === undefined) {
+ encoding = 'utf8'
+ length = this.length
+ offset = 0
+ // Buffer#write(string, encoding)
+ } else if (length === undefined && typeof offset === 'string') {
+ encoding = offset
+ length = this.length
+ offset = 0
+ // Buffer#write(string, offset[, length][, encoding])
+ } else if (isFinite(offset)) {
+ offset = offset >>> 0
+ if (isFinite(length)) {
+ length = length >>> 0
+ if (encoding === undefined) encoding = 'utf8'
+ } else {
+ encoding = length
+ length = undefined
+ }
+ } else {
+ throw new Error(
+ 'Buffer.write(string, encoding, offset[, length]) is no longer supported'
+ )
+ }
+
+ var remaining = this.length - offset
+ if (length === undefined || length > remaining) length = remaining
+
+ if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
+ throw new RangeError('Attempt to write outside buffer bounds')
+ }
+
+ if (!encoding) encoding = 'utf8'
+
+ var loweredCase = false
+ for (;;) {
+ switch (encoding) {
+ case 'hex':
+ return hexWrite(this, string, offset, length)
+
+ case 'utf8':
+ case 'utf-8':
+ return utf8Write(this, string, offset, length)
+
+ case 'ascii':
+ case 'latin1':
+ case 'binary':
+ return asciiWrite(this, string, offset, length)
+
+ case 'base64':
+ // Warning: maxLength not taken into account in base64Write
+ return base64Write(this, string, offset, length)
+
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return ucs2Write(this, string, offset, length)
+
+ default:
+ if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
+ encoding = ('' + encoding).toLowerCase()
+ loweredCase = true
+ }
+ }
+}
+
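+// Write sketch for the supported call forms above (illustrative):
+//
+//   var buf = Buffer.alloc(8)
+//   buf.write('ab')             // => 2 bytes written (utf8 by default)
+//   buf.write('cd', 2)          // => 2 bytes written at offset 2
+//   buf.write('61', 4, 'hex')   // => 1 hex-decoded byte written at offset 4
+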
+Buffer.prototype.toJSON = function toJSON () {
+ return {
+ type: 'Buffer',
+ data: Array.prototype.slice.call(this._arr || this, 0)
+ }
+}
+
+function base64Slice (buf, start, end) {
+ if (start === 0 && end === buf.length) {
+ return base64.fromByteArray(buf)
+ } else {
+ return base64.fromByteArray(buf.slice(start, end))
+ }
+}
+
+function utf8Slice (buf, start, end) {
+ end = Math.min(buf.length, end)
+ var res = []
+
+ var i = start
+ while (i < end) {
+ var firstByte = buf[i]
+ var codePoint = null
+ var bytesPerSequence = (firstByte > 0xEF)
+ ? 4
+ : (firstByte > 0xDF)
+ ? 3
+ : (firstByte > 0xBF)
+ ? 2
+ : 1
+
+ if (i + bytesPerSequence <= end) {
+ var secondByte, thirdByte, fourthByte, tempCodePoint
+
+ switch (bytesPerSequence) {
+ case 1:
+ if (firstByte < 0x80) {
+ codePoint = firstByte
+ }
+ break
+ case 2:
+ secondByte = buf[i + 1]
+ if ((secondByte & 0xC0) === 0x80) {
+ tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)
+ if (tempCodePoint > 0x7F) {
+ codePoint = tempCodePoint
+ }
+ }
+ break
+ case 3:
+ secondByte = buf[i + 1]
+ thirdByte = buf[i + 2]
+ if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
+ tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)
+ if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
+ codePoint = tempCodePoint
+ }
+ }
+ break
+ case 4:
+ secondByte = buf[i + 1]
+ thirdByte = buf[i + 2]
+ fourthByte = buf[i + 3]
+ if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
+ tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)
+ if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
+ codePoint = tempCodePoint
+ }
+ }
+ }
+ }
+
+ if (codePoint === null) {
+ // we did not generate a valid codePoint so insert a
+ // replacement char (U+FFFD) and advance only 1 byte
+ codePoint = 0xFFFD
+ bytesPerSequence = 1
+ } else if (codePoint > 0xFFFF) {
+ // encode to utf16 (surrogate pair dance)
+ codePoint -= 0x10000
+ res.push(codePoint >>> 10 & 0x3FF | 0xD800)
+ codePoint = 0xDC00 | codePoint & 0x3FF
+ }
+
+ res.push(codePoint)
+ i += bytesPerSequence
+ }
+
+ return decodeCodePointsArray(res)
+}
+
+// Based on http://stackoverflow.com/a/22747272/680742, the browser with
+// the lowest limit is Chrome, with 0x10000 args.
+// We go 1 magnitude less, for safety
+var MAX_ARGUMENTS_LENGTH = 0x1000
+
+function decodeCodePointsArray (codePoints) {
+ var len = codePoints.length
+ if (len <= MAX_ARGUMENTS_LENGTH) {
+ return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
+ }
+
+ // Decode in chunks to avoid "call stack size exceeded".
+ var res = ''
+ var i = 0
+ while (i < len) {
+ res += String.fromCharCode.apply(
+ String,
+ codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
+ )
+ }
+ return res
+}
+
+function asciiSlice (buf, start, end) {
+ var ret = ''
+ end = Math.min(buf.length, end)
+
+ for (var i = start; i < end; ++i) {
+ ret += String.fromCharCode(buf[i] & 0x7F)
+ }
+ return ret
+}
+
+function latin1Slice (buf, start, end) {
+ var ret = ''
+ end = Math.min(buf.length, end)
+
+ for (var i = start; i < end; ++i) {
+ ret += String.fromCharCode(buf[i])
+ }
+ return ret
+}
+
+function hexSlice (buf, start, end) {
+ var len = buf.length
+
+ if (!start || start < 0) start = 0
+ if (!end || end < 0 || end > len) end = len
+
+ var out = ''
+ for (var i = start; i < end; ++i) {
+ out += hexSliceLookupTable[buf[i]]
+ }
+ return out
+}
+
+function utf16leSlice (buf, start, end) {
+ var bytes = buf.slice(start, end)
+ var res = ''
+ // If bytes.length is odd, the last 8 bits must be ignored (same as node.js)
+ for (var i = 0; i < bytes.length - 1; i += 2) {
+ res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256))
+ }
+ return res
+}
+
+Buffer.prototype.slice = function slice (start, end) {
+ var len = this.length
+ start = ~~start
+ end = end === undefined ? len : ~~end
+
+ if (start < 0) {
+ start += len
+ if (start < 0) start = 0
+ } else if (start > len) {
+ start = len
+ }
+
+ if (end < 0) {
+ end += len
+ if (end < 0) end = 0
+ } else if (end > len) {
+ end = len
+ }
+
+ if (end < start) end = start
+
+ var newBuf = this.subarray(start, end)
+ // Return an augmented `Uint8Array` instance
+ Object.setPrototypeOf(newBuf, Buffer.prototype)
+
+ return newBuf
+}
+
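+// Sketch of slice semantics (illustrative): the result is a view built on
+// subarray above, so it shares memory with the parent buffer.
+//
+//   var buf = Buffer.from([1, 2, 3, 4])
+//   var tail = buf.slice(2)   // <Buffer 03 04>
+//   tail[0] = 9
+//   buf[2]                    // => 9
+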
+/*
+ * Need to make sure that buffer isn't trying to write out of bounds.
+ */
+function checkOffset (offset, ext, length) {
+ if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
+ if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
+}
+
+Buffer.prototype.readUintLE =
+Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
+ offset = offset >>> 0
+ byteLength = byteLength >>> 0
+ if (!noAssert) checkOffset(offset, byteLength, this.length)
+
+ var val = this[offset]
+ var mul = 1
+ var i = 0
+ while (++i < byteLength && (mul *= 0x100)) {
+ val += this[offset + i] * mul
+ }
+
+ return val
+}
+
+Buffer.prototype.readUintBE =
+Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
+ offset = offset >>> 0
+ byteLength = byteLength >>> 0
+ if (!noAssert) {
+ checkOffset(offset, byteLength, this.length)
+ }
+
+ var val = this[offset + --byteLength]
+ var mul = 1
+ while (byteLength > 0 && (mul *= 0x100)) {
+ val += this[offset + --byteLength] * mul
+ }
+
+ return val
+}
+
+Buffer.prototype.readUint8 =
+Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 1, this.length)
+ return this[offset]
+}
+
+Buffer.prototype.readUint16LE =
+Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ return this[offset] | (this[offset + 1] << 8)
+}
+
+Buffer.prototype.readUint16BE =
+Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ return (this[offset] << 8) | this[offset + 1]
+}
+
+Buffer.prototype.readUint32LE =
+Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return ((this[offset]) |
+ (this[offset + 1] << 8) |
+ (this[offset + 2] << 16)) +
+ (this[offset + 3] * 0x1000000)
+}
+
+Buffer.prototype.readUint32BE =
+Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return (this[offset] * 0x1000000) +
+ ((this[offset + 1] << 16) |
+ (this[offset + 2] << 8) |
+ this[offset + 3])
+}
+
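+// Endianness sketch for the unsigned 32-bit readers above (illustrative):
+//
+//   var buf = Buffer.from([0x12, 0x34, 0x56, 0x78])
+//   buf.readUInt32LE(0).toString(16)   // => '78563412'
+//   buf.readUInt32BE(0).toString(16)   // => '12345678'
+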
+Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
+ offset = offset >>> 0
+ byteLength = byteLength >>> 0
+ if (!noAssert) checkOffset(offset, byteLength, this.length)
+
+ var val = this[offset]
+ var mul = 1
+ var i = 0
+ while (++i < byteLength && (mul *= 0x100)) {
+ val += this[offset + i] * mul
+ }
+ mul *= 0x80
+
+ if (val >= mul) val -= Math.pow(2, 8 * byteLength)
+
+ return val
+}
+
+Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
+ offset = offset >>> 0
+ byteLength = byteLength >>> 0
+ if (!noAssert) checkOffset(offset, byteLength, this.length)
+
+ var i = byteLength
+ var mul = 1
+ var val = this[offset + --i]
+ while (i > 0 && (mul *= 0x100)) {
+ val += this[offset + --i] * mul
+ }
+ mul *= 0x80
+
+ if (val >= mul) val -= Math.pow(2, 8 * byteLength)
+
+ return val
+}
+
+Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 1, this.length)
+ if (!(this[offset] & 0x80)) return (this[offset])
+ return ((0xff - this[offset] + 1) * -1)
+}
+
+Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ var val = this[offset] | (this[offset + 1] << 8)
+ return (val & 0x8000) ? val | 0xFFFF0000 : val
+}
+
+Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ var val = this[offset + 1] | (this[offset] << 8)
+ return (val & 0x8000) ? val | 0xFFFF0000 : val
+}
+
+Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return (this[offset]) |
+ (this[offset + 1] << 8) |
+ (this[offset + 2] << 16) |
+ (this[offset + 3] << 24)
+}
+
+Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return (this[offset] << 24) |
+ (this[offset + 1] << 16) |
+ (this[offset + 2] << 8) |
+ (this[offset + 3])
+}
+
+Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 4, this.length)
+ return ieee754.read(this, offset, true, 23, 4)
+}
+
+Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 4, this.length)
+ return ieee754.read(this, offset, false, 23, 4)
+}
+
+Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 8, this.length)
+ return ieee754.read(this, offset, true, 52, 8)
+}
+
+Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
+ offset = offset >>> 0
+ if (!noAssert) checkOffset(offset, 8, this.length)
+ return ieee754.read(this, offset, false, 52, 8)
+}
+
+function checkInt (buf, value, offset, ext, max, min) {
+ if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
+ if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
+ if (offset + ext > buf.length) throw new RangeError('Index out of range')
+}
+
+Buffer.prototype.writeUintLE =
+Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ byteLength = byteLength >>> 0
+ if (!noAssert) {
+ var maxBytes = Math.pow(2, 8 * byteLength) - 1
+ checkInt(this, value, offset, byteLength, maxBytes, 0)
+ }
+
+ var mul = 1
+ var i = 0
+ this[offset] = value & 0xFF
+ while (++i < byteLength && (mul *= 0x100)) {
+ this[offset + i] = (value / mul) & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeUintBE =
+Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ byteLength = byteLength >>> 0
+ if (!noAssert) {
+ var maxBytes = Math.pow(2, 8 * byteLength) - 1
+ checkInt(this, value, offset, byteLength, maxBytes, 0)
+ }
+
+ var i = byteLength - 1
+ var mul = 1
+ this[offset + i] = value & 0xFF
+ while (--i >= 0 && (mul *= 0x100)) {
+ this[offset + i] = (value / mul) & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeUint8 =
+Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
+ this[offset] = (value & 0xff)
+ return offset + 1
+}
+
+Buffer.prototype.writeUint16LE =
+Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
+ this[offset] = (value & 0xff)
+ this[offset + 1] = (value >>> 8)
+ return offset + 2
+}
+
+Buffer.prototype.writeUint16BE =
+Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
+ this[offset] = (value >>> 8)
+ this[offset + 1] = (value & 0xff)
+ return offset + 2
+}
+
+Buffer.prototype.writeUint32LE =
+Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
+ this[offset + 3] = (value >>> 24)
+ this[offset + 2] = (value >>> 16)
+ this[offset + 1] = (value >>> 8)
+ this[offset] = (value & 0xff)
+ return offset + 4
+}
+
+Buffer.prototype.writeUint32BE =
+Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
+ this[offset] = (value >>> 24)
+ this[offset + 1] = (value >>> 16)
+ this[offset + 2] = (value >>> 8)
+ this[offset + 3] = (value & 0xff)
+ return offset + 4
+}
+
+Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) {
+ var limit = Math.pow(2, (8 * byteLength) - 1)
+
+ checkInt(this, value, offset, byteLength, limit - 1, -limit)
+ }
+
+ var i = 0
+ var mul = 1
+ var sub = 0
+ this[offset] = value & 0xFF
+ while (++i < byteLength && (mul *= 0x100)) {
+ if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
+ sub = 1
+ }
+ this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) {
+ var limit = Math.pow(2, (8 * byteLength) - 1)
+
+ checkInt(this, value, offset, byteLength, limit - 1, -limit)
+ }
+
+ var i = byteLength - 1
+ var mul = 1
+ var sub = 0
+ this[offset + i] = value & 0xFF
+ while (--i >= 0 && (mul *= 0x100)) {
+ if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
+ sub = 1
+ }
+ this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
+ if (value < 0) value = 0xff + value + 1
+ this[offset] = (value & 0xff)
+ return offset + 1
+}
+
+Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
+ this[offset] = (value & 0xff)
+ this[offset + 1] = (value >>> 8)
+ return offset + 2
+}
+
+Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
+ this[offset] = (value >>> 8)
+ this[offset + 1] = (value & 0xff)
+ return offset + 2
+}
+
+Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
+ this[offset] = (value & 0xff)
+ this[offset + 1] = (value >>> 8)
+ this[offset + 2] = (value >>> 16)
+ this[offset + 3] = (value >>> 24)
+ return offset + 4
+}
+
+Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
+ if (value < 0) value = 0xffffffff + value + 1
+ this[offset] = (value >>> 24)
+ this[offset + 1] = (value >>> 16)
+ this[offset + 2] = (value >>> 8)
+ this[offset + 3] = (value & 0xff)
+ return offset + 4
+}
+
+function checkIEEE754 (buf, value, offset, ext, max, min) {
+ if (offset + ext > buf.length) throw new RangeError('Index out of range')
+ if (offset < 0) throw new RangeError('Index out of range')
+}
+
+function writeFloat (buf, value, offset, littleEndian, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) {
+ checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
+ }
+ ieee754.write(buf, value, offset, littleEndian, 23, 4)
+ return offset + 4
+}
+
+Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
+ return writeFloat(this, value, offset, true, noAssert)
+}
+
+Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
+ return writeFloat(this, value, offset, false, noAssert)
+}
+
+function writeDouble (buf, value, offset, littleEndian, noAssert) {
+ value = +value
+ offset = offset >>> 0
+ if (!noAssert) {
+ checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
+ }
+ ieee754.write(buf, value, offset, littleEndian, 52, 8)
+ return offset + 8
+}
+
+Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
+ return writeDouble(this, value, offset, true, noAssert)
+}
+
+Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
+ return writeDouble(this, value, offset, false, noAssert)
+}
+
+// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
+Buffer.prototype.copy = function copy (target, targetStart, start, end) {
+ if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer')
+ if (!start) start = 0
+ if (!end && end !== 0) end = this.length
+ if (targetStart >= target.length) targetStart = target.length
+ if (!targetStart) targetStart = 0
+ if (end > 0 && end < start) end = start
+
+ // Copy 0 bytes; we're done
+ if (end === start) return 0
+ if (target.length === 0 || this.length === 0) return 0
+
+ // Fatal error conditions
+ if (targetStart < 0) {
+ throw new RangeError('targetStart out of bounds')
+ }
+ if (start < 0 || start >= this.length) throw new RangeError('Index out of range')
+ if (end < 0) throw new RangeError('sourceEnd out of bounds')
+
+ // Are we oob?
+ if (end > this.length) end = this.length
+ if (target.length - targetStart < end - start) {
+ end = target.length - targetStart + start
+ }
+
+ var len = end - start
+
+ if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') {
+ // Use built-in when available, missing from IE11
+ this.copyWithin(targetStart, start, end)
+ } else {
+ Uint8Array.prototype.set.call(
+ target,
+ this.subarray(start, end),
+ targetStart
+ )
+ }
+
+ return len
+}
+
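+// Copy sketch (illustrative): the return value is the number of bytes copied.
+//
+//   var src = Buffer.from([1, 2, 3])
+//   var dst = Buffer.alloc(5)
+//   src.copy(dst, 1)   // => 3; dst is now <Buffer 00 01 02 03 00>
+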
+// Usage:
+// buffer.fill(number[, offset[, end]])
+// buffer.fill(buffer[, offset[, end]])
+// buffer.fill(string[, offset[, end]][, encoding])
+Buffer.prototype.fill = function fill (val, start, end, encoding) {
+ // Handle string cases:
+ if (typeof val === 'string') {
+ if (typeof start === 'string') {
+ encoding = start
+ start = 0
+ end = this.length
+ } else if (typeof end === 'string') {
+ encoding = end
+ end = this.length
+ }
+ if (encoding !== undefined && typeof encoding !== 'string') {
+ throw new TypeError('encoding must be a string')
+ }
+ if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
+ throw new TypeError('Unknown encoding: ' + encoding)
+ }
+ if (val.length === 1) {
+ var code = val.charCodeAt(0)
+ if ((encoding === 'utf8' && code < 128) ||
+ encoding === 'latin1') {
+ // Fast path: If `val` fits into a single byte, use that numeric value.
+ val = code
+ }
+ }
+ } else if (typeof val === 'number') {
+ val = val & 255
+ } else if (typeof val === 'boolean') {
+ val = Number(val)
+ }
+
+ // Invalid ranges are not set to a default, so can range check early.
+ if (start < 0 || this.length < start || this.length < end) {
+ throw new RangeError('Out of range index')
+ }
+
+ if (end <= start) {
+ return this
+ }
+
+ start = start >>> 0
+ end = end === undefined ? this.length : end >>> 0
+
+ if (!val) val = 0
+
+ var i
+ if (typeof val === 'number') {
+ for (i = start; i < end; ++i) {
+ this[i] = val
+ }
+ } else {
+ var bytes = Buffer.isBuffer(val)
+ ? val
+ : Buffer.from(val, encoding)
+ var len = bytes.length
+ if (len === 0) {
+ throw new TypeError('The value "' + val +
+ '" is invalid for argument "value"')
+ }
+ for (i = 0; i < end - start; ++i) {
+ this[i + start] = bytes[i % len]
+ }
+ }
+
+ return this
+}
+
+// HELPER FUNCTIONS
+// ================
+
+var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g
+
+function base64clean (str) {
+ // Node takes equal signs as end of the Base64 encoding
+ str = str.split('=')[0]
+ // Node strips out invalid characters like \n and \t from the string, base64-js does not
+ str = str.trim().replace(INVALID_BASE64_RE, '')
+ // Node converts strings with length < 2 to ''
+ if (str.length < 2) return ''
+ // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
+ while (str.length % 4 !== 0) {
+ str = str + '='
+ }
+ return str
+}
+
+function utf8ToBytes (string, units) {
+ units = units || Infinity
+ var codePoint
+ var length = string.length
+ var leadSurrogate = null
+ var bytes = []
+
+ for (var i = 0; i < length; ++i) {
+ codePoint = string.charCodeAt(i)
+
+ // is surrogate component
+ if (codePoint > 0xD7FF && codePoint < 0xE000) {
+ // last char was a lead
+ if (!leadSurrogate) {
+ // no lead yet
+ if (codePoint > 0xDBFF) {
+ // unexpected trail
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ continue
+ } else if (i + 1 === length) {
+ // unpaired lead
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ continue
+ }
+
+ // valid lead
+ leadSurrogate = codePoint
+
+ continue
+ }
+
+ // 2 leads in a row
+ if (codePoint < 0xDC00) {
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ leadSurrogate = codePoint
+ continue
+ }
+
+ // valid surrogate pair
+ codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000
+ } else if (leadSurrogate) {
+ // valid bmp char, but last char was a lead
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ }
+
+ leadSurrogate = null
+
+ // encode utf8
+ if (codePoint < 0x80) {
+ if ((units -= 1) < 0) break
+ bytes.push(codePoint)
+ } else if (codePoint < 0x800) {
+ if ((units -= 2) < 0) break
+ bytes.push(
+ codePoint >> 0x6 | 0xC0,
+ codePoint & 0x3F | 0x80
+ )
+ } else if (codePoint < 0x10000) {
+ if ((units -= 3) < 0) break
+ bytes.push(
+ codePoint >> 0xC | 0xE0,
+ codePoint >> 0x6 & 0x3F | 0x80,
+ codePoint & 0x3F | 0x80
+ )
+ } else if (codePoint < 0x110000) {
+ if ((units -= 4) < 0) break
+ bytes.push(
+ codePoint >> 0x12 | 0xF0,
+ codePoint >> 0xC & 0x3F | 0x80,
+ codePoint >> 0x6 & 0x3F | 0x80,
+ codePoint & 0x3F | 0x80
+ )
+ } else {
+ throw new Error('Invalid code point')
+ }
+ }
+
+ return bytes
+}
+
+function asciiToBytes (str) {
+ var byteArray = []
+ for (var i = 0; i < str.length; ++i) {
+ // Node's code seems to be doing this and not & 0x7F..
+ byteArray.push(str.charCodeAt(i) & 0xFF)
+ }
+ return byteArray
+}
+
+function utf16leToBytes (str, units) {
+ var c, hi, lo
+ var byteArray = []
+ for (var i = 0; i < str.length; ++i) {
+ if ((units -= 2) < 0) break
+
+ c = str.charCodeAt(i)
+ hi = c >> 8
+ lo = c % 256
+ byteArray.push(lo)
+ byteArray.push(hi)
+ }
+
+ return byteArray
+}
+
+function base64ToBytes (str) {
+ return base64.toByteArray(base64clean(str))
+}
+
+function blitBuffer (src, dst, offset, length) {
+ for (var i = 0; i < length; ++i) {
+ if ((i + offset >= dst.length) || (i >= src.length)) break
+ dst[i + offset] = src[i]
+ }
+ return i
+}
+
+// ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass
+// the `instanceof` check but they should be treated as of that type.
+// See: https://github.com/feross/buffer/issues/166
+function isInstance (obj, type) {
+ return obj instanceof type ||
+ (obj != null && obj.constructor != null && obj.constructor.name != null &&
+ obj.constructor.name === type.name)
+}
+function numberIsNaN (obj) {
+ // For IE11 support
+ return obj !== obj // eslint-disable-line no-self-compare
+}
+
+// Create lookup table for `toString('hex')`
+// See: https://github.com/feross/buffer/issues/219
+var hexSliceLookupTable = (function () {
+ var alphabet = '0123456789abcdef'
+ var table = new Array(256)
+ for (var i = 0; i < 16; ++i) {
+ var i16 = i * 16
+ for (var j = 0; j < 16; ++j) {
+ table[i16 + j] = alphabet[i] + alphabet[j]
+ }
+ }
+ return table
+})()
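
As a brief illustrative aside (not drawn from the vendored file), the sketch below shows the behaviour of the `copy()` and `fill()` implementations above; it assumes the module is loaded as the `buffer` npm package, whose API mirrors Node's built-in `Buffer`:

```js
// Sketch only: exercises the copy()/fill() semantics implemented above.
var Buffer = require('buffer').Buffer

var src = Buffer.from('abcdef')
var dst = Buffer.alloc(10)
dst.fill('.')               // string fill: '.' is encoded and repeated across the whole range
src.copy(dst, 2, 1, 4)      // copy src bytes 1..3 ('bcd') into dst starting at offset 2
console.log(dst.toString()) // '..bcd.....'
```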
diff --git a/node_modules/buffer/package.json b/node_modules/buffer/package.json
new file mode 100644
index 00000000..3b1b4986
--- /dev/null
+++ b/node_modules/buffer/package.json
@@ -0,0 +1,96 @@
+{
+ "name": "buffer",
+ "description": "Node.js Buffer API, for the browser",
+ "version": "5.7.1",
+ "author": {
+ "name": "Feross Aboukhadijeh",
+ "email": "feross@feross.org",
+ "url": "https://feross.org"
+ },
+ "bugs": {
+ "url": "https://github.com/feross/buffer/issues"
+ },
+ "contributors": [
+ "Romain Beauxis ",
+ "James Halliday "
+ ],
+ "dependencies": {
+ "base64-js": "^1.3.1",
+ "ieee754": "^1.1.13"
+ },
+ "devDependencies": {
+ "airtap": "^3.0.0",
+ "benchmark": "^2.1.4",
+ "browserify": "^17.0.0",
+ "concat-stream": "^2.0.0",
+ "hyperquest": "^2.1.3",
+ "is-buffer": "^2.0.4",
+ "is-nan": "^1.3.0",
+ "split": "^1.0.1",
+ "standard": "*",
+ "tape": "^5.0.1",
+ "through2": "^4.0.2",
+ "uglify-js": "^3.11.3"
+ },
+ "homepage": "https://github.com/feross/buffer",
+ "jspm": {
+ "map": {
+ "./index.js": {
+ "node": "@node/buffer"
+ }
+ }
+ },
+ "keywords": [
+ "arraybuffer",
+ "browser",
+ "browserify",
+ "buffer",
+ "compatible",
+ "dataview",
+ "uint8array"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "types": "index.d.ts",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/feross/buffer.git"
+ },
+ "scripts": {
+ "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html",
+ "perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js",
+ "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c",
+ "test": "standard && node ./bin/test.js",
+ "test-browser-es5": "airtap -- test/*.js",
+ "test-browser-es5-local": "airtap --local -- test/*.js",
+ "test-browser-es6": "airtap -- test/*.js test/node/*.js",
+ "test-browser-es6-local": "airtap --local -- test/*.js test/node/*.js",
+ "test-node": "tape test/*.js test/node/*.js",
+ "update-authors": "./bin/update-authors.sh"
+ },
+ "standard": {
+ "ignore": [
+ "test/node/**/*.js",
+ "test/common.js",
+ "test/_polyfill.js",
+ "perf/**/*.js"
+ ],
+ "globals": [
+ "SharedArrayBuffer"
+ ]
+ },
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+}
diff --git a/node_modules/bytes/History.md b/node_modules/bytes/History.md
new file mode 100644
index 00000000..d60ce0e6
--- /dev/null
+++ b/node_modules/bytes/History.md
@@ -0,0 +1,97 @@
+3.1.2 / 2022-01-27
+==================
+
+ * Fix return value for un-parsable strings
+
+3.1.1 / 2021-11-15
+==================
+
+ * Fix "thousandsSeparator" incorrecting formatting fractional part
+
+3.1.0 / 2019-01-22
+==================
+
+ * Add petabyte (`pb`) support
+
+3.0.0 / 2017-08-31
+==================
+
+ * Change "kB" to "KB" in format output
+ * Remove support for Node.js 0.6
+ * Remove support for ComponentJS
+
+2.5.0 / 2017-03-24
+==================
+
+ * Add option "unit"
+
+2.4.0 / 2016-06-01
+==================
+
+ * Add option "unitSeparator"
+
+2.3.0 / 2016-02-15
+==================
+
+ * Drop partial bytes on all parsed units
+ * Fix non-finite numbers to `.format` to return `null`
+ * Fix parsing byte string that looks like hex
+ * perf: hoist regular expressions
+
+2.2.0 / 2015-11-13
+==================
+
+ * add option "decimalPlaces"
+ * add option "fixedDecimals"
+
+2.1.0 / 2015-05-21
+==================
+
+ * add `.format` export
+ * add `.parse` export
+
+2.0.2 / 2015-05-20
+==================
+
+ * remove map recreation
+ * remove unnecessary object construction
+
+2.0.1 / 2015-05-07
+==================
+
+ * fix browserify require
+ * remove node.extend dependency
+
+2.0.0 / 2015-04-12
+==================
+
+ * add option "case"
+ * add option "thousandsSeparator"
+ * return "null" on invalid parse input
+ * support proper round-trip: bytes(bytes(num)) === num
+ * units no longer case sensitive when parsing
+
+1.0.0 / 2014-05-05
+==================
+
+ * add negative support. fixes #6
+
+0.3.0 / 2014-03-19
+==================
+
+ * added terabyte support
+
+0.2.1 / 2013-04-01
+==================
+
+ * add .component
+
+0.2.0 / 2012-10-28
+==================
+
+ * bytes(200).should.eql('200b')
+
+0.1.0 / 2012-07-04
+==================
+
+ * add bytes to string conversion [yields]
diff --git a/node_modules/bytes/LICENSE b/node_modules/bytes/LICENSE
new file mode 100644
index 00000000..63e95a96
--- /dev/null
+++ b/node_modules/bytes/LICENSE
@@ -0,0 +1,23 @@
+(The MIT License)
+
+Copyright (c) 2012-2014 TJ Holowaychuk
+Copyright (c) 2015 Jed Watson
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/bytes/Readme.md b/node_modules/bytes/Readme.md
new file mode 100644
index 00000000..5790e23e
--- /dev/null
+++ b/node_modules/bytes/Readme.md
@@ -0,0 +1,152 @@
+# Bytes utility
+
+[![NPM Version][npm-image]][npm-url]
+[![NPM Downloads][downloads-image]][downloads-url]
+[![Build Status][ci-image]][ci-url]
+[![Test Coverage][coveralls-image]][coveralls-url]
+
+Utility to parse a bytes string (e.g. `1TB`) into bytes (`1099511627776`) and vice-versa.
+
+## Installation
+
+This is a [Node.js](https://nodejs.org/en/) module available through the
+[npm registry](https://www.npmjs.com/). Installation is done using the
+[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
+
+```bash
+$ npm install bytes
+```
+
+## Usage
+
+```js
+var bytes = require('bytes');
+```
+
+#### bytes(number|string value, [options]): number|string|null
+
+Default export function. Delegates to either `bytes.format` or `bytes.parse` based on the type of `value`.
+
+**Arguments**
+
+| Name | Type | Description |
+|---------|----------|--------------------|
+| value   | `number`\|`string` | Number value to format or string value to parse |
+| options | `Object` | Conversion options for `format` |
+
+**Returns**
+
+| Name | Type | Description |
+|---------|------------------|-------------------------------------------------|
+| results | `string`\|`number`\|`null` | Return null upon error. Numeric value in bytes, or string value otherwise. |
+
+**Example**
+
+```js
+bytes(1024);
+// output: '1KB'
+
+bytes('1KB');
+// output: 1024
+```
+
+#### bytes.format(number value, [options]): string|null
+
+Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is
+ rounded.
+
+**Arguments**
+
+| Name | Type | Description |
+|---------|----------|--------------------|
+| value | `number` | Value in bytes |
+| options | `Object` | Conversion options |
+
+**Options**
+
+| Property | Type | Description |
+|-------------------|--------|-----------------------------------------------------------------------------------------|
+| decimalPlaces | `number`\|`null` | Maximum number of decimal places to include in output. Defaults to `2`. |
+| fixedDecimals | `boolean`\|`null` | Whether to always display the maximum number of decimal places. Defaults to `false`. |
+| thousandsSeparator | `string`\|`null` | Example of values: `' '`, `','` and `'.'`... Defaults to `''`. |
+| unit | `string`\|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Defaults to `''` (which means auto detect). |
+| unitSeparator | `string`\|`null` | Separator to use between number and unit. Defaults to `''`. |
+
+**Returns**
+
+| Name | Type | Description |
+|---------|------------------|-------------------------------------------------|
+| results | `string`\|`null` | Return null upon error. String value otherwise. |
+
+**Example**
+
+```js
+bytes.format(1024);
+// output: '1KB'
+
+bytes.format(1000);
+// output: '1000B'
+
+bytes.format(1000, {thousandsSeparator: ' '});
+// output: '1 000B'
+
+bytes.format(1024 * 1.7, {decimalPlaces: 0});
+// output: '2KB'
+
+bytes.format(1024, {unitSeparator: ' '});
+// output: '1 KB'
+```
+
+#### bytes.parse(string|number value): number|null
+
+Parse the string value into an integer in bytes. If no unit is given, or `value`
+is a number, it is assumed the value is in bytes.
+
+Supported units and abbreviations are as follows and are case-insensitive:
+
+ * `b` for bytes
+ * `kb` for kilobytes
+ * `mb` for megabytes
+ * `gb` for gigabytes
+ * `tb` for terabytes
+ * `pb` for petabytes
+
+The units are in powers of two, not ten. This means 1kb = 1024b according to this parser.
+
+**Arguments**
+
+| Name | Type | Description |
+|---------------|--------|--------------------|
+| value         | `string`\|`number` | String to parse, or number in bytes. |
+
+**Returns**
+
+| Name | Type | Description |
+|---------|-------------|-------------------------|
+| results | `number`\|`null` | Return null upon error. Value in bytes otherwise. |
+
+**Example**
+
+```js
+bytes.parse('1KB');
+// output: 1024
+
+bytes.parse('1024');
+// output: 1024
+
+bytes.parse(1024);
+// output: 1024
+```
+
+## License
+
+[MIT](LICENSE)
+
+[ci-image]: https://badgen.net/github/checks/visionmedia/bytes.js/master?label=ci
+[ci-url]: https://github.com/visionmedia/bytes.js/actions?query=workflow%3Aci
+[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master
+[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master
+[downloads-image]: https://badgen.net/npm/dm/bytes
+[downloads-url]: https://npmjs.org/package/bytes
+[npm-image]: https://badgen.net/npm/v/bytes
+[npm-url]: https://npmjs.org/package/bytes
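
As a supplementary sketch (not part of the upstream Readme), the `format` options documented above can also be combined; the results follow from the option descriptions and the power-of-two unit map:

```js
var bytes = require('bytes');

// Force a specific unit instead of letting format() auto-detect one.
bytes.format(1024 * 1024, {unit: 'KB'});
// output: '1024KB'

// Combine decimalPlaces, fixedDecimals and unitSeparator.
bytes.format(1536, {decimalPlaces: 1, fixedDecimals: true, unitSeparator: ' '});
// output: '1.5 KB'

// thousandsSeparator applies to the integer part only.
bytes.format(10 * 1024 * 1024, {unit: 'KB', thousandsSeparator: ','});
// output: '10,240KB'
```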
diff --git a/node_modules/bytes/index.js b/node_modules/bytes/index.js
new file mode 100644
index 00000000..6f2d0f89
--- /dev/null
+++ b/node_modules/bytes/index.js
@@ -0,0 +1,170 @@
+/*!
+ * bytes
+ * Copyright(c) 2012-2014 TJ Holowaychuk
+ * Copyright(c) 2015 Jed Watson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = bytes;
+module.exports.format = format;
+module.exports.parse = parse;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g;
+
+var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/;
+
+var map = {
+ b: 1,
+ kb: 1 << 10,
+ mb: 1 << 20,
+ gb: 1 << 30,
+ tb: Math.pow(1024, 4),
+ pb: Math.pow(1024, 5),
+};
+
+var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i;
+
+/**
+ * Convert the given value in bytes into a string, or parse a string into an integer in bytes.
+ *
+ * @param {string|number} value
+ * @param {{
+ * case: [string],
+ * decimalPlaces: [number]
+ * fixedDecimals: [boolean]
+ * thousandsSeparator: [string]
+ * unitSeparator: [string]
+ * }} [options] bytes options.
+ *
+ * @returns {string|number|null}
+ */
+
+function bytes(value, options) {
+ if (typeof value === 'string') {
+ return parse(value);
+ }
+
+ if (typeof value === 'number') {
+ return format(value, options);
+ }
+
+ return null;
+}
+
+/**
+ * Format the given value in bytes into a string.
+ *
+ * If the value is negative, it is kept as such. If it is a float,
+ * it is rounded.
+ *
+ * @param {number} value
+ * @param {object} [options]
+ * @param {number} [options.decimalPlaces=2]
+ * @param {number} [options.fixedDecimals=false]
+ * @param {string} [options.thousandsSeparator=]
+ * @param {string} [options.unit=]
+ * @param {string} [options.unitSeparator=]
+ *
+ * @returns {string|null}
+ * @public
+ */
+
+function format(value, options) {
+ if (!Number.isFinite(value)) {
+ return null;
+ }
+
+ var mag = Math.abs(value);
+ var thousandsSeparator = (options && options.thousandsSeparator) || '';
+ var unitSeparator = (options && options.unitSeparator) || '';
+ var decimalPlaces = (options && options.decimalPlaces !== undefined) ? options.decimalPlaces : 2;
+ var fixedDecimals = Boolean(options && options.fixedDecimals);
+ var unit = (options && options.unit) || '';
+
+ if (!unit || !map[unit.toLowerCase()]) {
+ if (mag >= map.pb) {
+ unit = 'PB';
+ } else if (mag >= map.tb) {
+ unit = 'TB';
+ } else if (mag >= map.gb) {
+ unit = 'GB';
+ } else if (mag >= map.mb) {
+ unit = 'MB';
+ } else if (mag >= map.kb) {
+ unit = 'KB';
+ } else {
+ unit = 'B';
+ }
+ }
+
+ var val = value / map[unit.toLowerCase()];
+ var str = val.toFixed(decimalPlaces);
+
+ if (!fixedDecimals) {
+ str = str.replace(formatDecimalsRegExp, '$1');
+ }
+
+ if (thousandsSeparator) {
+ str = str.split('.').map(function (s, i) {
+ return i === 0
+ ? s.replace(formatThousandsRegExp, thousandsSeparator)
+ : s
+ }).join('.');
+ }
+
+ return str + unitSeparator + unit;
+}
+
+/**
+ * Parse the string value into an integer in bytes.
+ *
+ * If no unit is given, it is assumed the value is in bytes.
+ *
+ * @param {number|string} val
+ *
+ * @returns {number|null}
+ * @public
+ */
+
+function parse(val) {
+ if (typeof val === 'number' && !isNaN(val)) {
+ return val;
+ }
+
+ if (typeof val !== 'string') {
+ return null;
+ }
+
+ // Test if the string passed is valid
+ var results = parseRegExp.exec(val);
+ var floatValue;
+ var unit = 'b';
+
+ if (!results) {
+ // Nothing could be extracted from the given string
+ floatValue = parseInt(val, 10);
+ unit = 'b'
+ } else {
+ // Retrieve the value and the unit
+ floatValue = parseFloat(results[1]);
+ unit = results[4].toLowerCase();
+ }
+
+ if (isNaN(floatValue)) {
+ return null;
+ }
+
+ return Math.floor(map[unit] * floatValue);
+}
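
A few hedged examples of what the `parse()` implementation above produces, following its regular expression and the final `Math.floor`:

```js
var bytes = require('bytes');

bytes.parse('1KB');      // 1024 — units are matched case-insensitively
bytes.parse('1.5 mb');   // 1572864 — optional spaces before the unit are allowed by parseRegExp
bytes.parse('1.0001kb'); // 1024 — partial bytes are dropped by Math.floor
bytes.parse('-1gb');     // -1073741824 — signed values are accepted
bytes.parse('foo');      // null — un-parsable strings return null
```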
diff --git a/node_modules/bytes/package.json b/node_modules/bytes/package.json
new file mode 100644
index 00000000..f2b6a8b0
--- /dev/null
+++ b/node_modules/bytes/package.json
@@ -0,0 +1,42 @@
+{
+ "name": "bytes",
+ "description": "Utility to parse a string bytes to bytes and vice-versa",
+ "version": "3.1.2",
+ "author": "TJ Holowaychuk (http://tjholowaychuk.com)",
+ "contributors": [
+ "Jed Watson ",
+ "Théo FIDRY "
+ ],
+ "license": "MIT",
+ "keywords": [
+ "byte",
+ "bytes",
+ "utility",
+ "parse",
+ "parser",
+ "convert",
+ "converter"
+ ],
+ "repository": "visionmedia/bytes.js",
+ "devDependencies": {
+ "eslint": "7.32.0",
+ "eslint-plugin-markdown": "2.2.1",
+ "mocha": "9.2.0",
+ "nyc": "15.1.0"
+ },
+ "files": [
+ "History.md",
+ "LICENSE",
+ "Readme.md",
+ "index.js"
+ ],
+ "engines": {
+ "node": ">= 0.8"
+ },
+ "scripts": {
+ "lint": "eslint .",
+ "test": "mocha --check-leaks --reporter spec",
+ "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+ "test-cov": "nyc --reporter=html --reporter=text npm test"
+ }
+}
diff --git a/node_modules/cacheable-request/LICENSE b/node_modules/cacheable-request/LICENSE
new file mode 100644
index 00000000..f27ee9b4
--- /dev/null
+++ b/node_modules/cacheable-request/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Luke Childs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/cacheable-request/README.md b/node_modules/cacheable-request/README.md
new file mode 100644
index 00000000..84a55682
--- /dev/null
+++ b/node_modules/cacheable-request/README.md
@@ -0,0 +1,190 @@
+# cacheable-request
+
+> Wrap native HTTP requests with RFC compliant cache support
+
+[](https://travis-ci.org/lukechilds/cacheable-request)
+[](https://coveralls.io/github/lukechilds/cacheable-request?branch=master)
+[](https://www.npmjs.com/package/cacheable-request)
+[](https://www.npmjs.com/package/cacheable-request)
+
+[RFC 7234](http://httpwg.org/specs/rfc7234.html) compliant HTTP caching for native Node.js HTTP/HTTPS requests. Caching works out of the box in memory or is easily pluggable with a wide range of storage adapters.
+
+**Note:** This is a low level wrapper around the core HTTP modules; it's not a high level request library.
+
+## Features
+
+- Only stores cacheable responses as defined by RFC 7234
+- Fresh cache entries are served directly from cache
+- Stale cache entries are revalidated with `If-None-Match`/`If-Modified-Since` headers
+- 304 responses from revalidation requests use cached body
+- Updates `Age` header on cached responses
+- Can completely bypass cache on a per request basis
+- In memory cache by default
+- Official support for Redis, MongoDB, SQLite, PostgreSQL and MySQL storage adapters
+- Easily plug in your own or third-party storage adapters
+- If DB connection fails, cache is automatically bypassed ([disabled by default](#optsautomaticfailover))
+- Adds cache support to any existing HTTP code with minimal changes
+- Uses [http-cache-semantics](https://github.com/pornel/http-cache-semantics) internally for HTTP RFC 7234 compliance
+
+## Install
+
+```shell
+npm install --save cacheable-request
+```
+
+## Usage
+
+```js
+const http = require('http');
+const CacheableRequest = require('cacheable-request');
+
+// Then instead of
+const req = http.request('http://example.com', cb);
+req.end();
+
+// You can do
+const cacheableRequest = new CacheableRequest(http.request);
+const cacheReq = cacheableRequest('http://example.com', cb);
+cacheReq.on('request', req => req.end());
+// Future requests to 'example.com' will be returned from cache if still valid
+
+// You can pass in any other http.request API compatible method to be wrapped with cache support:
+const cacheableRequest = new CacheableRequest(https.request);
+const cacheableRequest = new CacheableRequest(electron.net);
+```
+
+## Storage Adapters
+
+`cacheable-request` uses [Keyv](https://github.com/lukechilds/keyv) to support a wide range of storage adapters.
+
+For example, to use Redis as a cache backend, you just need to install the official Redis Keyv storage adapter:
+
+```
+npm install --save @keyv/redis
+```
+
+And then you can pass `CacheableRequest` your connection string:
+
+```js
+const cacheableRequest = new CacheableRequest(http.request, 'redis://user:pass@localhost:6379');
+```
+
+[View all official Keyv storage adapters.](https://github.com/lukechilds/keyv#official-storage-adapters)
+
+Keyv also supports anything that follows the Map API so it's easy to write your own storage adapter or use a third-party solution.
+
+e.g. the following are all valid storage adapters:
+
+```js
+const storageAdapter = new Map();
+// or
+const storageAdapter = require('./my-storage-adapter');
+// or
+const QuickLRU = require('quick-lru');
+const storageAdapter = new QuickLRU({ maxSize: 1000 });
+
+const cacheableRequest = new CacheableRequest(http.request, storageAdapter);
+```
+
+View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters.
+
+## API
+
+### new CacheableRequest(request, [storageAdapter])
+
+Returns the provided request function wrapped with cache support.
+
+#### request
+
+Type: `function`
+
+Request function to wrap with cache support. Should be [`http.request`](https://nodejs.org/api/http.html#http_http_request_options_callback) or a similar API compatible request function.
+
+#### storageAdapter
+
+Type: `Keyv storage adapter`
+Default: `new Map()`
+
+A [Keyv](https://github.com/lukechilds/keyv) storage adapter instance, or connection string if using with an official Keyv storage adapter.
+
+### Instance
+
+#### cacheableRequest(opts, [cb])
+
+Returns an event emitter.
+
+##### opts
+
+Type: `object`, `string`
+
+Any of the default request functions options plus:
+
+###### opts.cache
+
+Type: `boolean`
+Default: `true`
+
+If the cache should be used. Setting this to false will completely bypass the cache for the current request.
+
+###### opts.strictTtl
+
+Type: `boolean`
+Default: `false`
+
+If set to `false`, after a cached resource's TTL expires it is kept in the cache and will be revalidated on the next request with `If-None-Match`/`If-Modified-Since` headers.
+
+If set to `true` once a cached resource has expired it is deleted and will have to be re-requested.
+
+###### opts.automaticFailover
+
+Type: `boolean`
+Default: `false`
+
+When set to `true`, if the DB connection fails we will automatically fall back to a network request. DB errors will still be emitted to notify you of the problem even though the request callback may succeed.
+
+##### cb
+
+Type: `function`
+
+The callback function which will receive the response as an argument.
+
+The response can be either a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage) or a [responselike object](https://github.com/lukechilds/responselike). The response will also have a `fromCache` property set with a boolean value.
+
+##### .on('request', request)
+
+`request` event to get the request object of the request.
+
+**Note:** This event will only fire if an HTTP request is actually made, not when a response is retrieved from cache. However, you should always handle the `request` event to end the request and handle any potential request errors.
+
+##### .on('response', response)
+
+`response` event to get the response object from the HTTP request or cache.
+
+##### .on('error', error)
+
+`error` event emitted in case of an error with the cache.
+
+Errors emitted here will be an instance of `CacheableRequest.RequestError` or `CacheableRequest.CacheError`. You will only ever receive a `RequestError` if the request function throws (normally caused by invalid user input). Normal request errors should be handled inside the `request` event.
+
+To properly handle all error scenarios you should use the following pattern:
+
+```js
+cacheableRequest('example.com', cb)
+ .on('error', err => {
+ if (err instanceof CacheableRequest.CacheError) {
+ handleCacheError(err); // Cache error
+ } else if (err instanceof CacheableRequest.RequestError) {
+ handleRequestError(err); // Request function thrown
+ }
+ })
+ .on('request', req => {
+ req.on('error', handleRequestError); // Request error emitted
+ req.end();
+ });
+```
+
+**Note:** Database connection errors are emitted here, however `cacheable-request` will attempt to re-request the resource and bypass the cache on a connection error. Therefore a database connection error doesn't necessarily mean the request won't be fulfilled.
+
+## License
+
+MIT © Luke Childs
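
As an illustrative sketch only (not from the upstream README), the wrapper can be pointed at a plain `Map` store and the documented `fromCache` flag inspected; `http://example.com` is a placeholder URL and `get-stream` (already a dependency of this package) is used to drain the response:

```js
const http = require('http');
const getStream = require('get-stream');
const CacheableRequest = require('cacheable-request');

// Any Map-compatible store works as the storage adapter.
const cacheableRequest = new CacheableRequest(http.request, new Map());

const fetchOnce = url => new Promise((resolve, reject) => {
  cacheableRequest(url, response => {
    // `fromCache` tells us whether this response body came out of the cache.
    getStream(response).then(body => resolve({body, fromCache: response.fromCache}));
  })
    .on('request', req => { req.on('error', reject); req.end(); })
    .on('error', reject);
});

// The second request for the same URL may be served from cache if the response was cacheable.
fetchOnce('http://example.com')
  .then(() => fetchOnce('http://example.com'))
  .then(result => console.log(result.fromCache));
```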
diff --git a/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js b/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js
new file mode 100644
index 00000000..ae45d3d9
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js
@@ -0,0 +1,51 @@
+'use strict';
+const PassThrough = require('stream').PassThrough;
+
+module.exports = opts => {
+ opts = Object.assign({}, opts);
+
+ const array = opts.array;
+ let encoding = opts.encoding;
+ const buffer = encoding === 'buffer';
+ let objectMode = false;
+
+ if (array) {
+ objectMode = !(encoding || buffer);
+ } else {
+ encoding = encoding || 'utf8';
+ }
+
+ if (buffer) {
+ encoding = null;
+ }
+
+ let len = 0;
+ const ret = [];
+ const stream = new PassThrough({objectMode});
+
+ if (encoding) {
+ stream.setEncoding(encoding);
+ }
+
+ stream.on('data', chunk => {
+ ret.push(chunk);
+
+ if (objectMode) {
+ len = ret.length;
+ } else {
+ len += chunk.length;
+ }
+ });
+
+ stream.getBufferedValue = () => {
+ if (array) {
+ return ret;
+ }
+
+ return buffer ? Buffer.concat(ret, len) : ret.join('');
+ };
+
+ stream.getBufferedLength = () => len;
+
+ return stream;
+};
diff --git a/node_modules/cacheable-request/node_modules/get-stream/index.js b/node_modules/cacheable-request/node_modules/get-stream/index.js
new file mode 100644
index 00000000..2dc5ee96
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/get-stream/index.js
@@ -0,0 +1,51 @@
+'use strict';
+const bufferStream = require('./buffer-stream');
+
+function getStream(inputStream, opts) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
+
+ opts = Object.assign({maxBuffer: Infinity}, opts);
+
+ const maxBuffer = opts.maxBuffer;
+ let stream;
+ let clean;
+
+ const p = new Promise((resolve, reject) => {
+ const error = err => {
+ if (err) { // null check
+ err.bufferedData = stream.getBufferedValue();
+ }
+
+ reject(err);
+ };
+
+ stream = bufferStream(opts);
+ inputStream.once('error', error);
+ inputStream.pipe(stream);
+
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ reject(new Error('maxBuffer exceeded'));
+ }
+ });
+ stream.once('error', error);
+ stream.on('end', resolve);
+
+ clean = () => {
+ // some streams don't implement the `stream.Readable` interface correctly
+ if (inputStream.unpipe) {
+ inputStream.unpipe(stream);
+ }
+ };
+ });
+
+ p.then(clean, clean);
+
+ return p.then(() => stream.getBufferedValue());
+}
+
+module.exports = getStream;
+module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'}));
+module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true}));
diff --git a/node_modules/cacheable-request/node_modules/get-stream/license b/node_modules/cacheable-request/node_modules/get-stream/license
new file mode 100644
index 00000000..654d0bfe
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/get-stream/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/cacheable-request/node_modules/get-stream/package.json b/node_modules/cacheable-request/node_modules/get-stream/package.json
new file mode 100644
index 00000000..2f2adf0d
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/get-stream/package.json
@@ -0,0 +1,48 @@
+{
+ "name": "get-stream",
+ "version": "3.0.0",
+ "description": "Get a stream as a string, buffer, or array",
+ "license": "MIT",
+ "repository": "sindresorhus/get-stream",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js",
+ "buffer-stream.js"
+ ],
+ "keywords": [
+ "get",
+ "stream",
+ "promise",
+ "concat",
+ "string",
+ "str",
+ "text",
+ "buffer",
+ "read",
+ "data",
+ "consume",
+ "readable",
+ "readablestream",
+ "array",
+ "object",
+ "obj"
+ ],
+ "devDependencies": {
+ "ava": "*",
+ "into-stream": "^3.0.0",
+ "xo": "*"
+ },
+ "xo": {
+ "esnext": true
+ }
+}
diff --git a/node_modules/cacheable-request/node_modules/get-stream/readme.md b/node_modules/cacheable-request/node_modules/get-stream/readme.md
new file mode 100644
index 00000000..73b188fb
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/get-stream/readme.md
@@ -0,0 +1,117 @@
+# get-stream [](https://travis-ci.org/sindresorhus/get-stream)
+
+> Get a stream as a string, buffer, or array
+
+
+## Install
+
+```
+$ npm install --save get-stream
+```
+
+
+## Usage
+
+```js
+const fs = require('fs');
+const getStream = require('get-stream');
+const stream = fs.createReadStream('unicorn.txt');
+
+getStream(stream).then(str => {
+ console.log(str);
+ /*
+ ,,))))))));,
+ __)))))))))))))),
+ \|/ -\(((((''''((((((((.
+ -*-==//////(('' . `)))))),
+ /|\ ))| o ;-. '((((( ,(,
+ ( `| / ) ;))))' ,_))^;(~
+ | | | ,))((((_ _____------~~~-. %,;(;(>';'~
+ o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~
+ ; ''''```` `: `:::|\,__,%% );`'; ~
+ | _ ) / `:|`----' `-'
+ ______/\/~ | / /
+ /~;;.____/;;' / ___--,-( `;;;/
+ / // _;______;'------~~~~~ /;;/\ /
+ // | | / ; \;;,\
+ (<_ | ; /',/-----' _>
+ \_| ||_ //~;~~~~~~~~~
+ `\_| (,~~
+ \~\
+ ~~
+ */
+});
+```
+
+
+## API
+
+The methods return a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode.
+
+### getStream(stream, [options])
+
+Get the `stream` as a string.
+
+#### options
+
+##### encoding
+
+Type: `string`
+Default: `utf8`
+
+[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream.
+
+##### maxBuffer
+
+Type: `number`
+Default: `Infinity`
+
+Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected.
+
+### getStream.buffer(stream, [options])
+
+Get the `stream` as a buffer.
+
+It honors the `maxBuffer` option as above, but it refers to byte length rather than string length.
+
+### getStream.array(stream, [options])
+
+Get the `stream` as an array of values.
+
+It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen:
+
+- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes).
+
+- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array.
+
+- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array.
+
+
+## Errors
+
+If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error.
+
+```js
+getStream(streamThatErrorsAtTheEnd('unicorn'))
+ .catch(err => {
+ console.log(err.bufferedData);
+ //=> 'unicorn'
+ });
+```
+
+
+## FAQ
+
+### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)?
+
+This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package.
+
+
+## Related
+
+- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
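
As a short supplementary sketch (not from the upstream readme), the buffer and array modes described above can be exercised like this; `unicorn.txt` is the same placeholder file used earlier:

```js
const fs = require('fs');
const {Readable} = require('stream');
const getStream = require('get-stream');

// Buffer mode: maxBuffer is compared against byte length, not character length.
getStream.buffer(fs.createReadStream('unicorn.txt'), {maxBuffer: 1024 * 1024})
	.then(buf => console.log(Buffer.isBuffer(buf), buf.length));

// Array mode with no encoding: values from an object-mode stream are collected as-is.
const objects = new Readable({objectMode: true, read() {}});
objects.push({n: 1});
objects.push({n: 2});
objects.push(null);

getStream.array(objects).then(values => console.log(values));
//=> [ { n: 1 }, { n: 2 } ]
```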
diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/index.js b/node_modules/cacheable-request/node_modules/lowercase-keys/index.js
new file mode 100644
index 00000000..b8d88983
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/lowercase-keys/index.js
@@ -0,0 +1,11 @@
+'use strict';
+module.exports = function (obj) {
+ var ret = {};
+ var keys = Object.keys(Object(obj));
+
+ for (var i = 0; i < keys.length; i++) {
+ ret[keys[i].toLowerCase()] = obj[keys[i]];
+ }
+
+ return ret;
+};
diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/package.json b/node_modules/cacheable-request/node_modules/lowercase-keys/package.json
new file mode 100644
index 00000000..7e4e396f
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/lowercase-keys/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "lowercase-keys",
+ "version": "1.0.0",
+ "description": "Lowercase the keys of an object",
+ "license": "MIT",
+ "repository": "sindresorhus/lowercase-keys",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "node test.js"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "object",
+ "assign",
+ "extend",
+ "properties",
+ "lowercase",
+ "lower-case",
+ "case",
+ "keys",
+ "key"
+ ],
+ "devDependencies": {
+ "ava": "0.0.4"
+ }
+}
diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md b/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md
new file mode 100644
index 00000000..dc65770a
--- /dev/null
+++ b/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md
@@ -0,0 +1,33 @@
+# lowercase-keys [](https://travis-ci.org/sindresorhus/lowercase-keys)
+
+> Lowercase the keys of an object
+
+
+## Install
+
+```
+$ npm install --save lowercase-keys
+```
+
+
+## Usage
+
+```js
+var lowercaseKeys = require('lowercase-keys');
+
+lowercaseKeys({FOO: true, bAr: false});
+//=> {foo: true, bar: false}
+```
+
+
+## API
+
+### lowercaseKeys(object)
+
+Lowercases the keys and returns a new object.
+
+
+
+## License
+
+MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/node_modules/cacheable-request/package.json b/node_modules/cacheable-request/package.json
new file mode 100644
index 00000000..04483124
--- /dev/null
+++ b/node_modules/cacheable-request/package.json
@@ -0,0 +1,57 @@
+{
+ "name": "cacheable-request",
+ "version": "2.1.4",
+ "description": "Wrap native HTTP requests with RFC compliant cache support",
+ "main": "src/index.js",
+ "scripts": {
+ "test": "xo && nyc ava",
+ "coverage": "nyc report --reporter=text-lcov | coveralls"
+ },
+ "xo": {
+ "extends": "xo-lukechilds"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/lukechilds/cacheable-request.git"
+ },
+ "keywords": [
+ "HTTP",
+ "HTTPS",
+ "cache",
+ "caching",
+ "layer",
+ "cacheable",
+ "RFC 7234",
+ "RFC",
+ "7234",
+ "compliant"
+ ],
+ "author": "Luke Childs (http://lukechilds.co.uk)",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/lukechilds/cacheable-request/issues"
+ },
+ "homepage": "https://github.com/lukechilds/cacheable-request",
+ "dependencies": {
+ "clone-response": "1.0.2",
+ "get-stream": "3.0.0",
+ "http-cache-semantics": "3.8.1",
+ "keyv": "3.0.0",
+ "lowercase-keys": "1.0.0",
+ "normalize-url": "2.0.1",
+ "responselike": "1.0.2"
+ },
+ "devDependencies": {
+ "@keyv/sqlite": "^1.2.6",
+ "ava": "^0.24.0",
+ "coveralls": "^3.0.0",
+ "create-test-server": "^2.0.0",
+ "delay": "^2.0.0",
+ "eslint-config-xo-lukechilds": "^1.0.0",
+ "nyc": "^11.0.2",
+ "pify": "^3.0.0",
+ "sqlite3": "^3.1.9",
+ "this": "^1.0.2",
+ "xo": "^0.19.0"
+ }
+}
diff --git a/node_modules/cacheable-request/src/index.js b/node_modules/cacheable-request/src/index.js
new file mode 100644
index 00000000..1935b2db
--- /dev/null
+++ b/node_modules/cacheable-request/src/index.js
@@ -0,0 +1,155 @@
+'use strict';
+
+const EventEmitter = require('events');
+const urlLib = require('url');
+const normalizeUrl = require('normalize-url');
+const getStream = require('get-stream');
+const CachePolicy = require('http-cache-semantics');
+const Response = require('responselike');
+const lowercaseKeys = require('lowercase-keys');
+const cloneResponse = require('clone-response');
+const Keyv = require('keyv');
+
+class CacheableRequest {
+ constructor(request, cacheAdapter) {
+ if (typeof request !== 'function') {
+ throw new TypeError('Parameter `request` must be a function');
+ }
+
+ this.cache = new Keyv({
+ uri: typeof cacheAdapter === 'string' && cacheAdapter,
+ store: typeof cacheAdapter !== 'string' && cacheAdapter,
+ namespace: 'cacheable-request'
+ });
+
+ return this.createCacheableRequest(request);
+ }
+
+ createCacheableRequest(request) {
+ return (opts, cb) => {
+ if (typeof opts === 'string') {
+ opts = urlLib.parse(opts);
+ }
+ opts = Object.assign({
+ headers: {},
+ method: 'GET',
+ cache: true,
+ strictTtl: false,
+ automaticFailover: false
+ }, opts);
+ opts.headers = lowercaseKeys(opts.headers);
+
+ const ee = new EventEmitter();
+ const url = normalizeUrl(urlLib.format(opts));
+ const key = `${opts.method}:${url}`;
+ let revalidate = false;
+ let madeRequest = false;
+
+ const makeRequest = opts => {
+ madeRequest = true;
+ const handler = response => {
+ if (revalidate) {
+ const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response);
+ if (!revalidatedPolicy.modified) {
+ const headers = revalidatedPolicy.policy.responseHeaders();
+ response = new Response(response.statusCode, headers, revalidate.body, revalidate.url);
+ response.cachePolicy = revalidatedPolicy.policy;
+ response.fromCache = true;
+ }
+ }
+
+ if (!response.fromCache) {
+ response.cachePolicy = new CachePolicy(opts, response);
+ response.fromCache = false;
+ }
+
+ let clonedResponse;
+ if (opts.cache && response.cachePolicy.storable()) {
+ clonedResponse = cloneResponse(response);
+ getStream.buffer(response)
+ .then(body => {
+ const value = {
+ cachePolicy: response.cachePolicy.toObject(),
+ url: response.url,
+ statusCode: response.fromCache ? revalidate.statusCode : response.statusCode,
+ body
+ };
+ const ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined;
+ return this.cache.set(key, value, ttl);
+ })
+ .catch(err => ee.emit('error', new CacheableRequest.CacheError(err)));
+ } else if (opts.cache && revalidate) {
+ this.cache.delete(key)
+ .catch(err => ee.emit('error', new CacheableRequest.CacheError(err)));
+ }
+
+ ee.emit('response', clonedResponse || response);
+ if (typeof cb === 'function') {
+ cb(clonedResponse || response);
+ }
+ };
+
+ try {
+ const req = request(opts, handler);
+ ee.emit('request', req);
+ } catch (err) {
+ ee.emit('error', new CacheableRequest.RequestError(err));
+ }
+ };
+
+ const get = opts => Promise.resolve()
+ .then(() => opts.cache ? this.cache.get(key) : undefined)
+ .then(cacheEntry => {
+ if (typeof cacheEntry === 'undefined') {
+ return makeRequest(opts);
+ }
+
+ const policy = CachePolicy.fromObject(cacheEntry.cachePolicy);
+ if (policy.satisfiesWithoutRevalidation(opts)) {
+ const headers = policy.responseHeaders();
+ const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url);
+ response.cachePolicy = policy;
+ response.fromCache = true;
+
+ ee.emit('response', response);
+ if (typeof cb === 'function') {
+ cb(response);
+ }
+ } else {
+ revalidate = cacheEntry;
+ opts.headers = policy.revalidationHeaders(opts);
+ makeRequest(opts);
+ }
+ });
+
+ this.cache.on('error', err => ee.emit('error', new CacheableRequest.CacheError(err)));
+
+ get(opts).catch(err => {
+ if (opts.automaticFailover && !madeRequest) {
+ makeRequest(opts);
+ }
+ ee.emit('error', new CacheableRequest.CacheError(err));
+ });
+
+ return ee;
+ };
+ }
+}
+
+CacheableRequest.RequestError = class extends Error {
+ constructor(err) {
+ super(err.message);
+ this.name = 'RequestError';
+ Object.assign(this, err);
+ }
+};
+
+CacheableRequest.CacheError = class extends Error {
+ constructor(err) {
+ super(err.message);
+ this.name = 'CacheError';
+ Object.assign(this, err);
+ }
+};
+
+module.exports = CacheableRequest;
diff --git a/node_modules/call-bind/.eslintignore b/node_modules/call-bind/.eslintignore
new file mode 100644
index 00000000..404abb22
--- /dev/null
+++ b/node_modules/call-bind/.eslintignore
@@ -0,0 +1 @@
+coverage/
diff --git a/node_modules/call-bind/.eslintrc b/node_modules/call-bind/.eslintrc
new file mode 100644
index 00000000..e5d3c9a9
--- /dev/null
+++ b/node_modules/call-bind/.eslintrc
@@ -0,0 +1,17 @@
+{
+ "root": true,
+
+ "extends": "@ljharb",
+
+ "rules": {
+ "func-name-matching": 0,
+ "id-length": 0,
+ "new-cap": [2, {
+ "capIsNewExceptions": [
+ "GetIntrinsic",
+ ],
+ }],
+ "no-magic-numbers": 0,
+ "operator-linebreak": [2, "before"],
+ },
+}
diff --git a/node_modules/call-bind/.github/FUNDING.yml b/node_modules/call-bind/.github/FUNDING.yml
new file mode 100644
index 00000000..c70c2ecd
--- /dev/null
+++ b/node_modules/call-bind/.github/FUNDING.yml
@@ -0,0 +1,12 @@
+# These are supported funding model platforms
+
+github: [ljharb]
+patreon: # Replace with a single Patreon username
+open_collective: # Replace with a single Open Collective username
+ko_fi: # Replace with a single Ko-fi username
+tidelift: npm/call-bind
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+otechie: # Replace with a single Otechie username
+custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
diff --git a/node_modules/call-bind/.nycrc b/node_modules/call-bind/.nycrc
new file mode 100644
index 00000000..1826526e
--- /dev/null
+++ b/node_modules/call-bind/.nycrc
@@ -0,0 +1,13 @@
+{
+ "all": true,
+ "check-coverage": false,
+ "reporter": ["text-summary", "text", "html", "json"],
+ "lines": 86,
+ "statements": 85.93,
+ "functions": 82.43,
+ "branches": 76.06,
+ "exclude": [
+ "coverage",
+ "test"
+ ]
+}
diff --git a/node_modules/call-bind/CHANGELOG.md b/node_modules/call-bind/CHANGELOG.md
new file mode 100644
index 00000000..62a37279
--- /dev/null
+++ b/node_modules/call-bind/CHANGELOG.md
@@ -0,0 +1,42 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [v1.0.2](https://github.com/ljharb/call-bind/compare/v1.0.1...v1.0.2) - 2021-01-11
+
+### Commits
+
+- [Fix] properly include the receiver in the bound length [`dbae7bc`](https://github.com/ljharb/call-bind/commit/dbae7bc676c079a0d33c0a43e9ef92cb7b01345d)
+
+## [v1.0.1](https://github.com/ljharb/call-bind/compare/v1.0.0...v1.0.1) - 2021-01-08
+
+### Commits
+
+- [Tests] migrate tests to Github Actions [`b6db284`](https://github.com/ljharb/call-bind/commit/b6db284c36f8ccd195b88a6764fe84b7223a0da1)
+- [meta] do not publish github action workflow files [`ec7fe46`](https://github.com/ljharb/call-bind/commit/ec7fe46e60cfa4764ee943d2755f5e5a366e578e)
+- [Fix] preserve original function’s length when possible [`adbceaa`](https://github.com/ljharb/call-bind/commit/adbceaa3cac4b41ea78bb19d7ccdbaaf7e0bdadb)
+- [Tests] gather coverage data on every job [`d69e23c`](https://github.com/ljharb/call-bind/commit/d69e23cc65f101ba1d4c19bb07fa8eb0ec624be8)
+- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`2fd3586`](https://github.com/ljharb/call-bind/commit/2fd3586c5d47b335364c14293114c6b625ae1f71)
+- [Deps] update `get-intrinsic` [`f23e931`](https://github.com/ljharb/call-bind/commit/f23e9318cc271c2add8bb38cfded85ee7baf8eee)
+- [Deps] update `get-intrinsic` [`72d9f44`](https://github.com/ljharb/call-bind/commit/72d9f44e184465ba8dd3fb48260bbcff234985f2)
+- [meta] fix FUNDING.yml [`e723573`](https://github.com/ljharb/call-bind/commit/e723573438c5a68dcec31fb5d96ea6b7e4a93be8)
+- [eslint] ignore coverage output [`15e76d2`](https://github.com/ljharb/call-bind/commit/15e76d28a5f43e504696401e5b31ebb78ee1b532)
+- [meta] add Automatic Rebase and Require Allow Edits workflows [`8fa4dab`](https://github.com/ljharb/call-bind/commit/8fa4dabb23ba3dd7bb92c9571c1241c08b56e4b6)
+
+## v1.0.0 - 2020-10-30
+
+### Commits
+
+- Initial commit [`306cf98`](https://github.com/ljharb/call-bind/commit/306cf98c7ec9e7ef66b653ec152277ac1381eb50)
+- Tests [`e10d0bb`](https://github.com/ljharb/call-bind/commit/e10d0bbdadc7a10ecedc9a1c035112d3e368b8df)
+- Implementation [`43852ed`](https://github.com/ljharb/call-bind/commit/43852eda0f187327b7fad2423ca972149a52bd65)
+- npm init [`408f860`](https://github.com/ljharb/call-bind/commit/408f860b773a2f610805fd3613d0d71bac1b6249)
+- [meta] add Automatic Rebase and Require Allow Edits workflows [`fb349b2`](https://github.com/ljharb/call-bind/commit/fb349b2e48defbec8b5ec8a8395cc8f69f220b13)
+- [meta] add `auto-changelog` [`c4001fc`](https://github.com/ljharb/call-bind/commit/c4001fc43031799ef908211c98d3b0fb2b60fde4)
+- [meta] add "funding"; create `FUNDING.yml` [`d4d6d29`](https://github.com/ljharb/call-bind/commit/d4d6d2974a14bc2e98830468eda7fe6d6a776717)
+- [Tests] add `npm run lint` [`dedfb98`](https://github.com/ljharb/call-bind/commit/dedfb98bd0ecefb08ddb9a94061bd10cde4332af)
+- Only apps should have lockfiles [`54ac776`](https://github.com/ljharb/call-bind/commit/54ac77653db45a7361dc153d2f478e743f110650)
+- [meta] add `safe-publish-latest` [`9ea8e43`](https://github.com/ljharb/call-bind/commit/9ea8e435b950ce9b705559cd651039f9bf40140f)
diff --git a/node_modules/call-bind/LICENSE b/node_modules/call-bind/LICENSE
new file mode 100644
index 00000000..48f05d01
--- /dev/null
+++ b/node_modules/call-bind/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Jordan Harband
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/call-bind/README.md b/node_modules/call-bind/README.md
new file mode 100644
index 00000000..53649eb4
--- /dev/null
+++ b/node_modules/call-bind/README.md
@@ -0,0 +1,2 @@
+# call-bind
+Robustly `.call.bind()` a function.
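
Since the README above stops at a one-line description, here is a brief, hedged usage sketch based on the `index.js` and `callBound.js` implementations that follow:

```js
// Illustrative sketch only.
var callBind = require('call-bind');
var callBound = require('call-bind/callBound');

// Turn a prototype method into a plain function that takes its receiver first.
var $slice = callBind(Array.prototype.slice);
console.log($slice([1, 2, 3], 1)); // [ 2, 3 ]

// callBound looks an intrinsic up by name and call-binds it in one step.
var $toString = callBound('Object.prototype.toString');
console.log($toString([])); // '[object Array]'
```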
diff --git a/node_modules/call-bind/callBound.js b/node_modules/call-bind/callBound.js
new file mode 100644
index 00000000..8374adfd
--- /dev/null
+++ b/node_modules/call-bind/callBound.js
@@ -0,0 +1,15 @@
+'use strict';
+
+var GetIntrinsic = require('get-intrinsic');
+
+var callBind = require('./');
+
+var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf'));
+
+module.exports = function callBoundIntrinsic(name, allowMissing) {
+ var intrinsic = GetIntrinsic(name, !!allowMissing);
+ if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) {
+ return callBind(intrinsic);
+ }
+ return intrinsic;
+};
diff --git a/node_modules/call-bind/index.js b/node_modules/call-bind/index.js
new file mode 100644
index 00000000..6fa3e4af
--- /dev/null
+++ b/node_modules/call-bind/index.js
@@ -0,0 +1,47 @@
+'use strict';
+
+var bind = require('function-bind');
+var GetIntrinsic = require('get-intrinsic');
+
+var $apply = GetIntrinsic('%Function.prototype.apply%');
+var $call = GetIntrinsic('%Function.prototype.call%');
+var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply);
+
+var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true);
+var $defineProperty = GetIntrinsic('%Object.defineProperty%', true);
+var $max = GetIntrinsic('%Math.max%');
+
+if ($defineProperty) {
+ try {
+ $defineProperty({}, 'a', { value: 1 });
+ } catch (e) {
+ // IE 8 has a broken defineProperty
+ $defineProperty = null;
+ }
+}
+
+module.exports = function callBind(originalFunction) {
+ var func = $reflectApply(bind, $call, arguments);
+ if ($gOPD && $defineProperty) {
+ var desc = $gOPD(func, 'length');
+ if (desc.configurable) {
+ // original length, plus the receiver, minus any additional arguments (after the receiver)
+ $defineProperty(
+ func,
+ 'length',
+ { value: 1 + $max(0, originalFunction.length - (arguments.length - 1)) }
+ );
+ }
+ }
+ return func;
+};
+
+var applyBind = function applyBind() {
+ return $reflectApply(bind, $apply, arguments);
+};
+
+if ($defineProperty) {
+ $defineProperty(module.exports, 'apply', { value: applyBind });
+} else {
+ module.exports.apply = applyBind;
+}
diff --git a/node_modules/call-bind/package.json b/node_modules/call-bind/package.json
new file mode 100644
index 00000000..4360556a
--- /dev/null
+++ b/node_modules/call-bind/package.json
@@ -0,0 +1,80 @@
+{
+ "name": "call-bind",
+ "version": "1.0.2",
+ "description": "Robustly `.call.bind()` a function",
+ "main": "index.js",
+ "exports": {
+ ".": [
+ {
+ "default": "./index.js"
+ },
+ "./index.js"
+ ],
+ "./callBound": [
+ {
+ "default": "./callBound.js"
+ },
+ "./callBound.js"
+ ],
+ "./package.json": "./package.json"
+ },
+ "scripts": {
+ "prepublish": "safe-publish-latest",
+ "lint": "eslint --ext=.js,.mjs .",
+ "pretest": "npm run lint",
+ "tests-only": "nyc tape 'test/*'",
+ "test": "npm run tests-only",
+ "posttest": "aud --production",
+ "version": "auto-changelog && git add CHANGELOG.md",
+ "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/ljharb/call-bind.git"
+ },
+ "keywords": [
+ "javascript",
+ "ecmascript",
+ "es",
+ "js",
+ "callbind",
+ "callbound",
+ "call",
+ "bind",
+ "bound",
+ "call-bind",
+ "call-bound",
+ "function",
+ "es-abstract"
+ ],
+ "author": "Jordan Harband ",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ },
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/ljharb/call-bind/issues"
+ },
+ "homepage": "https://github.com/ljharb/call-bind#readme",
+ "devDependencies": {
+ "@ljharb/eslint-config": "^17.3.0",
+ "aud": "^1.1.3",
+ "auto-changelog": "^2.2.1",
+ "eslint": "^7.17.0",
+ "nyc": "^10.3.2",
+ "safe-publish-latest": "^1.1.4",
+ "tape": "^5.1.1"
+ },
+ "dependencies": {
+ "function-bind": "^1.1.1",
+ "get-intrinsic": "^1.0.2"
+ },
+ "auto-changelog": {
+ "output": "CHANGELOG.md",
+ "template": "keepachangelog",
+ "unreleased": false,
+ "commitLimit": false,
+ "backfillLimit": false,
+ "hideCredit": true
+ }
+}
diff --git a/node_modules/call-bind/test/callBound.js b/node_modules/call-bind/test/callBound.js
new file mode 100644
index 00000000..209ce3cc
--- /dev/null
+++ b/node_modules/call-bind/test/callBound.js
@@ -0,0 +1,55 @@
+'use strict';
+
+var test = require('tape');
+
+var callBound = require('../callBound');
+
+test('callBound', function (t) {
+ // static primitive
+ t.equal(callBound('Array.length'), Array.length, 'Array.length yields itself');
+ t.equal(callBound('%Array.length%'), Array.length, '%Array.length% yields itself');
+
+ // static non-function object
+ t.equal(callBound('Array.prototype'), Array.prototype, 'Array.prototype yields itself');
+ t.equal(callBound('%Array.prototype%'), Array.prototype, '%Array.prototype% yields itself');
+ t.equal(callBound('Array.constructor'), Array.constructor, 'Array.constructor yields itself');
+ t.equal(callBound('%Array.constructor%'), Array.constructor, '%Array.constructor% yields itself');
+
+ // static function
+ t.equal(callBound('Date.parse'), Date.parse, 'Date.parse yields itself');
+ t.equal(callBound('%Date.parse%'), Date.parse, '%Date.parse% yields itself');
+
+ // prototype primitive
+ t.equal(callBound('Error.prototype.message'), Error.prototype.message, 'Error.prototype.message yields itself');
+ t.equal(callBound('%Error.prototype.message%'), Error.prototype.message, '%Error.prototype.message% yields itself');
+
+ // prototype function
+ t.notEqual(callBound('Object.prototype.toString'), Object.prototype.toString, 'Object.prototype.toString does not yield itself');
+ t.notEqual(callBound('%Object.prototype.toString%'), Object.prototype.toString, '%Object.prototype.toString% does not yield itself');
+ t.equal(callBound('Object.prototype.toString')(true), Object.prototype.toString.call(true), 'call-bound Object.prototype.toString calls into the original');
+ t.equal(callBound('%Object.prototype.toString%')(true), Object.prototype.toString.call(true), 'call-bound %Object.prototype.toString% calls into the original');
+
+ t['throws'](
+ function () { callBound('does not exist'); },
+ SyntaxError,
+ 'nonexistent intrinsic throws'
+ );
+ t['throws'](
+ function () { callBound('does not exist', true); },
+ SyntaxError,
+ 'allowMissing arg still throws for unknown intrinsic'
+ );
+
+ /* globals WeakRef: false */
+ t.test('real but absent intrinsic', { skip: typeof WeakRef !== 'undefined' }, function (st) {
+ st['throws'](
+ function () { callBound('WeakRef'); },
+ TypeError,
+ 'real but absent intrinsic throws'
+ );
+ st.equal(callBound('WeakRef', true), undefined, 'allowMissing arg avoids exception');
+ st.end();
+ });
+
+ t.end();
+});
diff --git a/node_modules/call-bind/test/index.js b/node_modules/call-bind/test/index.js
new file mode 100644
index 00000000..bf6769c7
--- /dev/null
+++ b/node_modules/call-bind/test/index.js
@@ -0,0 +1,66 @@
+'use strict';
+
+var callBind = require('../');
+var bind = require('function-bind');
+
+var test = require('tape');
+
+/*
+ * older engines have length nonconfigurable
+ * in io.js v3, it is configurable except on bound functions, hence the .bind()
+ */
+var functionsHaveConfigurableLengths = !!(
+ Object.getOwnPropertyDescriptor
+ && Object.getOwnPropertyDescriptor(bind.call(function () {}), 'length').configurable
+);
+
+test('callBind', function (t) {
+ var sentinel = { sentinel: true };
+ var func = function (a, b) {
+ // eslint-disable-next-line no-invalid-this
+ return [this, a, b];
+ };
+ t.equal(func.length, 2, 'original function length is 2');
+ t.deepEqual(func(), [undefined, undefined, undefined], 'unbound func with too few args');
+ t.deepEqual(func(1, 2), [undefined, 1, 2], 'unbound func with right args');
+ t.deepEqual(func(1, 2, 3), [undefined, 1, 2], 'unbound func with too many args');
+
+ var bound = callBind(func);
+ t.equal(bound.length, func.length + 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths });
+ t.deepEqual(bound(), [undefined, undefined, undefined], 'bound func with too few args');
+ t.deepEqual(bound(1, 2), [1, 2, undefined], 'bound func with right args');
+ t.deepEqual(bound(1, 2, 3), [1, 2, 3], 'bound func with too many args');
+
+ var boundR = callBind(func, sentinel);
+ t.equal(boundR.length, func.length, 'function length is preserved', { skip: !functionsHaveConfigurableLengths });
+ t.deepEqual(boundR(), [sentinel, undefined, undefined], 'bound func with receiver, with too few args');
+ t.deepEqual(boundR(1, 2), [sentinel, 1, 2], 'bound func with receiver, with right args');
+ t.deepEqual(boundR(1, 2, 3), [sentinel, 1, 2], 'bound func with receiver, with too many args');
+
+ var boundArg = callBind(func, sentinel, 1);
+ t.equal(boundArg.length, func.length - 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths });
+ t.deepEqual(boundArg(), [sentinel, 1, undefined], 'bound func with receiver and arg, with too few args');
+ t.deepEqual(boundArg(2), [sentinel, 1, 2], 'bound func with receiver and arg, with right arg');
+ t.deepEqual(boundArg(2, 3), [sentinel, 1, 2], 'bound func with receiver and arg, with too many args');
+
+ t.test('callBind.apply', function (st) {
+ var aBound = callBind.apply(func);
+ st.deepEqual(aBound(sentinel), [sentinel, undefined, undefined], 'apply-bound func with no args');
+ st.deepEqual(aBound(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args');
+ st.deepEqual(aBound(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args');
+
+ var aBoundArg = callBind.apply(func);
+ st.deepEqual(aBoundArg(sentinel, [1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with too many args');
+ st.deepEqual(aBoundArg(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args');
+ st.deepEqual(aBoundArg(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args');
+
+ var aBoundR = callBind.apply(func, sentinel);
+ st.deepEqual(aBoundR([1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with receiver and too many args');
+ st.deepEqual(aBoundR([1, 2], 4), [sentinel, 1, 2], 'apply-bound func with receiver and right args');
+ st.deepEqual(aBoundR([1], 4), [sentinel, 1, undefined], 'apply-bound func with receiver and too few args');
+
+ st.end();
+ });
+
+ t.end();
+});
diff --git a/node_modules/caw/index.js b/node_modules/caw/index.js
new file mode 100644
index 00000000..69e7f554
--- /dev/null
+++ b/node_modules/caw/index.js
@@ -0,0 +1,37 @@
+'use strict';
+const url = require('url');
+const getProxy = require('get-proxy');
+const isurl = require('isurl');
+const tunnelAgent = require('tunnel-agent');
+const urlToOptions = require('url-to-options');
+
+module.exports = (proxy, opts) => {
+ proxy = proxy || getProxy();
+ opts = Object.assign({}, opts);
+
+ if (typeof proxy === 'object') {
+ opts = proxy;
+ proxy = getProxy();
+ }
+
+ if (!proxy) {
+ return null;
+ }
+
+ proxy = isurl.lenient(proxy) ? urlToOptions(proxy) : url.parse(proxy);
+
+ const uriProtocol = opts.protocol === 'https' ? 'https' : 'http';
+ const proxyProtocol = proxy.protocol === 'https:' ? 'Https' : 'Http';
+ const port = proxy.port || (proxyProtocol === 'Https' ? 443 : 80);
+ const method = `${uriProtocol}Over${proxyProtocol}`;
+
+ delete opts.protocol;
+
+ return tunnelAgent[method](Object.assign({
+ proxy: {
+ port,
+ host: proxy.hostname,
+ proxyAuth: proxy.auth
+ }
+ }, opts));
+};
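A usage sketch under assumed values (the proxy URL and target host below are placeholders, not taken from this diff): `opts.protocol` selects the endpoint protocol, so an HTTP proxy in front of an HTTPS endpoint resolves to tunnel-agent's `httpsOverHttp` method.

```js
const https = require('https');
const caw = require('caw');

// HTTP proxy in front of an HTTPS endpoint -> tunnelAgent.httpsOverHttp(...)
const agent = caw('http://127.0.0.1:8080', {protocol: 'https'});

https.get({host: 'example.com', agent}, response => {
	console.log(response.statusCode);
});
```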
diff --git a/node_modules/caw/license b/node_modules/caw/license
new file mode 100644
index 00000000..db6bc32c
--- /dev/null
+++ b/node_modules/caw/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Kevin Mårtensson (github.com/kevva)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/caw/package.json b/node_modules/caw/package.json
new file mode 100644
index 00000000..15274e72
--- /dev/null
+++ b/node_modules/caw/package.json
@@ -0,0 +1,49 @@
+{
+ "name": "caw",
+ "version": "2.0.1",
+ "description": "Construct HTTP/HTTPS agents for tunneling proxies",
+ "license": "MIT",
+ "repository": "kevva/caw",
+ "author": {
+ "email": "kevinmartensson@gmail.com",
+ "name": "Kevin Mårtensson",
+ "url": "github.com/kevva"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "http",
+ "https",
+ "proxy",
+ "tunnel"
+ ],
+ "dependencies": {
+ "get-proxy": "^2.0.0",
+ "isurl": "^1.0.0-alpha5",
+ "tunnel-agent": "^0.6.0",
+ "url-to-options": "^1.0.1"
+ },
+ "devDependencies": {
+ "ava": "*",
+ "create-cert": "^1.0.4",
+ "get-port": "^3.1.0",
+ "got": "^7.0.0",
+ "pify": "^3.0.0",
+ "proxyquire": "^1.7.9",
+ "sinon": "^2.3.1",
+ "universal-url": "1.0.0-alpha",
+ "xo": "*"
+ },
+ "xo": {
+ "rules": {
+ "ava/no-skip-test": 0
+ }
+ }
+}
diff --git a/node_modules/caw/readme.md b/node_modules/caw/readme.md
new file mode 100644
index 00000000..447191f3
--- /dev/null
+++ b/node_modules/caw/readme.md
@@ -0,0 +1,51 @@
+# caw
+
+> Construct HTTP/HTTPS agents for tunneling proxies
+
+
+## Install
+
+```
+$ npm install caw
+```
+
+
+## Usage
+
+```js
+const caw = require('caw');
+const got = require('got');
+
+got('todomvc.com', {
+ agent: caw()
+}, () => {});
+```
+
+
+## API
+
+### caw([proxy], [options])
+
+#### proxy
+
+Type: `string`
+
+Proxy URL. If not set, it'll try getting it using [`get-proxy`](https://github.com/kevva/get-proxy).
+
+#### options
+
+Type: `Object`
+
+Besides the options below, you can pass in options allowed in [tunnel-agent](https://github.com/request/tunnel-agent).
+
+##### protocol
+
+Type: `string`
+Default: `http`
+
+Endpoint protocol.
+
+
+## License
+
+MIT © [Kevin Mårtensson](https://github.com/kevva)
diff --git a/node_modules/chalk/index.d.ts b/node_modules/chalk/index.d.ts
new file mode 100644
index 00000000..9cd88f38
--- /dev/null
+++ b/node_modules/chalk/index.d.ts
@@ -0,0 +1,415 @@
+/**
+Basic foreground colors.
+
+[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
+*/
+declare type ForegroundColor =
+ | 'black'
+ | 'red'
+ | 'green'
+ | 'yellow'
+ | 'blue'
+ | 'magenta'
+ | 'cyan'
+ | 'white'
+ | 'gray'
+ | 'grey'
+ | 'blackBright'
+ | 'redBright'
+ | 'greenBright'
+ | 'yellowBright'
+ | 'blueBright'
+ | 'magentaBright'
+ | 'cyanBright'
+ | 'whiteBright';
+
+/**
+Basic background colors.
+
+[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
+*/
+declare type BackgroundColor =
+ | 'bgBlack'
+ | 'bgRed'
+ | 'bgGreen'
+ | 'bgYellow'
+ | 'bgBlue'
+ | 'bgMagenta'
+ | 'bgCyan'
+ | 'bgWhite'
+ | 'bgGray'
+ | 'bgGrey'
+ | 'bgBlackBright'
+ | 'bgRedBright'
+ | 'bgGreenBright'
+ | 'bgYellowBright'
+ | 'bgBlueBright'
+ | 'bgMagentaBright'
+ | 'bgCyanBright'
+ | 'bgWhiteBright';
+
+/**
+Basic colors.
+
+[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
+*/
+declare type Color = ForegroundColor | BackgroundColor;
+
+declare type Modifiers =
+ | 'reset'
+ | 'bold'
+ | 'dim'
+ | 'italic'
+ | 'underline'
+ | 'inverse'
+ | 'hidden'
+ | 'strikethrough'
+ | 'visible';
+
+declare namespace chalk {
+ /**
+ Levels:
+ - `0` - All colors disabled.
+ - `1` - Basic 16 colors support.
+ - `2` - ANSI 256 colors support.
+ - `3` - Truecolor 16 million colors support.
+ */
+ type Level = 0 | 1 | 2 | 3;
+
+ interface Options {
+ /**
+ Specify the color support for Chalk.
+
+ By default, color support is automatically detected based on the environment.
+
+ Levels:
+ - `0` - All colors disabled.
+ - `1` - Basic 16 colors support.
+ - `2` - ANSI 256 colors support.
+ - `3` - Truecolor 16 million colors support.
+ */
+ level?: Level;
+ }
+
+ /**
+ Return a new Chalk instance.
+ */
+ type Instance = new (options?: Options) => Chalk;
+
+ /**
+ Detect whether the terminal supports color.
+ */
+ interface ColorSupport {
+ /**
+ The color level used by Chalk.
+ */
+ level: Level;
+
+ /**
+ Return whether Chalk supports basic 16 colors.
+ */
+ hasBasic: boolean;
+
+ /**
+ Return whether Chalk supports ANSI 256 colors.
+ */
+ has256: boolean;
+
+ /**
+ Return whether Chalk supports Truecolor 16 million colors.
+ */
+ has16m: boolean;
+ }
+
+ interface ChalkFunction {
+ /**
+ Use a template string.
+
+ @remarks Template literals are unsupported for nested calls (see [issue #341](https://github.com/chalk/chalk/issues/341))
+
+ @example
+ ```
+ import chalk = require('chalk');
+
+ log(chalk`
+ CPU: {red ${cpu.totalPercent}%}
+ RAM: {green ${ram.used / ram.total * 100}%}
+ DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%}
+ `);
+ ```
+
+ @example
+ ```
+ import chalk = require('chalk');
+
+ log(chalk.red.bgBlack`2 + 3 = {bold ${2 + 3}}`)
+ ```
+ */
+ (text: TemplateStringsArray, ...placeholders: unknown[]): string;
+
+ (...text: unknown[]): string;
+ }
+
+ interface Chalk extends ChalkFunction {
+ /**
+ Return a new Chalk instance.
+ */
+ Instance: Instance;
+
+ /**
+ The color support for Chalk.
+
+ By default, color support is automatically detected based on the environment.
+
+ Levels:
+ - `0` - All colors disabled.
+ - `1` - Basic 16 colors support.
+ - `2` - ANSI 256 colors support.
+ - `3` - Truecolor 16 million colors support.
+ */
+ level: Level;
+
+ /**
+ Use HEX value to set text color.
+
+ @param color - Hexadecimal value representing the desired color.
+
+ @example
+ ```
+ import chalk = require('chalk');
+
+ chalk.hex('#DEADED');
+ ```
+ */
+ hex(color: string): Chalk;
+
+ /**
+ Use keyword color value to set text color.
+
+ @param color - Keyword value representing the desired color.
+
+ @example
+ ```
+ import chalk = require('chalk');
+
+ chalk.keyword('orange');
+ ```
+ */
+ keyword(color: string): Chalk;
+
+ /**
+ Use RGB values to set text color.
+ */
+ rgb(red: number, green: number, blue: number): Chalk;
+
+ /**
+ Use HSL values to set text color.
+ */
+ hsl(hue: number, saturation: number, lightness: number): Chalk;
+
+ /**
+ Use HSV values to set text color.
+ */
+ hsv(hue: number, saturation: number, value: number): Chalk;
+
+ /**
+ Use HWB values to set text color.
+ */
+ hwb(hue: number, whiteness: number, blackness: number): Chalk;
+
+ /**
+ Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set text color.
+
+ 30 <= code && code < 38 || 90 <= code && code < 98
+ For example, 31 for red, 91 for redBright.
+ */
+ ansi(code: number): Chalk;
+
+ /**
+ Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
+ */
+ ansi256(index: number): Chalk;
+
+ /**
+ Use HEX value to set background color.
+
+ @param color - Hexadecimal value representing the desired color.
+
+ @example
+ ```
+ import chalk = require('chalk');
+
+ chalk.bgHex('#DEADED');
+ ```
+ */
+ bgHex(color: string): Chalk;
+
+ /**
+ Use keyword color value to set background color.
+
+ @param color - Keyword value representing the desired color.
+
+ @example
+ ```
+ import chalk = require('chalk');
+
+ chalk.bgKeyword('orange');
+ ```
+ */
+ bgKeyword(color: string): Chalk;
+
+ /**
+ Use RGB values to set background color.
+ */
+ bgRgb(red: number, green: number, blue: number): Chalk;
+
+ /**
+ Use HSL values to set background color.
+ */
+ bgHsl(hue: number, saturation: number, lightness: number): Chalk;
+
+ /**
+ Use HSV values to set background color.
+ */
+ bgHsv(hue: number, saturation: number, value: number): Chalk;
+
+ /**
+ Use HWB values to set background color.
+ */
+ bgHwb(hue: number, whiteness: number, blackness: number): Chalk;
+
+ /**
+ Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set background color.
+
+ 30 <= code && code < 38 || 90 <= code && code < 98
+ For example, 31 for red, 91 for redBright.
+ Use the foreground code, not the background code (for example, not 41, nor 101).
+ */
+ bgAnsi(code: number): Chalk;
+
+ /**
+ Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set background color.
+ */
+ bgAnsi256(index: number): Chalk;
+
+ /**
+ Modifier: Resets the current color chain.
+ */
+ readonly reset: Chalk;
+
+ /**
+ Modifier: Make text bold.
+ */
+ readonly bold: Chalk;
+
+ /**
+ Modifier: Emitting only a small amount of light.
+ */
+ readonly dim: Chalk;
+
+ /**
+ Modifier: Make text italic. (Not widely supported)
+ */
+ readonly italic: Chalk;
+
+ /**
+ Modifier: Make text underline. (Not widely supported)
+ */
+ readonly underline: Chalk;
+
+ /**
+ Modifier: Inverse background and foreground colors.
+ */
+ readonly inverse: Chalk;
+
+ /**
+ Modifier: Prints the text, but makes it invisible.
+ */
+ readonly hidden: Chalk;
+
+ /**
+ Modifier: Puts a horizontal line through the center of the text. (Not widely supported)
+ */
+ readonly strikethrough: Chalk;
+
+ /**
+ Modifier: Prints the text only when Chalk has a color support level > 0.
+ Can be useful for things that are purely cosmetic.
+ */
+ readonly visible: Chalk;
+
+ readonly black: Chalk;
+ readonly red: Chalk;
+ readonly green: Chalk;
+ readonly yellow: Chalk;
+ readonly blue: Chalk;
+ readonly magenta: Chalk;
+ readonly cyan: Chalk;
+ readonly white: Chalk;
+
+ /*
+ Alias for `blackBright`.
+ */
+ readonly gray: Chalk;
+
+ /*
+ Alias for `blackBright`.
+ */
+ readonly grey: Chalk;
+
+ readonly blackBright: Chalk;
+ readonly redBright: Chalk;
+ readonly greenBright: Chalk;
+ readonly yellowBright: Chalk;
+ readonly blueBright: Chalk;
+ readonly magentaBright: Chalk;
+ readonly cyanBright: Chalk;
+ readonly whiteBright: Chalk;
+
+ readonly bgBlack: Chalk;
+ readonly bgRed: Chalk;
+ readonly bgGreen: Chalk;
+ readonly bgYellow: Chalk;
+ readonly bgBlue: Chalk;
+ readonly bgMagenta: Chalk;
+ readonly bgCyan: Chalk;
+ readonly bgWhite: Chalk;
+
+ /*
+ Alias for `bgBlackBright`.
+ */
+ readonly bgGray: Chalk;
+
+ /*
+ Alias for `bgBlackBright`.
+ */
+ readonly bgGrey: Chalk;
+
+ readonly bgBlackBright: Chalk;
+ readonly bgRedBright: Chalk;
+ readonly bgGreenBright: Chalk;
+ readonly bgYellowBright: Chalk;
+ readonly bgBlueBright: Chalk;
+ readonly bgMagentaBright: Chalk;
+ readonly bgCyanBright: Chalk;
+ readonly bgWhiteBright: Chalk;
+ }
+}
+
+/**
+Main Chalk object that allows styles to be chained together.
+Call the last one as a method with a string argument.
+Order doesn't matter, and later styles take precedence in case of a conflict.
+This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`.
+*/
+declare const chalk: chalk.Chalk & chalk.ChalkFunction & {
+ supportsColor: chalk.ColorSupport | false;
+ Level: chalk.Level;
+ Color: Color;
+ ForegroundColor: ForegroundColor;
+ BackgroundColor: BackgroundColor;
+ Modifiers: Modifiers;
+ stderr: chalk.Chalk & {supportsColor: chalk.ColorSupport | false};
+};
+
+export = chalk;
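A short JavaScript sketch of the `Instance` and `level` members described in these typings (the level values are chosen purely for illustration):

```js
const chalk = require('chalk');

// A separate instance with a fixed support level, independent of auto-detection.
const basic = new chalk.Instance({level: 1});
console.log(basic.red.bold('rendered with basic 16-color codes'));

// The default instance's level can also be changed at runtime; 0 disables styling.
chalk.level = 0;
console.log(chalk.green('rendered without any ANSI codes'));
```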
diff --git a/node_modules/chalk/license b/node_modules/chalk/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/chalk/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/chalk/node_modules/has-flag/index.d.ts b/node_modules/chalk/node_modules/has-flag/index.d.ts
new file mode 100644
index 00000000..a0a48c89
--- /dev/null
+++ b/node_modules/chalk/node_modules/has-flag/index.d.ts
@@ -0,0 +1,39 @@
+/**
+Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag.
+
+@param flag - CLI flag to look for. The `--` prefix is optional.
+@param argv - CLI arguments. Default: `process.argv`.
+@returns Whether the flag exists.
+
+@example
+```
+// $ ts-node foo.ts -f --unicorn --foo=bar -- --rainbow
+
+// foo.ts
+import hasFlag = require('has-flag');
+
+hasFlag('unicorn');
+//=> true
+
+hasFlag('--unicorn');
+//=> true
+
+hasFlag('f');
+//=> true
+
+hasFlag('-f');
+//=> true
+
+hasFlag('foo=bar');
+//=> true
+
+hasFlag('foo');
+//=> false
+
+hasFlag('rainbow');
+//=> false
+```
+*/
+declare function hasFlag(flag: string, argv?: string[]): boolean;
+
+export = hasFlag;
diff --git a/node_modules/chalk/node_modules/has-flag/index.js b/node_modules/chalk/node_modules/has-flag/index.js
new file mode 100644
index 00000000..b6f80b1f
--- /dev/null
+++ b/node_modules/chalk/node_modules/has-flag/index.js
@@ -0,0 +1,8 @@
+'use strict';
+
+module.exports = (flag, argv = process.argv) => {
+ const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
+ const position = argv.indexOf(prefix + flag);
+ const terminatorPosition = argv.indexOf('--');
+ return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
+};
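A small sketch of the terminator behaviour implemented above, using an explicit `argv` instead of `process.argv`:

```js
const hasFlag = require('has-flag');

const argv = ['-f', '--unicorn', '--foo=bar', '--', '--rainbow'];

console.log(hasFlag('unicorn', argv)); // true  (the '--' prefix is added automatically)
console.log(hasFlag('f', argv));       // true  (single characters get a single dash)
console.log(hasFlag('rainbow', argv)); // false (it only appears after the '--' terminator)
```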
diff --git a/node_modules/chalk/node_modules/has-flag/license b/node_modules/chalk/node_modules/has-flag/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/chalk/node_modules/has-flag/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/chalk/node_modules/has-flag/package.json b/node_modules/chalk/node_modules/has-flag/package.json
new file mode 100644
index 00000000..a9cba4b8
--- /dev/null
+++ b/node_modules/chalk/node_modules/has-flag/package.json
@@ -0,0 +1,46 @@
+{
+ "name": "has-flag",
+ "version": "4.0.0",
+ "description": "Check if argv has a specific flag",
+ "license": "MIT",
+ "repository": "sindresorhus/has-flag",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "keywords": [
+ "has",
+ "check",
+ "detect",
+ "contains",
+ "find",
+ "flag",
+ "cli",
+ "command-line",
+ "argv",
+ "process",
+ "arg",
+ "args",
+ "argument",
+ "arguments",
+ "getopt",
+ "minimist",
+ "optimist"
+ ],
+ "devDependencies": {
+ "ava": "^1.4.1",
+ "tsd": "^0.7.2",
+ "xo": "^0.24.0"
+ }
+}
diff --git a/node_modules/chalk/node_modules/has-flag/readme.md b/node_modules/chalk/node_modules/has-flag/readme.md
new file mode 100644
index 00000000..3f72dff2
--- /dev/null
+++ b/node_modules/chalk/node_modules/has-flag/readme.md
@@ -0,0 +1,89 @@
+# has-flag
+
+> Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag
+
+Correctly stops looking after an `--` argument terminator.
+
+
+## Install
+
+```
+$ npm install has-flag
+```
+
+
+## Usage
+
+```js
+// foo.js
+const hasFlag = require('has-flag');
+
+hasFlag('unicorn');
+//=> true
+
+hasFlag('--unicorn');
+//=> true
+
+hasFlag('f');
+//=> true
+
+hasFlag('-f');
+//=> true
+
+hasFlag('foo=bar');
+//=> true
+
+hasFlag('foo');
+//=> false
+
+hasFlag('rainbow');
+//=> false
+```
+
+```
+$ node foo.js -f --unicorn --foo=bar -- --rainbow
+```
+
+
+## API
+
+### hasFlag(flag, [argv])
+
+Returns a boolean for whether the flag exists.
+
+#### flag
+
+Type: `string`
+
+CLI flag to look for. The `--` prefix is optional.
+
+#### argv
+
+Type: `string[]`
+Default: `process.argv`
+
+CLI arguments.
+
+
+## Security
+
+To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure.
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/chalk/node_modules/supports-color/browser.js b/node_modules/chalk/node_modules/supports-color/browser.js
new file mode 100644
index 00000000..62afa3a7
--- /dev/null
+++ b/node_modules/chalk/node_modules/supports-color/browser.js
@@ -0,0 +1,5 @@
+'use strict';
+module.exports = {
+ stdout: false,
+ stderr: false
+};
diff --git a/node_modules/chalk/node_modules/supports-color/index.js b/node_modules/chalk/node_modules/supports-color/index.js
new file mode 100644
index 00000000..6fada390
--- /dev/null
+++ b/node_modules/chalk/node_modules/supports-color/index.js
@@ -0,0 +1,135 @@
+'use strict';
+const os = require('os');
+const tty = require('tty');
+const hasFlag = require('has-flag');
+
+const {env} = process;
+
+let forceColor;
+if (hasFlag('no-color') ||
+ hasFlag('no-colors') ||
+ hasFlag('color=false') ||
+ hasFlag('color=never')) {
+ forceColor = 0;
+} else if (hasFlag('color') ||
+ hasFlag('colors') ||
+ hasFlag('color=true') ||
+ hasFlag('color=always')) {
+ forceColor = 1;
+}
+
+if ('FORCE_COLOR' in env) {
+ if (env.FORCE_COLOR === 'true') {
+ forceColor = 1;
+ } else if (env.FORCE_COLOR === 'false') {
+ forceColor = 0;
+ } else {
+ forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);
+ }
+}
+
+function translateLevel(level) {
+ if (level === 0) {
+ return false;
+ }
+
+ return {
+ level,
+ hasBasic: true,
+ has256: level >= 2,
+ has16m: level >= 3
+ };
+}
+
+function supportsColor(haveStream, streamIsTTY) {
+ if (forceColor === 0) {
+ return 0;
+ }
+
+ if (hasFlag('color=16m') ||
+ hasFlag('color=full') ||
+ hasFlag('color=truecolor')) {
+ return 3;
+ }
+
+ if (hasFlag('color=256')) {
+ return 2;
+ }
+
+ if (haveStream && !streamIsTTY && forceColor === undefined) {
+ return 0;
+ }
+
+ const min = forceColor || 0;
+
+ if (env.TERM === 'dumb') {
+ return min;
+ }
+
+ if (process.platform === 'win32') {
+ // Windows 10 build 10586 is the first Windows release that supports 256 colors.
+ // Windows 10 build 14931 is the first release that supports 16m/TrueColor.
+ const osRelease = os.release().split('.');
+ if (
+ Number(osRelease[0]) >= 10 &&
+ Number(osRelease[2]) >= 10586
+ ) {
+ return Number(osRelease[2]) >= 14931 ? 3 : 2;
+ }
+
+ return 1;
+ }
+
+ if ('CI' in env) {
+ if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+ return 1;
+ }
+
+ return min;
+ }
+
+ if ('TEAMCITY_VERSION' in env) {
+ return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
+ }
+
+ if (env.COLORTERM === 'truecolor') {
+ return 3;
+ }
+
+ if ('TERM_PROGRAM' in env) {
+ const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
+
+ switch (env.TERM_PROGRAM) {
+ case 'iTerm.app':
+ return version >= 3 ? 3 : 2;
+ case 'Apple_Terminal':
+ return 2;
+ // No default
+ }
+ }
+
+ if (/-256(color)?$/i.test(env.TERM)) {
+ return 2;
+ }
+
+ if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
+ return 1;
+ }
+
+ if ('COLORTERM' in env) {
+ return 1;
+ }
+
+ return min;
+}
+
+function getSupportLevel(stream) {
+ const level = supportsColor(stream, stream && stream.isTTY);
+ return translateLevel(level);
+}
+
+module.exports = {
+ supportsColor: getSupportLevel,
+ stdout: translateLevel(supportsColor(true, tty.isatty(1))),
+ stderr: translateLevel(supportsColor(true, tty.isatty(2)))
+};
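Besides the precomputed `stdout`/`stderr` results, the exported `supportsColor` function can be re-run against any stream; a hedged sketch:

```js
const supportsColor = require('supports-color');

console.log('stdout level:', supportsColor.stdout ? supportsColor.stdout.level : 0);

// Re-evaluate for an arbitrary stream (here: stderr)
const stderrSupport = supportsColor.supportsColor(process.stderr);
console.log('stderr truecolor?', stderrSupport ? stderrSupport.has16m : false);
```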
diff --git a/node_modules/chalk/node_modules/supports-color/license b/node_modules/chalk/node_modules/supports-color/license
new file mode 100644
index 00000000..e7af2f77
--- /dev/null
+++ b/node_modules/chalk/node_modules/supports-color/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/chalk/node_modules/supports-color/package.json b/node_modules/chalk/node_modules/supports-color/package.json
new file mode 100644
index 00000000..f7182edc
--- /dev/null
+++ b/node_modules/chalk/node_modules/supports-color/package.json
@@ -0,0 +1,53 @@
+{
+ "name": "supports-color",
+ "version": "7.2.0",
+ "description": "Detect whether a terminal supports color",
+ "license": "MIT",
+ "repository": "chalk/supports-color",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "files": [
+ "index.js",
+ "browser.js"
+ ],
+ "keywords": [
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "cli",
+ "ansi",
+ "styles",
+ "tty",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "command-line",
+ "support",
+ "supports",
+ "capability",
+ "detect",
+ "truecolor",
+ "16m"
+ ],
+ "dependencies": {
+ "has-flag": "^4.0.0"
+ },
+ "devDependencies": {
+ "ava": "^1.4.1",
+ "import-fresh": "^3.0.0",
+ "xo": "^0.24.0"
+ },
+ "browser": "browser.js"
+}
diff --git a/node_modules/chalk/node_modules/supports-color/readme.md b/node_modules/chalk/node_modules/supports-color/readme.md
new file mode 100644
index 00000000..36542285
--- /dev/null
+++ b/node_modules/chalk/node_modules/supports-color/readme.md
@@ -0,0 +1,76 @@
+# supports-color
+
+> Detect whether a terminal supports color
+
+
+## Install
+
+```
+$ npm install supports-color
+```
+
+
+## Usage
+
+```js
+const supportsColor = require('supports-color');
+
+if (supportsColor.stdout) {
+ console.log('Terminal stdout supports color');
+}
+
+if (supportsColor.stdout.has256) {
+ console.log('Terminal stdout supports 256 colors');
+}
+
+if (supportsColor.stderr.has16m) {
+ console.log('Terminal stderr supports 16 million colors (truecolor)');
+}
+```
+
+
+## API
+
+Returns an `Object` with a `stdout` and `stderr` property for testing either stream. Each property is an `Object`, or `false` if color is not supported.
+
+The `stdout`/`stderr` objects specify a level of support for color through a `.level` property and a corresponding flag:
+
+- `.level = 1` and `.hasBasic = true`: Basic color support (16 colors)
+- `.level = 2` and `.has256 = true`: 256 color support
+- `.level = 3` and `.has16m = true`: Truecolor support (16 million colors)
+
+
+## Info
+
+It obeys the `--color` and `--no-color` CLI flags.
+
+For situations where using `--color` is not possible, use the environment variable `FORCE_COLOR=1` (level 1), `FORCE_COLOR=2` (level 2), or `FORCE_COLOR=3` (level 3) to forcefully enable color, or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks.
+
+Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively.
+
+
+## Related
+
+- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module
+- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right
+
+
+## Maintainers
+
+- [Sindre Sorhus](https://github.com/sindresorhus)
+- [Josh Junon](https://github.com/qix-)
+
+
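One practical consequence of the `FORCE_COLOR` handling documented above is that the variable is read once, when the module is first loaded; a minimal sketch:

```js
// Disable color regardless of TTY detection.
process.env.FORCE_COLOR = '0';

const supportsColor = require('supports-color');
console.log(supportsColor.stdout); // false (color forcibly disabled)
```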
diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json
new file mode 100644
index 00000000..47c23f29
--- /dev/null
+++ b/node_modules/chalk/package.json
@@ -0,0 +1,68 @@
+{
+ "name": "chalk",
+ "version": "4.1.2",
+ "description": "Terminal string styling done right",
+ "license": "MIT",
+ "repository": "chalk/chalk",
+ "funding": "https://github.com/chalk/chalk?sponsor=1",
+ "main": "source",
+ "engines": {
+ "node": ">=10"
+ },
+ "scripts": {
+ "test": "xo && nyc ava && tsd",
+ "bench": "matcha benchmark.js"
+ },
+ "files": [
+ "source",
+ "index.d.ts"
+ ],
+ "keywords": [
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "cli",
+ "string",
+ "str",
+ "ansi",
+ "style",
+ "styles",
+ "tty",
+ "formatting",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "log",
+ "logging",
+ "command-line",
+ "text"
+ ],
+ "dependencies": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ },
+ "devDependencies": {
+ "ava": "^2.4.0",
+ "coveralls": "^3.0.7",
+ "execa": "^4.0.0",
+ "import-fresh": "^3.1.0",
+ "matcha": "^0.7.0",
+ "nyc": "^15.0.0",
+ "resolve-from": "^5.0.0",
+ "tsd": "^0.7.4",
+ "xo": "^0.28.2"
+ },
+ "xo": {
+ "rules": {
+ "unicorn/prefer-string-slice": "off",
+ "unicorn/prefer-includes": "off",
+ "@typescript-eslint/member-ordering": "off",
+ "no-redeclare": "off",
+ "unicorn/string-content": "off",
+ "unicorn/better-regex": "off"
+ }
+ }
+}
diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md
new file mode 100644
index 00000000..a055d21c
--- /dev/null
+++ b/node_modules/chalk/readme.md
@@ -0,0 +1,341 @@
+# Chalk
+
+> Terminal string styling done right
+
+
+## Highlights
+
+- Expressive API
+- Highly performant
+- Ability to nest styles
+- [256/Truecolor color support](#256-and-truecolor-color-support)
+- Auto-detects color support
+- Doesn't extend `String.prototype`
+- Clean and focused
+- Actively maintained
+- [Used by ~50,000 packages](https://www.npmjs.com/browse/depended/chalk) as of January 1, 2020
+
+## Install
+
+```console
+$ npm install chalk
+```
+
+## Usage
+
+```js
+const chalk = require('chalk');
+
+console.log(chalk.blue('Hello world!'));
+```
+
+Chalk comes with an easy to use composable API where you just chain and nest the styles you want.
+
+```js
+const chalk = require('chalk');
+const log = console.log;
+
+// Combine styled and normal strings
+log(chalk.blue('Hello') + ' World' + chalk.red('!'));
+
+// Compose multiple styles using the chainable API
+log(chalk.blue.bgRed.bold('Hello world!'));
+
+// Pass in multiple arguments
+log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz'));
+
+// Nest styles
+log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!'));
+
+// Nest styles of the same type even (color, underline, background)
+log(chalk.green(
+ 'I am a green line ' +
+ chalk.blue.underline.bold('with a blue substring') +
+ ' that becomes green again!'
+));
+
+// ES2015 template literal
+log(`
+CPU: ${chalk.red('90%')}
+RAM: ${chalk.green('40%')}
+DISK: ${chalk.yellow('70%')}
+`);
+
+// ES2015 tagged template literal
+log(chalk`
+CPU: {red ${cpu.totalPercent}%}
+RAM: {green ${ram.used / ram.total * 100}%}
+DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%}
+`);
+
+// Use RGB colors in terminal emulators that support it.
+log(chalk.keyword('orange')('Yay for orange colored text!'));
+log(chalk.rgb(123, 45, 67).underline('Underlined reddish color'));
+log(chalk.hex('#DEADED').bold('Bold gray!'));
+```
+
+Easily define your own themes:
+
+```js
+const chalk = require('chalk');
+
+const error = chalk.bold.red;
+const warning = chalk.keyword('orange');
+
+console.log(error('Error!'));
+console.log(warning('Warning!'));
+```
+
+Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args):
+
+```js
+const name = 'Sindre';
+console.log(chalk.green('Hello %s'), name);
+//=> 'Hello Sindre'
+```
+
+## API
+
+### chalk.`