diff --git a/.env b/.env
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/.env
@@ -0,0 +1 @@
+
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..dc150eb5
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+/node_modules
+.env
diff --git a/README.md b/README.md
index 74859d8e..c8814b61 100644
--- a/README.md
+++ b/README.md
@@ -1,33 +1,79 @@
-> _Fork_ this learning task and get started. You will work out the outline below in your own GitHub environment during this task. You can find the instructions in: [docs/INSTRUCTIONS.md](docs/INSTRUCTIONS.md)
+# Performance Matters oba
+![CD4C69B9-BB73-4A29-8DE4-C875DC73297C](https://user-images.githubusercontent.com/94745953/236788424-0f177e4b-6cfa-497b-bd35-d4d7246b428f.jpg)
-# Title
-
+logo-oba
-## Table of contents
+[Visit the oba site](https://uninterested-shirt-seal.cyclic.app/)
- * [Description](#description)
- * [Usage](#usage)
- * [Features](#features)
- * [Installation](#installation)
- * [Sources](#sources)
- * [License](#license)
-## Description
-
-
+## 📚 Table of contents
+
+ * 📝 [Description](#description)
+ * 🖇 [Usage](#usage)
+ * 🔖 [Features](#features)
+ * 📲 [Installation](#installation)
+ * 💾 [Sources](#sources)
+ * 📠 [License](#license)
+
+## 📝 Description
+As a team we built a responsive website for our client Oba and applied several performance techniques.
+The site runs on server-side Node code and uses the OBA API to fetch information such as books, activities and courses.
+For the client-side markup we used EJS (HTML templates) and CSS.
+
+[Visit the oba site](https://uninterested-shirt-seal.cyclic.app/)
+
+## 🖇 Usage
-## Usage
+For our group assignment we worked with several user stories.
+We used a project board to describe all user stories and issues.
+
+User stories:
+- As a user I want to navigate easily on all devices
+- As a developer I want a clear style guide so I can understand the Oba house style and atmosphere and define the design language and design decisions
+- As a user I want to reserve a book/item
+- As a user I want to reserve a study spot
+
+## 🔖 Features
-## Features
+We used Node, EJS (HTML templates), Express, CSS and JavaScript.
+
+Because this learning task is about performance, we applied the techniques below to our website.
+
+Client-side performance:
+- Asset optimization (responsive images using modern image formats such as WebP and AVIF); a sketch of this conversion step follows below
+- Delivery optimization (lazy loading of images)
+- Build optimization
+
+To measure the result we tracked the Core Web Vitals:
+- LCP (loading)
+- FID (interaction)
+- CLS (visual stability)
+
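The lock file in this diff already includes `cwebp-bin`, so the WebP conversion mentioned above could be scripted roughly as follows. This is a minimal sketch, not part of the PR: the `convert-images.js` filename, the `public/images` folder and the quality setting are assumptions to adjust to the real project layout.

```js
// convert-images.js: possible pre-build step for the asset-optimization point above.
import { execFile } from "node:child_process";
import { promisify } from "node:util";
import { readdir } from "node:fs/promises";
import path from "node:path";
import cwebp from "cwebp-bin"; // exports the path to the cwebp binary

const run = promisify(execFile);
const imageDir = "public/images"; // assumed source folder

const files = await readdir(imageDir);
for (const file of files.filter((name) => /\.(jpe?g|png)$/i.test(name))) {
  const source = path.join(imageDir, file);
  const target = source.replace(/\.(jpe?g|png)$/i, ".webp");
  // -q sets the WebP quality, -o the output file
  await run(cwebp, [source, "-q", "75", "-o", target]);
  console.log(`converted ${source} -> ${target}`);
}
```

Running it once before deploying (for example with `node convert-images.js`) produces the `.webp` variants that the templates can then reference in `<picture>` elements.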
+## 📲 Installation
-## Installation
+
+## NPM
+Before we started on this project we first ran `npm install`, because we would be working with a REST API and databases. When you run `npm install`, npm reads the `package.json` file in the root of your project, which lists the dependencies of the project, and then downloads and installs those dependencies into the `node_modules` folder. In the Visual Studio Code terminal we used a few commands: `npm init` to initialise the project, `npm install` to install the dependencies and `npm start` to test it. We also set up nodemon so that the server restarts after every change we save; for this we used the command `npm install nodemon`.
+
+Express is a web framework for Node.js that is used to build web applications and APIs. EJS is a templating engine that is used to generate dynamic HTML pages based on data coming from a server. Combined, Express and EJS are often used to build dynamic websites.
+
+## .gitignore and .env
+For this project we used a .env file because we were working with real data. To protect the API key we created a .env file and put the API key in it, so the Oba API key stays protected. Because .env is listed in .gitignore, the API key is never pushed to GitHub and is not visible online.
+
+## Progressive Enhancement
+To make sure everyone can use the project, we followed the progressive-enhancement approach. We first built the content of the page with HTML, integrated into EJS. We then added functionality through the GET and POST methods, and used CSS to style the website in the OBA house style. A client-side sketch of this approach follows right after this README.
+
+
+## 💾 Sources
+- MDN documents
+- Pagespeed insights
+- Lighthouse
+- Can i use
+- DOC (instructions)
-## Sources
-## License
+## 📠 License
 This project is licensed under the terms of the [MIT license](./LICENSE).
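The progressive-enhancement section above keeps the reservation form working as a plain HTML POST to `/reserveren`. A small client-side script could then upgrade it to a `fetch` call when JavaScript is available. This is a hypothetical sketch, not a file in this PR; the `public/enhance-form.js` name and the form selector are assumptions.

```js
// public/enhance-form.js (hypothetical): the server-rendered form keeps working
// without this script, which is the core of progressive enhancement.
const form = document.querySelector('form[action="/reserveren"]');

if (form) {
  form.addEventListener("submit", async (event) => {
    event.preventDefault(); // only intercepted when JavaScript actually runs
    const body = Object.fromEntries(new FormData(form));

    try {
      const response = await fetch(form.action, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(body),
      });
      // The server redirects to /succes; follow that redirect in the browser
      window.location.href = response.url;
    } catch {
      // Network or script failure: fall back to the regular form submission
      form.submit();
    }
  });
}
```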
diff --git a/index.js b/index.js
new file mode 100644
index 00000000..115ab1d0
--- /dev/null
+++ b/index.js
@@ -0,0 +1,283 @@
+// Import the basic modules from npm
+import express from "express";
+import dotenv from "dotenv";
+import bodyParser from "body-parser";
+
+// Load the .env file
+dotenv.config();
+
+// Create a new express app
+const server = express();
+
+// Serve the public folder
+server.use(express.static("public"));
+
+// Set the port number express will listen on
+server.set("port", process.env.PORT || 8000);
+
+// Handle the forms
+server.use(bodyParser.urlencoded({
+  extended: true
+}));
+server.use(bodyParser.json());
+
+// Configure how express is used
+server.set("view engine", "ejs");
+server.set("views", "./views");
+
+// Start express on the configured port number
+server.listen(server.get("port"), function () {
+  // Show a message in the console with the port number
+  console.log(
+    `Application started on http://localhost:${server.get(
+      "port"
+    )}`
+  );
+});
+
+// Extensions for the URL
+const space = "%20";
+const bookItems = "boeken";
+
+// Endpoints for the URL
+const urlSearch = "search/";
+
+// Building blocks for the API URL
+const urlBase = "https://zoeken.oba.nl/api/v1/";
+const urlQuery = "?q=";
+const urlDefault = "special:all";
+const urlKey = `${process.env.KEY}`;
+const urlOutput = "&refine=true&output=json";
+
+// URLs for the activities and courses
+const activityURL =
+  urlBase +
+  "search/?q=special:all%20table:activiteiten&authorization=" +
+  process.env.authorization +
+  "&output=json";
+const courseURL =
+  urlBase +
+  "search/?q=special:all%20table:jsonsrc&authorization=" +
+  process.env.authorization +
+  "&output=json";
+
+const defaultUrl =
+  urlBase +
+  urlSearch +
+  urlQuery +
+  urlDefault +
+  space +
+  bookItems +
+  urlKey +
+  urlOutput;
+
+// Route for the index page
+server.get("/", (request, response) => {
+  fetchJson(defaultUrl).then((data) => {
+    response.render("index", data);
+  });
+});
+
+// Route for the detail page
+server.get("/item", async (request, response) => {
+  let uniqueQuery = "?id=";
+  let urlId = request.query.id || "";
+
+  const itemUrl =
+    urlBase +
+    urlSearch +
+    uniqueQuery +
+    urlId +
+    urlKey +
+    urlOutput;
+
+  const data = await fetch(itemUrl)
+    .then((response) => response.json())
+    .catch((err) => err);
+  response.render("item", data);
+});
+
+// Route for the regular reservation page: fetch the selected item and render once
+server.get("/reserveren", async (request, response) => {
+  let uniqueQuery = "?id=";
+  let urlId = request.query.id || "|oba-catalogus|279240";
+
+  const itemUrl =
+    urlBase +
+    urlSearch +
+    uniqueQuery +
+    urlId +
+    urlKey +
+    urlOutput;
+
+  const data = await fetch(itemUrl)
+    .then((response) => response.json())
+    .catch((err) => err);
+  response.render("reserveren", data);
+});
+
+// Send the reservation data to the API
+server.post("/reserveren", (request, response) => {
+  const baseurl = "https://api.oba.fdnd.nl/api/v1";
+  const url = `${baseurl}/reserveringen`;
+
+  postJson(url, request.body).then((data) => {
+    const newReservering = { ...request.body };
+    console.log(newReservering, data.id ? "reservation stored" : "no id returned");
+    // Either way the user ends up on the success page
+    response.redirect("/succes");
+  });
+});
+
+// Route for the study-spot reservation page: fetch the reservations and the
+// branch (vestiging) photos, then render once
+server.get("/reserveer-een-studieplek", (request, response) => {
+  const baseurl = "https://api.oba.fdnd.nl/api/v1";
+  const reservationsUrl = `${baseurl}/studieplekReserveringen`;
+  const branchesUrl = `${baseurl}/vestigingen?clgnoumd6fttt0buw6rwa00pa`;
+
+  Promise.all([fetchJson(reservationsUrl), fetchJson(branchesUrl)]).then(
+    ([reservations, branches]) => {
+      // Assumes the two responses do not share top-level keys
+      response.render("reserveer-een-studieplek", {
+        ...reservations,
+        ...branches,
+      });
+    }
+  );
+});
+
+server.get("/succes", (request, response) => {
+  response.render("succes");
+});
+
+// Send the study-spot data to the API
+server.post(
+  "/reserveer-een-studieplek",
+  (request, response) => {
+    const baseurl = "https://api.oba.fdnd.nl/api/v1";
+    const url = `${baseurl}/studieplekReserveringen`;
+
+    postJson(url, request.body).then((data) => {
+      let newReservation = {
+        ...request.body,
+      };
+
+      console.log(data);
+
+      if (data.success) {
+        response.redirect("/");
+      } else {
+        const errormessage = `${data.message}: This may be because the id already exists.`;
+        const newdata = {
+          error: errormessage,
+          values: newReservation,
+        };
+
+        response.render(
+          "reserveer-een-studieplek",
+          newdata
+        );
+      }
+
+      console.log(JSON.stringify(data.errors));
+    });
+  }
+);
+
+// Route for the activities page
+server.get("/activiteiten", (request, response) => {
+  fetchJson(activityURL).then((data) => {
+    let dataClone = structuredClone(data);
+
+    if (request.query.titles) {
+      dataClone.results.titles =
+        dataClone.results.titles.filter(function (title) {
+          // Keep only the titles that match the ?titles= query parameter
+          return title.includes(request.query.titles);
+        });
+    }
+
+    response.render("activiteiten", dataClone);
+  });
+});
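The activities route above and the courses route below apply the same title filter. As a refactoring sketch only (the `filterTitles` helper is not part of this PR), the shared logic could be pulled out like this:

```js
// Hypothetical helper shared by the /activiteiten and /cursussen routes:
// returns a clone of the API data with results.titles narrowed to the query.
function filterTitles(data, query) {
  const clone = structuredClone(data);
  if (query) {
    clone.results.titles = clone.results.titles.filter((title) =>
      title.includes(query)
    );
  }
  return clone;
}

// Usage inside a route handler:
// fetchJson(activityURL).then((data) =>
//   response.render("activiteiten", filterTitles(data, request.query.titles))
// );
```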
+// Route for the courses page
+server.get("/cursussen", (request, response) => {
+  fetchJson(courseURL).then((data) => {
+    let dataClone = structuredClone(data);
+
+    if (request.query.titles) {
+      dataClone.results.titles =
+        dataClone.results.titles.filter(function (title) {
+          // Keep only the titles that match the ?titles= query parameter
+          return title.includes(request.query.titles);
+        });
+    }
+    response.render("cursussen", dataClone);
+  });
+});
+
+// Route for the branches (vestigingen) page
+server.get(
+  "/vestigingen",
+  (request, response) => {
+    const baseurl = "https://api.oba.fdnd.nl/api/v1";
+    const url = `${baseurl}/vestigingen`;
+
+    fetchJson(url).then((data) => {
+      response.render("vestigingen", data);
+    });
+  }
+);
+
+/**
+ * fetchJson() is a wrapper for the node fetch api. It fetches the url
+ * passed as a parameter and returns the response body parsed as json.
+ * @param {*} url the api endpoint to address
+ * @param {*} payload optional fetch options
+ * @returns the json response from the api endpoint
+ */
+export async function fetchJson(url, payload = {}) {
+  return await fetch(url, payload)
+    .then((response) => response.json())
+    .catch((error) => error);
+}
+
+// postJson() sends a JSON body to the given url and returns the parsed response
+export async function postJson(url, body) {
+  return await fetch(url, {
+    method: "post",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+    },
+  })
+    .then((response) => response.json())
+    .catch((error) => error);
+}
\ No newline at end of file
diff --git a/node_modules/.bin/cwebp b/node_modules/.bin/cwebp new file mode 120000 index 00000000..d9f894f7 --- /dev/null +++ b/node_modules/.bin/cwebp @@ -0,0 +1 @@ +../cwebp-bin/cli.js \ No newline at end of file diff --git a/node_modules/.bin/ejs b/node_modules/.bin/ejs new file mode 120000 index 00000000..88e80d01 --- /dev/null +++ b/node_modules/.bin/ejs @@ -0,0 +1 @@ +../ejs/bin/cli.js \ No newline at end of file diff --git a/node_modules/.bin/jake b/node_modules/.bin/jake new file mode 120000 index 00000000..36267456 --- /dev/null +++ b/node_modules/.bin/jake @@ -0,0 +1 @@ +../jake/bin/cli.js \ No newline at end of file diff --git a/node_modules/.bin/mime b/node_modules/.bin/mime new file mode 120000 index 00000000..fbb7ee0e --- /dev/null +++ b/node_modules/.bin/mime @@ -0,0 +1 @@ +../mime/cli.js \ No newline at end of file diff --git a/node_modules/.bin/nodemon b/node_modules/.bin/nodemon new file mode 120000 index 00000000..1056ddc1 --- /dev/null +++ b/node_modules/.bin/nodemon @@ -0,0 +1 @@ +../nodemon/bin/nodemon.js \ No newline at end of file diff --git a/node_modules/.bin/nodetouch b/node_modules/.bin/nodetouch new file mode 120000 index 00000000..3409fdb7 --- /dev/null +++ b/node_modules/.bin/nodetouch @@ -0,0 +1 @@ +../touch/bin/nodetouch.js \ No newline at end of file diff --git a/node_modules/.bin/nopt b/node_modules/.bin/nopt new file mode 120000 index 00000000..6b6566ea --- /dev/null +++ b/node_modules/.bin/nopt @@ -0,0 +1 @@ +../nopt/bin/nopt.js \ No newline at end of file diff --git a/node_modules/.bin/rimraf b/node_modules/.bin/rimraf new file mode 120000 index 00000000..4cd49a49 --- /dev/null +++ b/node_modules/.bin/rimraf @@ -0,0 +1 @@ +../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/.bin/seek-bunzip b/node_modules/.bin/seek-bunzip new file mode 120000 index 00000000..7bda56e7 --- /dev/null +++ b/node_modules/.bin/seek-bunzip @@ -0,0 +1 @@ +../seek-bzip/bin/seek-bunzip \ No newline at end of file diff --git a/node_modules/.bin/seek-table b/node_modules/.bin/seek-table new file mode 120000 index 00000000..77217106 --- /dev/null +++ b/node_modules/.bin/seek-table @@ -0,0 +1 @@ +../seek-bzip/bin/seek-bzip-table \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 120000 index 00000000..317eb293 --- /dev/null +++ 
b/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver \ No newline at end of file diff --git a/node_modules/.bin/uuid b/node_modules/.bin/uuid new file mode 120000 index 00000000..b3e45bc5 --- /dev/null +++ b/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/bin/uuid \ No newline at end of file diff --git a/node_modules/.bin/which b/node_modules/.bin/which new file mode 120000 index 00000000..f62471c8 --- /dev/null +++ b/node_modules/.bin/which @@ -0,0 +1 @@ +../which/bin/which \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 00000000..76d31f3b --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,3211 @@ +{ + "name": "performance-matters-oba", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@sindresorhus/is": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz", + "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==", + "engines": { + "node": ">=4" + } + }, + "node_modules/@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==" + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": 
"https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/archive-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/archive-type/-/archive-type-4.0.0.tgz", + "integrity": "sha512-zV4Ky0v1F8dBrdYElwTvQhweQ0P7Kwc1aluqJsYtOBP01jXcWCyW2IEfI1YiqsG+Iy7ZR+o5LF1N+PGECBxHWA==", + "dependencies": { + "file-type": "^4.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/archive-type/node_modules/file-type": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz", + "integrity": "sha512-f2UbFQEk7LXgWpi5ntcO86OeA/cC80fuDDDaX/fZ2ZGel+AF7leRQqBBW1eJNiiQkrZlAoM6P+VYP5P6bOlDEQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "node_modules/array-union": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-3.0.1.tgz", + "integrity": "sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/async": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bin-build": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bin-build/-/bin-build-3.0.0.tgz", + "integrity": "sha512-jcUOof71/TNAI2uM5uoUaDq2ePcVBQ3R/qhxAz1rX7UfvduAL/RXD3jXzvn8cVcDJdGVkiR1shal3OH0ImpuhA==", + "dependencies": { + "decompress": "^4.0.0", + "download": "^6.2.2", + "execa": "^0.7.0", + "p-map-series": "^1.0.0", + "tempfile": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-check": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/bin-check/-/bin-check-4.1.0.tgz", + "integrity": "sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==", + "dependencies": { + "execa": "^0.7.0", + "executable": "^4.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-version": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bin-version/-/bin-version-3.1.0.tgz", + "integrity": "sha512-Mkfm4iE1VFt4xd4vH+gx+0/71esbfus2LsnCGe8Pi4mndSPyT+NGES/Eg99jx8/lUGWfu3z2yuB/bt5UB+iVbQ==", + "dependencies": { + "execa": "^1.0.0", + "find-versions": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-version-check": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/bin-version-check/-/bin-version-check-4.0.0.tgz", + "integrity": "sha512-sR631OrhC+1f8Cvs8WyVWOA33Y8tgwjETNPyyD/myRBXLkfS/vl74FmH/lFcRl9KY3zwGh7jFhvyk9vV3/3ilQ==", + "dependencies": { + "bin-version": "^3.0.0", + "semver": "^5.6.0", + "semver-truncate": "^1.1.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-version/node_modules/cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/bin-version/node_modules/execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "dependencies": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-version/node_modules/get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-wrapper": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bin-wrapper/-/bin-wrapper-4.1.0.tgz", + "integrity": "sha512-hfRmo7hWIXPkbpi0ZltboCMVrU+0ClXR/JgbCKKjlDjQf6igXa7OwdqNcFWQZPZTgiY7ZpzE3+LjjkLiTN2T7Q==", + "dependencies": { + "bin-check": "^4.1.0", + "bin-version-check": "^4.0.0", + "download": "^7.1.0", + "import-lazy": "^3.1.0", + "os-filter-obj": "^2.0.0", + "pify": "^4.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-wrapper/node_modules/download": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/download/-/download-7.1.0.tgz", + "integrity": "sha512-xqnBTVd/E+GxJVrX5/eUJiLYjCGPwMpdL+jGhGU57BvtcA7wwhtHVbXBeUk51kOpW3S7Jn3BQbN9Q1R1Km2qDQ==", + "dependencies": { + "archive-type": "^4.0.0", + "caw": "^2.0.1", + "content-disposition": "^0.5.2", + "decompress": "^4.2.0", + "ext-name": "^5.0.0", + "file-type": "^8.1.0", + "filenamify": "^2.0.0", + "get-stream": "^3.0.0", + "got": "^8.3.1", + "make-dir": "^1.2.0", + "p-event": "^2.1.0", + "pify": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-wrapper/node_modules/download/node_modules/pify": { + "version": 
"3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-wrapper/node_modules/file-type": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-8.1.0.tgz", + "integrity": "sha512-qyQ0pzAy78gVoJsmYeNgl8uH8yKhr1lVhW7JbzJmnlRi0I4R2eEDEJZVKG8agpDnLpacwNbDhLNG/LMdxHD2YQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-wrapper/node_modules/get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-wrapper/node_modules/got": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", + "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==", + "dependencies": { + "@sindresorhus/is": "^0.7.0", + "cacheable-request": "^2.1.1", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "into-stream": "^3.1.0", + "is-retry-allowed": "^1.1.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "mimic-response": "^1.0.0", + "p-cancelable": "^0.4.0", + "p-timeout": "^2.0.1", + "pify": "^3.0.0", + "safe-buffer": "^5.1.1", + "timed-out": "^4.0.1", + "url-parse-lax": "^3.0.0", + "url-to-options": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-wrapper/node_modules/got/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-wrapper/node_modules/p-cancelable": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz", + "integrity": "sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-wrapper/node_modules/p-event": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-2.3.1.tgz", + "integrity": "sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA==", + "dependencies": { + "p-timeout": "^2.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/bin-wrapper/node_modules/p-timeout": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz", + "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==", + "dependencies": { + "p-finally": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-wrapper/node_modules/prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-wrapper/node_modules/url-parse-lax": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": 
"sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==", + "dependencies": { + "prepend-http": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "engines": { + "node": ">=8" + } + }, + "node_modules/bl": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "dependencies": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/bl/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/bl/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/body-parser": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": 
"sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-alloc": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz", + "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", + "dependencies": { + "buffer-alloc-unsafe": "^1.1.0", + "buffer-fill": "^1.0.0" + } + }, + "node_modules/buffer-alloc-unsafe": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", + "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==" + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "engines": { + "node": "*" + } + }, + "node_modules/buffer-fill": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz", + "integrity": "sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==" + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cacheable-request": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", + "integrity": "sha512-vag0O2LKZ/najSoUwDbVlnlCFvhBE/7mGTY2B5FgCBDcRD+oVV1HYTOwM6JZfMg/hIcM6IwnTZ1uQQL5/X3xIQ==", + "dependencies": { + "clone-response": "1.0.2", + "get-stream": "3.0.0", + "http-cache-semantics": "3.8.1", + "keyv": "3.0.0", + "lowercase-keys": "1.0.0", + "normalize-url": "2.0.1", + "responselike": "1.0.2" + } + }, + "node_modules/cacheable-request/node_modules/get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/cacheable-request/node_modules/lowercase-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz", + "integrity": "sha512-RPlX0+PHuvxVDZ7xX+EBVAp4RsVxP/TdDSN2mJYdiq1Lc4Hz7EUSjUI7RZrKKlmrIzVhf6Jo2stj7++gVarS0A==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + 
"dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/caw": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/caw/-/caw-2.0.1.tgz", + "integrity": "sha512-Cg8/ZSBEa8ZVY9HspcGUYaK63d/bN7rqS3CYCzEGUxuYv6UlmcjzDUz2fCFFHyTvUW5Pk0I+3hkA3iXlIj6guA==", + "dependencies": { + "get-proxy": "^2.0.0", + "isurl": "^1.0.0-alpha5", + "tunnel-agent": "^0.6.0", + "url-to-options": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/clone-response": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", + "integrity": "sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q==", + "dependencies": { + "mimic-response": "^1.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/config-chain": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", + "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", + "dependencies": { + "ini": "^1.3.4", + "proto-list": "~1.2.1" + } + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "node_modules/cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", + "dependencies": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "node_modules/cwebp-bin": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/cwebp-bin/-/cwebp-bin-8.0.0.tgz", + "integrity": "sha512-j2s6jA84aG20lB0i/FBwqZGc8nHx4VASUK8OTDxy3xoUHoX/+pP6T15/TnWwhMcD0pZ05y5GgRPkurufOC8tnQ==", + "hasInstallScript": true, + "dependencies": { + "bin-build": "^3.0.0", + "bin-wrapper": "^4.0.1" + }, + "bin": { + "cwebp": "cli.js" + }, + "engines": { + "node": "^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/imagemin/cwebp-bin?sponsor=1" + } + }, + "node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/decode-uri-component": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", + "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/decompress": { + "version": "4.2.1", + "resolved": 
"https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz", + "integrity": "sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==", + "dependencies": { + "decompress-tar": "^4.0.0", + "decompress-tarbz2": "^4.0.0", + "decompress-targz": "^4.0.0", + "decompress-unzip": "^4.0.1", + "graceful-fs": "^4.1.10", + "make-dir": "^1.0.0", + "pify": "^2.3.0", + "strip-dirs": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", + "dependencies": { + "mimic-response": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-tar": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", + "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", + "dependencies": { + "file-type": "^5.2.0", + "is-stream": "^1.1.0", + "tar-stream": "^1.5.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-tar/node_modules/file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-tarbz2": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", + "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", + "dependencies": { + "decompress-tar": "^4.1.0", + "file-type": "^6.1.0", + "is-stream": "^1.1.0", + "seek-bzip": "^1.0.5", + "unbzip2-stream": "^1.0.9" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-tarbz2/node_modules/file-type": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", + "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-targz": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", + "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", + "dependencies": { + "decompress-tar": "^4.1.1", + "file-type": "^5.2.0", + "is-stream": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-targz/node_modules/file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-unzip": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", + "integrity": "sha512-1fqeluvxgnn86MOh66u8FjbtJpAFv5wgCT9Iw8rcBqQcCo5tO8eiJw7NNTrvt9n4CRBVq7CstiS922oPgyGLrw==", + "dependencies": { + "file-type": "^3.8.0", + "get-stream": "^2.2.0", + "pify": "^2.3.0", + "yauzl": "^2.4.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/decompress-unzip/node_modules/file-type": { + "version": "3.9.0", + "resolved": 
"https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", + "integrity": "sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decompress-unzip/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decompress/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dotenv": { + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.0.3.tgz", + "integrity": "sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/download": { + "version": "6.2.5", + "resolved": "https://registry.npmjs.org/download/-/download-6.2.5.tgz", + "integrity": "sha512-DpO9K1sXAST8Cpzb7kmEhogJxymyVUd5qz/vCOSyvwtp2Klj2XcDt5YUuasgxka44SxF0q5RriKIwJmQHG2AuA==", + "dependencies": { + "caw": "^2.0.0", + "content-disposition": "^0.5.2", + "decompress": "^4.0.0", + "ext-name": "^5.0.0", + "file-type": "5.2.0", + "filenamify": "^2.0.0", + "get-stream": "^3.0.0", + "got": "^7.0.0", + "make-dir": "^1.0.0", + "p-event": "^1.0.0", + "pify": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/download/node_modules/file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/download/node_modules/get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/download/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/duplexer3": { + "version": "0.1.5", + "resolved": 
"https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz", + "integrity": "sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==" + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/ejs": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz", + "integrity": "sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ==", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/exec-buffer": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/exec-buffer/-/exec-buffer-3.2.0.tgz", + "integrity": "sha512-wsiD+2Tp6BWHoVv3B+5Dcx6E7u5zky+hUwOHjuH2hKSLR3dvRmX8fk8UD8uqQixHs4Wk6eDmiegVrMPjKj7wpA==", + "dependencies": { + "execa": "^0.7.0", + "p-finally": "^1.0.0", + "pify": "^3.0.0", + "rimraf": "^2.5.4", + "tempfile": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/exec-buffer/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==", + "dependencies": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/execa/node_modules/get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + 
"integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dependencies": { + "pify": "^2.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/executable/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/express": { + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", + "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.1", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.5.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.2.0", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.7", + "qs": "6.11.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.18.0", + "serve-static": "1.15.0", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/express/node_modules/body-parser": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", + "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.1", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/express/node_modules/raw-body": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", + "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/ext-list": { + "version": "2.2.2", + "resolved": 
"https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", + "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", + "dependencies": { + "mime-db": "^1.28.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ext-name": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", + "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", + "dependencies": { + "ext-list": "^2.0.0", + "sort-keys-length": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/file-type": { + "version": "16.5.4", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-16.5.4.tgz", + "integrity": "sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw==", + "dependencies": { + "readable-web-to-node-stream": "^3.0.0", + "strtok3": "^6.2.4", + "token-types": "^4.1.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/file-type?sponsor=1" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/filename-reserved-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", + "integrity": "sha512-lc1bnsSr4L4Bdif8Xb/qrtokGbq5zlsms/CYH8PP+WtCkGNF65DPiQY8vG3SakEdRn8Dlnm+gW/qWKKjS5sZzQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/filenamify": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/filenamify/-/filenamify-2.1.0.tgz", + "integrity": "sha512-ICw7NTT6RsDp2rnYKVd8Fu4cr6ITzGy3+u4vUujPkabyaz+03F24NWEX7fs5fp+kBonlaqPH8fAO2NM+SXt/JA==", + "dependencies": { + "filename-reserved-regex": "^2.0.0", + "strip-outer": "^1.0.0", + "trim-repeated": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", + "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/find-versions": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-3.2.0.tgz", + "integrity": "sha512-P8WRou2S+oe222TOCHitLy8zj+SIsVJh52VP4lvXkaFVnOFFdoWv1H1Jjvel1aI6NCFOAaeAVm8qrI0odiLcww==", + "dependencies": { + "semver-regex": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/from2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", + "dependencies": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + } + }, + "node_modules/from2/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/from2/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/from2/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-intrinsic": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz", + "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proxy": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/get-proxy/-/get-proxy-2.1.0.tgz", + "integrity": "sha512-zmZIaQTWnNQb4R4fJUEp/FC51eZsc6EkErspy3xtIYStaq8EB/hDIWipxsal+E8rz0qD7f2sL/NA9Xee4RInJw==", + "dependencies": { + "npm-conf": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/get-stream": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", + "integrity": "sha512-AUGhbbemXxrZJRD5cDvKtQxLuYaIbNtDTK8YqupCI393Q2KSTreEsLUN3ZxAWFGiKTzL6nKuzfcIvieflUX9qA==", + "dependencies": { + "object-assign": "^4.0.1", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": 
{ + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globby": { + "version": "12.2.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-12.2.0.tgz", + "integrity": "sha512-wiSuFQLZ+urS9x2gGPl1H5drc5twabmm4m2gTR27XDFyjUHJUNsS8o/2aKyIF6IoBaR630atdher0XJ5g6OMmA==", + "dependencies": { + "array-union": "^3.0.1", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.7", + "ignore": "^5.1.9", + "merge2": "^1.4.1", + "slash": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby/node_modules/slash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/got": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/got/-/got-7.1.0.tgz", + "integrity": "sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw==", + "dependencies": { + "decompress-response": "^3.2.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "is-plain-obj": "^1.1.0", + "is-retry-allowed": "^1.0.0", + "is-stream": "^1.0.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "p-cancelable": "^0.3.0", + "p-timeout": "^1.1.1", + "safe-buffer": "^5.0.1", + "timed-out": "^4.0.0", + "url-parse-lax": "^1.0.0", + "url-to-options": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/got/node_modules/get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/has-symbol-support-x": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", + "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==", + "engines": { + "node": "*" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-to-string-tag-x": { + "version": "1.4.1", + 
"resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz", + "integrity": "sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==", + "dependencies": { + "has-symbol-support-x": "^1.4.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/http-cache-semantics": { + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz", + "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", + "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==" + }, + "node_modules/imagemin": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/imagemin/-/imagemin-8.0.1.tgz", + "integrity": "sha512-Q/QaPi+5HuwbZNtQRqUVk6hKacI6z9iWiCSQBisAv7uBynZwO7t1svkryKl7+iSQbkU/6t9DWnHz04cFs2WY7w==", + "dependencies": { + "file-type": "^16.5.3", + "globby": "^12.0.0", + "graceful-fs": "^4.2.8", + "junk": "^3.1.0", + "p-pipe": "^4.0.0", + "replace-ext": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/imagemin-webp": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/imagemin-webp/-/imagemin-webp-8.0.0.tgz", + "integrity": "sha512-yN6kNKir6T/U3AtP3uLHrLn9XYafk2m49EbUqLCQ3GPRRLRs+4pUQxxaHz+lnTDM+LQpkSjGQaFVcSgYqvW3dQ==", + "dependencies": { + "cwebp-bin": "^8.0.0", + "exec-buffer": "^3.2.0", + "is-cwebp-readable": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/import-lazy": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-3.1.0.tgz", + "integrity": "sha512-8/gvXvX2JMn0F+CDlSC4l6kOmVaLOO3XLkksI7CI3Ud95KDYJuYur2b9P/PUt/i/pDAMd/DulQsNbbbmRRsDIQ==", + "engines": { + "node": ">=6" + } + 
}, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, + "node_modules/into-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", + "integrity": "sha512-TcdjPibTksa1NQximqep2r17ISRiNE9fwlfbg3F8ANdvP5/yrFTew86VcO//jk4QTaMlbjypPBq76HN2zaKfZQ==", + "dependencies": { + "from2": "^2.1.1", + "p-is-promise": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-cwebp-readable": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-cwebp-readable/-/is-cwebp-readable-3.0.0.tgz", + "integrity": "sha512-bpELc7/Q1/U5MWHn4NdHI44R3jxk0h9ew9ljzabiRl70/UIjL/ZAqRMb52F5+eke/VC8yTiv4Ewryo1fPWidvA==", + "dependencies": { + "file-type": "^10.5.0" + } + }, + "node_modules/is-cwebp-readable/node_modules/file-type": { + "version": "10.11.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-10.11.0.tgz", + "integrity": "sha512-uzk64HRpUZyTGZtVuvrjP0FYxzQrBf4rojot6J65YMEbwBLB0CWm0CLojVpwpmFmxcE/lkvYICgfcGozbBq6rw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-natural-number": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", + "integrity": "sha512-Y4LTamMe0DDQIIAlaer9eKebAlDSV6huy+TWhJVPlzZh2o4tRP5SQWFlLn5N0To4mDD22/qdOq+veo1cSISLgQ==" + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + 
"engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz", + "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-retry-allowed": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", + "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/isurl": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", + "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==", + "dependencies": { + "has-to-string-tag-x": "^1.2.0", + "is-object": "^1.0.1" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/jake": { + "version": "10.8.5", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.5.tgz", + "integrity": "sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==", + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.1", + "minimatch": "^3.0.4" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/json-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==" + }, + "node_modules/junk": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/junk/-/junk-3.1.0.tgz", + "integrity": "sha512-pBxcB3LFc8QVgdggvZWyeys+hnrNWg4OcZIU/1X59k5jQdLBlCsYGRQaz234SqoRLTCgMH00fY0xRJH+F9METQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/keyv": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz", + "integrity": "sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA==", + "dependencies": { + "json-buffer": "3.0.0" + } + }, + "node_modules/lowercase-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": 
"sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "node_modules/make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "dependencies": { + "pify": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/make-dir/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-response": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" + }, + "node_modules/nodemon": { + "version": "2.0.22", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.22.tgz", + "integrity": "sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==", + "dependencies": { + "chokidar": "^3.5.2", + "debug": "^3.2.7", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^5.7.1", + "simple-update-notifier": "^1.0.7", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "bin": { + "nodemon": "bin/nodemon.js" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } + }, + "node_modules/nopt": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "integrity": "sha512-NWmpvLSqUrgrAC9HCuxEvb+PSloHpqVu+FqcO4eeF2h5qYRhA7ev6KvelyQAKtegUbC6RypJnlEOhd8vloNKYg==", + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-url": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", + "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", + "dependencies": { + "prepend-http": "^2.0.0", + "query-string": "^5.0.1", + "sort-keys": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/normalize-url/node_modules/prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/normalize-url/node_modules/sort-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", + 
"integrity": "sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg==", + "dependencies": { + "is-plain-obj": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-conf": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/npm-conf/-/npm-conf-1.1.3.tgz", + "integrity": "sha512-Yic4bZHJOt9RCFbRP3GgpqhScOY4HH3V2P8yBj6CeYq118Qr+BLXqT2JvpJ00mryLESpgOxf5XlFv4ZjXxLScw==", + "dependencies": { + "config-chain": "^1.1.11", + "pify": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-conf/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", + "dependencies": { + "path-key": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", + "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/os-filter-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/os-filter-obj/-/os-filter-obj-2.0.0.tgz", + "integrity": "sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==", + "dependencies": { + "arch": "^2.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/p-cancelable": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.3.0.tgz", + "integrity": "sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-event": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-1.3.0.tgz", + "integrity": "sha512-hV1zbA7gwqPVFcapfeATaNjQ3J0NuzorHPyG8GPL9g/Y/TplWVBVoCKCXL6Ej2zscrCEv195QNWJXuBH6XZuzA==", + "dependencies": { + "p-timeout": "^1.1.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": 
"sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-is-promise": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", + "integrity": "sha512-zL7VE4JVS2IFSkR2GQKDSPEVxkoH43/p7oEnwpdCndKYJO0HVeRB7fA8TJwuLOTBREtK0ea8eHaxdwcpob5dmg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-map-series": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-map-series/-/p-map-series-1.0.0.tgz", + "integrity": "sha512-4k9LlvY6Bo/1FcIdV33wqZQES0Py+iKISU9Uc8p8AjWoZPnFKMpVIVD3s0EYn4jzLh1I+WeUZkJ0Yoa4Qfw3Kg==", + "dependencies": { + "p-reduce": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/p-pipe": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-pipe/-/p-pipe-4.0.0.tgz", + "integrity": "sha512-HkPfFklpZQPUKBFXzKFB6ihLriIHxnmuQdK9WmLDwe4hf2PdhhfWT/FJa+pc3bA1ywvKXtedxIRmd4Y7BTXE4w==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-reduce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-reduce/-/p-reduce-1.0.0.tgz", + "integrity": "sha512-3Tx1T3oM1xO/Y8Gj0sWyE78EIJZ+t+aEmXUdvQgvGmSMri7aPTHoovbXEreWKkL5j21Er60XAWLTzKbAKYOujQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-timeout": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-1.2.1.tgz", + "integrity": "sha512-gb0ryzr+K2qFqFv6qi3khoeqMZF/+ajxQipEF6NteZVnvz9tzdsfAVj3lYtn1gAXvH5lfLwfxEII799gt/mRIA==", + "dependencies": { + "p-finally": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/peek-readable": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-4.1.0.tgz", + "integrity": "sha512-ZI3LnwUv5nOGbQzD9c2iDG6toheuXSZP5esSHBjopsXH4dg19soufvpUGA3uohi5anFtGb2lhAVdHzH6R/Evvg==", + "engines": { + "node": ">=8" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/pend": { + 
"version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "engines": { + "node": ">=6" + } + }, + "node_modules/pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", + "dependencies": { + "pinkie": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prepend-http": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", + "integrity": "sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/proto-list": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", + "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==" + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==" + }, + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==" + }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + 
"integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/query-string": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", + "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", + "dependencies": { + "decode-uri-component": "^0.2.0", + "object-assign": "^4.1.0", + "strict-uri-encode": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readable-web-to-node-stream": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz", + "integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==", + "dependencies": { + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/replace-ext": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-2.0.0.tgz", + "integrity": "sha512-UszKE5KVK6JvyD92nzMn9cDapSk6w/CaFZ96CnmDMUqH9oowfxF/ZjRITD25H4DnOQClLA4/j7jLGXXLVKxAug==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/responselike": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": 
"sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", + "dependencies": { + "lowercase-keys": "^1.0.0" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/seek-bzip": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz", + "integrity": "sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==", + "dependencies": { + "commander": "^2.8.1" + }, + "bin": { + "seek-bunzip": "bin/seek-bunzip", + "seek-table": "bin/seek-bzip-table" + } + }, + "node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/semver-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-2.0.0.tgz", + "integrity": "sha512-mUdIBBvdn0PLOeP3TEkMH7HHeUP3GjsXCwKarjv/kGmUFOYg1VqEemKhoQpWMu6X2I8kHeuVdGibLGkVK+/5Qw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/semver-truncate": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/semver-truncate/-/semver-truncate-1.1.2.tgz", + "integrity": "sha512-V1fGg9i4CL3qesB6U0L6XAm4xOJiHmt4QAacazumuasc03BvtFGIMCduv01JWQ69Nv+JST9TqhSCiJoxoY031w==", + "dependencies": { + "semver": "^5.3.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/send": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", + "integrity": 
"sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/serve-static": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "dependencies": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.18.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/simple-update-notifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.1.0.tgz", + "integrity": "sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==", + "dependencies": { + "semver": "~7.0.0" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/simple-update-notifier/node_modules/semver": { + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/sort-keys": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", + "integrity": "sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==", + "dependencies": { + "is-plain-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sort-keys-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", + "integrity": "sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==", + "dependencies": { + "sort-keys": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/strict-uri-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", + "integrity": "sha512-R3f198pcvnB+5IpnBlRkphuE9n46WyVl8I39W/ZUTZLz4nqSP/oLYUrcnJrw462Ds8he4YKMov2efsTIw1BDGQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/strip-dirs": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", + "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", + "dependencies": { + "is-natural-number": "^4.0.1" + } + }, + "node_modules/strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-outer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-1.0.1.tgz", + "integrity": "sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg==", + "dependencies": { + "escape-string-regexp": "^1.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strtok3": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-6.3.0.tgz", + "integrity": "sha512-fZtbhtvI9I48xDSywd/somNqgUHl2L2cstmXCCif0itOf96jeW18MBSyrLuNicYQVkvpOxkZtkzujiTJ9LW5Jw==", + "dependencies": { + "@tokenizer/token": "^0.3.0", + "peek-readable": "^4.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "dependencies": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/tar-stream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/tar-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/tar-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/temp-dir": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-1.0.0.tgz", + "integrity": "sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/tempfile": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tempfile/-/tempfile-2.0.0.tgz", + "integrity": "sha512-ZOn6nJUgvgC09+doCEF3oB+r3ag7kUvlsXEGX069QRD60p+P3uP7XG9N2/at+EyIRGSN//ZY3LyEotA1YpmjuA==", + "dependencies": { + "temp-dir": "^1.0.0", + "uuid": "^3.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" + }, + "node_modules/timed-out": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", + "integrity": "sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-buffer": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", + "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + 
"dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/token-types": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-4.2.1.tgz", + "integrity": "sha512-6udB24Q737UD/SDsKAHI9FCRP7Bqc9D/MQUV02ORQg5iskjtLJlZJNdN4kKtcdtwCeWIwIHDGaUsTsCCAa8sFQ==", + "dependencies": { + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/touch": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz", + "integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==", + "dependencies": { + "nopt": "~1.0.10" + }, + "bin": { + "nodetouch": "bin/nodetouch.js" + } + }, + "node_modules/trim-repeated": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", + "integrity": "sha512-pkonvlKk8/ZuR0D5tLW8ljt5I8kmxp2XKymhepUeOdCEfKpZaktSArkLHZt76OB1ZvO9bssUsDty4SWhLvZpLg==", + "dependencies": { + "escape-string-regexp": "^1.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/unbzip2-stream": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", + "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", + "dependencies": { + "buffer": "^5.2.1", + "through": "^2.3.8" + } + }, + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==" + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/url-parse-lax": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", + "integrity": "sha512-BVA4lR5PIviy2PMseNd2jbFQ+jwSwQGdJejf5ctd1rEXt0Ypd7yanUK9+lYechVlN5VaTJGsu2U/3MDDu6KgBA==", + "dependencies": { + "prepend-http": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/url-to-options": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", + "integrity": "sha512-0kQLIzG4fdk/G5NONku64rSH/x32NOA39LVQqlK8Le6lvTF6GGRJpqaQFGgU+CLwySIqBSMdwYM0sYcW9f6P4A==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==" + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + } + } +} diff --git a/node_modules/@nodelib/fs.scandir/LICENSE b/node_modules/@nodelib/fs.scandir/LICENSE new file mode 100644 index 00000000..65a99946 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom 
the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.scandir/README.md b/node_modules/@nodelib/fs.scandir/README.md new file mode 100644 index 00000000..e0b218b9 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/README.md @@ -0,0 +1,171 @@ +# @nodelib/fs.scandir + +> List files and directories inside the specified directory. + +## :bulb: Highlights + +The package is aimed at obtaining information about entries in the directory. + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode). +* :link: Can safely work with broken symbolic links. + +## Install + +```console +npm install @nodelib/fs.scandir +``` + +## Usage + +```ts +import * as fsScandir from '@nodelib/fs.scandir'; + +fsScandir.scandir('path', (error, stats) => { /* … */ }); +``` + +## API + +### .scandir(path, [optionsOrSettings], callback) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style. + +```ts +fsScandir.scandir('path', (error, entries) => { /* … */ }); +fsScandir.scandir('path', {}, (error, entries) => { /* … */ }); +fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ }); +``` + +### .scandirSync(path, [optionsOrSettings]) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path. + +```ts +const entries = fsScandir.scandirSync('path'); +const entries = fsScandir.scandirSync('path', {}); +const entries = fsScandir.scandirSync(('path', new fsScandir.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsScandir.Settings({ followSymbolicLinks: false }); + +const entries = fsScandir.scandirSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class. +* `stats` (optional) — An instance of `fs.Stats` class. 
+ +For example, the `scandir` call for `tools` directory with one directory inside: + +```ts +{ + dirent: Dirent { name: 'typedoc', /* … */ }, + name: 'typedoc', + path: 'tools/typedoc' +} +``` + +## Options + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO?? + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. + +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. + +```ts +interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; + readdir?: typeof fs.readdir; + readdirSync?: typeof fs.readdirSync; +} + +const settings = new fsScandir.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## `old` and `modern` mode + +This package has two modes that are used depending on the environment and parameters of use. + +### old + +* Node.js below `10.10` or when the `stats` option is enabled + +When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links). + +### modern + +* Node.js 10.10+ and the `stats` option is disabled + +In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present. + +This mode makes fewer calls to the file system. It's faster. + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. 
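A minimal promise-based sketch of the scandir API documented above — illustrative only, not part of the vendored package files. It assumes `@nodelib/fs.scandir` is installed, a local `./src` directory exists, and the `listEntries` helper name is hypothetical; the package itself only ships callback and sync variants, so `util.promisify` supplies the Promise wrapper.

```ts
// Minimal sketch, not part of the vendored package files.
// Assumes @nodelib/fs.scandir is installed and ./src exists.
import { promisify } from 'util';
import * as fsScandir from '@nodelib/fs.scandir';

// The package exposes only callback and sync APIs; promisify adds a Promise wrapper.
const scandirAsync = promisify(fsScandir.scandir);

async function listEntries(directory: string): Promise<void> {
  // Enabling `stats` switches the package to its "old" mode
  // (fs.readdir followed by fs.lstat/fs.stat per entry).
  const settings = new fsScandir.Settings({ stats: true });
  const entries = await scandirAsync(directory, settings);

  for (const entry of entries) {
    // Every Entry carries `name`, `path` and `dirent`;
    // `stats` is present here because of the option above.
    console.log(entry.path, entry.dirent.isDirectory() ? 'dir' : 'file', entry.stats?.size);
  }
}

listEntries('./src').catch(console.error);
```

The same `util.promisify` pattern applies to the callback-style `@nodelib/fs.stat` and `@nodelib/fs.walk` packages that appear further down in this diff.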
diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts new file mode 100644 index 00000000..827f1db0 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts @@ -0,0 +1,20 @@ +import type * as fsStat from '@nodelib/fs.stat'; +import type { Dirent, ErrnoException } from '../types'; +export interface ReaddirAsynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; + (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; +} +export interface ReaddirSynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }): Dirent[]; + (filepath: string): string[]; +} +export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { + readdir: ReaddirAsynchronousMethod; + readdirSync: ReaddirSynchronousMethod; +}; +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js new file mode 100644 index 00000000..f0fe0220 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/@nodelib/fs.scandir/out/constants.d.ts new file mode 100644 index 00000000..33f17497 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/constants.d.ts @@ -0,0 +1,4 @@ +/** + * IS `true` for Node.js 10.10 and greater. + */ +export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/@nodelib/fs.scandir/out/constants.js new file mode 100644 index 00000000..7e3d4411 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/constants.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; +const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); +if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { + throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); +} +const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); +const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); +const SUPPORTED_MAJOR_VERSION = 10; +const SUPPORTED_MINOR_VERSION = 10; +const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; +const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; +/** + * IS `true` for Node.js 10.10 and greater. 
+ */ +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/@nodelib/fs.scandir/out/index.d.ts new file mode 100644 index 00000000..b9da83ed --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Dirent, Entry } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function scandir(path: string, callback: AsyncCallback): void; +declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace scandir { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; +export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/@nodelib/fs.scandir/out/index.js new file mode 100644 index 00000000..99c70d3d --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.scandirSync = exports.scandir = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function scandir(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.scandir = scandir; +function scandirSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.scandirSync = scandirSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts new file mode 100644 index 00000000..5829676d --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts @@ -0,0 +1,7 @@ +/// +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; +export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/@nodelib/fs.scandir/out/providers/async.js new file mode 100644 index 00000000..e8e2f0a9 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const rpl = require("run-parallel"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings, callback) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + readdirWithFileTypes(directory, settings, callback); + return; + } + readdir(directory, settings, callback); +} +exports.read = read; +function readdirWithFileTypes(directory, settings, callback) { + settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const entries = dirents.map((dirent) => ({ + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + })); + if (!settings.followSymbolicLinks) { + callSuccessCallback(callback, entries); + return; + } + const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); + rpl(tasks, (rplError, rplEntries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, rplEntries); + }); + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function makeRplTaskEntry(entry, settings) { + return (done) => { + if (!entry.dirent.isSymbolicLink()) { + done(null, entry); + return; + } + settings.fs.stat(entry.path, (statError, stats) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + done(statError); + return; + } + done(null, entry); + return; + } + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + done(null, entry); + }); + }; +} +function readdir(directory, settings, callback) { + settings.fs.readdir(directory, (readdirError, names) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const tasks = names.map((name) => { + const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + return (done) => { + fsStat.stat(path, settings.fsStatSettings, (error, stats) => { + if (error !== null) { + done(error); + return; + } + const entry = { + name, + path, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + done(null, entry); + }); + }; + }); + rpl(tasks, (rplError, entries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, entries); + }); + }); +} +exports.readdir = readdir; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts new file mode 100644 index 00000000..2b4d08b5 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts @@ -0,0 +1 @@ +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/@nodelib/fs.scandir/out/providers/common.js new file mode 100644 index 00000000..8724cb59 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/common.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = void 
0; +function joinPathSegments(a, b, separator) { + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). + */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts new file mode 100644 index 00000000..e05c8f07 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts @@ -0,0 +1,5 @@ +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare function read(directory: string, settings: Settings): Entry[]; +export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; +export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/@nodelib/fs.scandir/out/providers/sync.js new file mode 100644 index 00000000..146db343 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + return readdirWithFileTypes(directory, settings); + } + return readdir(directory, settings); +} +exports.read = read; +function readdirWithFileTypes(directory, settings) { + const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); + return dirents.map((dirent) => { + const entry = { + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + }; + if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { + try { + const stats = settings.fs.statSync(entry.path); + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + } + catch (error) { + if (settings.throwErrorOnBrokenSymbolicLink) { + throw error; + } + } + } + return entry; + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function readdir(directory, settings) { + const names = settings.fs.readdirSync(directory); + return names.map((name) => { + const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + const stats = fsStat.statSync(entryPath, settings.fsStatSettings); + const entry = { + name, + path: entryPath, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + return entry; + }); +} +exports.readdir = readdir; diff --git a/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/@nodelib/fs.scandir/out/settings.d.ts new file mode 100644 index 00000000..a0db1155 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/settings.d.ts @@ -0,0 +1,20 @@ +import * as fsStat from '@nodelib/fs.stat'; +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLinks: boolean; + readonly fs: fs.FileSystemAdapter; + readonly 
pathSegmentSeparator: string; + readonly stats: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly fsStatSettings: fsStat.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/@nodelib/fs.scandir/out/settings.js new file mode 100644 index 00000000..15a3e8cd --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/settings.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsStat = require("@nodelib/fs.stat"); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.stats = this._getValue(this._options.stats, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + this.fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this.followSymbolicLinks, + fs: this.fs, + throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts new file mode 100644 index 00000000..f326c5e5 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts @@ -0,0 +1,20 @@ +/// +import type * as fs from 'fs'; +export interface Entry { + dirent: Dirent; + name: string; + path: string; + stats?: Stats; +} +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; +export interface Dirent { + isBlockDevice: () => boolean; + isCharacterDevice: () => boolean; + isDirectory: () => boolean; + isFIFO: () => boolean; + isFile: () => boolean; + isSocket: () => boolean; + isSymbolicLink: () => boolean; + name: string; +} diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/@nodelib/fs.scandir/out/types/index.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts new file mode 100644 index 00000000..bb863f15 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts @@ -0,0 +1,2 @@ +import type { Dirent, Stats } from '../types'; +export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/@nodelib/fs.scandir/out/utils/fs.js new file mode 100644 index 00000000..ace7c74d --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = 
stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts new file mode 100644 index 00000000..1b41954e --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts @@ -0,0 +1,2 @@ +import * as fs from './fs'; +export { fs }; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.js b/node_modules/@nodelib/fs.scandir/out/utils/index.js new file mode 100644 index 00000000..f5de129f --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fs = void 0; +const fs = require("./fs"); +exports.fs = fs; diff --git a/node_modules/@nodelib/fs.scandir/package.json b/node_modules/@nodelib/fs.scandir/package.json new file mode 100644 index 00000000..d3a89241 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.scandir", + "version": "2.1.5", + "description": "List files and directories inside the specified directory", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "scandir", + "readdir", + "dirent" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4", + "@types/run-parallel": "^1.1.0" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/@nodelib/fs.stat/LICENSE b/node_modules/@nodelib/fs.stat/LICENSE new file mode 100644 index 00000000..65a99946 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.stat/README.md b/node_modules/@nodelib/fs.stat/README.md new file mode 100644 index 00000000..686f0471 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/README.md @@ -0,0 +1,126 @@ +# @nodelib/fs.stat + +> Get the status of a file with some features. + +## :bulb: Highlights + +Wrapper around standard method `fs.lstat` and `fs.stat` with some features. + +* :beginner: Normally follows symbolic link. +* :gear: Can safely work with broken symbolic link. + +## Install + +```console +npm install @nodelib/fs.stat +``` + +## Usage + +```ts +import * as fsStat from '@nodelib/fs.stat'; + +fsStat.stat('path', (error, stats) => { /* … */ }); +``` + +## API + +### .stat(path, [optionsOrSettings], callback) + +Returns an instance of `fs.Stats` class for provided path with standard callback-style. + +```ts +fsStat.stat('path', (error, stats) => { /* … */ }); +fsStat.stat('path', {}, (error, stats) => { /* … */ }); +fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ }); +``` + +### .statSync(path, [optionsOrSettings]) + +Returns an instance of `fs.Stats` class for provided path. + +```ts +const stats = fsStat.stat('path'); +const stats = fsStat.stat('path', {}); +const stats = fsStat.stat('path', new fsStat.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settings) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsStat.Settings({ followSymbolicLink: false }); + +const stats = fsStat.stat('path', settings); +``` + +## Options + +### `followSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Follow symbolic link or not. Call `fs.stat` on symbolic link if `true`. + +### `markSymbolicLink` + +* Type: `boolean` +* Default: `false` + +Mark symbolic link by setting the return value of `isSymbolicLink` function to always `true` (even after `fs.stat`). + +> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link. + +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +### `fs` + +* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. 
+ +```ts +interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; +} + +const settings = new fsStat.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts new file mode 100644 index 00000000..3af759c9 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts @@ -0,0 +1,13 @@ +/// +import * as fs from 'fs'; +import type { ErrnoException } from '../types'; +export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; +export declare type StatSynchronousMethod = (path: string) => fs.Stats; +export interface FileSystemAdapter { + lstat: StatAsynchronousMethod; + stat: StatAsynchronousMethod; + lstatSync: StatSynchronousMethod; + statSync: StatSynchronousMethod; +} +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/@nodelib/fs.stat/out/adapters/fs.js new file mode 100644 index 00000000..8dc08c8c --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/@nodelib/fs.stat/out/index.d.ts new file mode 100644 index 00000000..f95db995 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Stats } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function stat(path: string, callback: AsyncCallback): void; +declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace stat { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; +export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/@nodelib/fs.stat/out/index.js new file mode 100644 index 00000000..b23f7510 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.statSync = exports.stat = 
exports.Settings = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function stat(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.stat = stat; +function statSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.statSync = statSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts new file mode 100644 index 00000000..85423ce1 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts @@ -0,0 +1,4 @@ +import type Settings from '../settings'; +import type { ErrnoException, Stats } from '../types'; +export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; +export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/@nodelib/fs.stat/out/providers/async.js new file mode 100644 index 00000000..983ff0e6 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/async.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings, callback) { + settings.fs.lstat(path, (lstatError, lstat) => { + if (lstatError !== null) { + callFailureCallback(callback, lstatError); + return; + } + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + callSuccessCallback(callback, lstat); + return; + } + settings.fs.stat(path, (statError, stat) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + callFailureCallback(callback, statError); + return; + } + callSuccessCallback(callback, lstat); + return; + } + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + callSuccessCallback(callback, stat); + }); + }); +} +exports.read = read; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts new file mode 100644 index 00000000..428c3d79 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts @@ -0,0 +1,3 @@ +import type Settings from '../settings'; +import type { Stats } from '../types'; +export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.js b/node_modules/@nodelib/fs.stat/out/providers/sync.js new file mode 100644 index 00000000..1521c361 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings) { + const lstat = settings.fs.lstatSync(path); + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + return lstat; + } + try { + const stat = 
settings.fs.statSync(path); + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + return stat; + } + catch (error) { + if (!settings.throwErrorOnBrokenSymbolicLink) { + return lstat; + } + throw error; + } +} +exports.read = read; diff --git a/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/@nodelib/fs.stat/out/settings.d.ts new file mode 100644 index 00000000..f4b3d444 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/settings.d.ts @@ -0,0 +1,16 @@ +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLink?: boolean; + fs?: Partial; + markSymbolicLink?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLink: boolean; + readonly fs: fs.FileSystemAdapter; + readonly markSymbolicLink: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/@nodelib/fs.stat/out/settings.js new file mode 100644 index 00000000..111ec09c --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/settings.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/@nodelib/fs.stat/out/types/index.d.ts new file mode 100644 index 00000000..74c08ed2 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/types/index.d.ts @@ -0,0 +1,4 @@ +/// +import type * as fs from 'fs'; +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/@nodelib/fs.stat/out/types/index.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.stat/package.json b/node_modules/@nodelib/fs.stat/package.json new file mode 100644 index 00000000..f2540c28 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/package.json @@ -0,0 +1,37 @@ +{ + "name": "@nodelib/fs.stat", + "version": "2.0.5", + "description": "Get the status of a file with some features", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "stat" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . 
--watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/@nodelib/fs.walk/LICENSE b/node_modules/@nodelib/fs.walk/LICENSE new file mode 100644 index 00000000..65a99946 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.walk/README.md b/node_modules/@nodelib/fs.walk/README.md new file mode 100644 index 00000000..6ccc08db --- /dev/null +++ b/node_modules/@nodelib/fs.walk/README.md @@ -0,0 +1,215 @@ +# @nodelib/fs.walk + +> A library for efficiently walking a directory recursively. + +## :bulb: Highlights + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode). +* :gear: Built-in directories/files and error filtering system. +* :link: Can safely work with broken symbolic links. + +## Install + +```console +npm install @nodelib/fs.walk +``` + +## Usage + +```ts +import * as fsWalk from '@nodelib/fs.walk'; + +fsWalk.walk('path', (error, entries) => { /* … */ }); +``` + +## API + +### .walk(path, [optionsOrSettings], callback) + +Reads the directory recursively and asynchronously. Requires a callback function. + +> :book: If you want to use the Promise API, use `util.promisify`. + +```ts +fsWalk.walk('path', (error, entries) => { /* … */ }); +fsWalk.walk('path', {}, (error, entries) => { /* … */ }); +fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ }); +``` + +### .walkStream(path, [optionsOrSettings]) + +Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider. + +```ts +const stream = fsWalk.walkStream('path'); +const stream = fsWalk.walkStream('path', {}); +const stream = fsWalk.walkStream('path', new fsWalk.Settings()); +``` + +### .walkSync(path, [optionsOrSettings]) + +Reads the directory recursively and synchronously. 
Returns an array of entries. + +```ts +const entries = fsWalk.walkSync('path'); +const entries = fsWalk.walkSync('path', {}); +const entries = fsWalk.walkSync('path', new fsWalk.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settings) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsWalk.Settings({ followSymbolicLinks: true }); + +const entries = fsWalk.walkSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. +* [`stats`] — An instance of `fs.Stats` class. + +## Options + +### basePath + +* Type: `string` +* Default: `undefined` + +By default, all paths are built relative to the root path. You can use this option to set custom root path. + +In the example below we read the files from the `root` directory, but in the results the root path will be `custom`. + +```ts +fsWalk.walkSync('root'); // → ['root/file.txt'] +fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt'] +``` + +### concurrency + +* Type: `number` +* Default: `Infinity` + +The maximum number of concurrent calls to `fs.readdir`. + +> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)). + +### deepFilter + +* Type: [`DeepFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the directory will be read deep or not. + +```ts +// Skip all directories that starts with `node_modules` +const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules'); +``` + +### entryFilter + +* Type: [`EntryFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the entry will be included to results or not. + +```ts +// Exclude all `.js` files from results +const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js'); +``` + +### errorFilter + +* Type: [`ErrorFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that allows you to skip errors that occur when reading directories. + +For example, you can skip `ENOENT` errors if required: + +```ts +// Skip all ENOENT errors +const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT'; +``` + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type. + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. 
+ +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: `FileSystemAdapter` +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. + +```ts +interface FileSystemAdapter { + lstat: typeof fs.lstat; + stat: typeof fs.stat; + lstatSync: typeof fs.lstatSync; + statSync: typeof fs.statSync; + readdir: typeof fs.readdir; + readdirSync: typeof fs.readdirSync; +} + +const settings = new fsWalk.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/@nodelib/fs.walk/out/index.d.ts new file mode 100644 index 00000000..8864c7bf --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/index.d.ts @@ -0,0 +1,14 @@ +/// +import type { Readable } from 'stream'; +import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; +import { AsyncCallback } from './providers/async'; +import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; +import type { Entry } from './types'; +declare function walk(directory: string, callback: AsyncCallback): void; +declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace walk { + function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; +declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; +export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/@nodelib/fs.walk/out/index.js new file mode 100644 index 00000000..15207874 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/index.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function walk(directory, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + return; + } + new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); +} +exports.walk = walk; +function walkSync(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new sync_1.default(directory, settings); + return 
provider.read(); +} +exports.walkSync = walkSync; +function walkStream(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new stream_1.default(directory, settings); + return provider.read(); +} +exports.walkStream = walkStream; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts new file mode 100644 index 00000000..0f6717d7 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts @@ -0,0 +1,12 @@ +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; +export default class AsyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + private readonly _storage; + constructor(_root: string, _settings: Settings); + read(callback: AsyncCallback): void; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/@nodelib/fs.walk/out/providers/async.js new file mode 100644 index 00000000..51d3be51 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/async.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +class AsyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._storage = []; + } + read(callback) { + this._reader.onError((error) => { + callFailureCallback(callback, error); + }); + this._reader.onEntry((entry) => { + this._storage.push(entry); + }); + this._reader.onEnd(() => { + callSuccessCallback(callback, this._storage); + }); + this._reader.read(); + } +} +exports.default = AsyncProvider; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, entries) { + callback(null, entries); +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts new file mode 100644 index 00000000..874f60c5 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts @@ -0,0 +1,4 @@ +import AsyncProvider from './async'; +import StreamProvider from './stream'; +import SyncProvider from './sync'; +export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/@nodelib/fs.walk/out/providers/index.js new file mode 100644 index 00000000..4c2529ce --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/index.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; +const async_1 = require("./async"); +exports.AsyncProvider = async_1.default; +const stream_1 = require("./stream"); +exports.StreamProvider = stream_1.default; +const sync_1 = require("./sync"); +exports.SyncProvider = sync_1.default; diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts new file mode 100644 index 00000000..294185f8 --- /dev/null +++ 
b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from 'stream'; +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +export default class StreamProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + protected readonly _stream: Readable; + constructor(_root: string, _settings: Settings); + read(): Readable; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/@nodelib/fs.walk/out/providers/stream.js new file mode 100644 index 00000000..51298b0f --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/stream.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const async_1 = require("../readers/async"); +class StreamProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._stream = new stream_1.Readable({ + objectMode: true, + read: () => { }, + destroy: () => { + if (!this._reader.isDestroyed) { + this._reader.destroy(); + } + } + }); + } + read() { + this._reader.onError((error) => { + this._stream.emit('error', error); + }); + this._reader.onEntry((entry) => { + this._stream.push(entry); + }); + this._reader.onEnd(() => { + this._stream.push(null); + }); + this._reader.read(); + return this._stream; + } +} +exports.default = StreamProvider; diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts new file mode 100644 index 00000000..551c42e4 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts @@ -0,0 +1,10 @@ +import SyncReader from '../readers/sync'; +import type Settings from '../settings'; +import type { Entry } from '../types'; +export default class SyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: SyncReader; + constructor(_root: string, _settings: Settings); + read(): Entry[]; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/@nodelib/fs.walk/out/providers/sync.js new file mode 100644 index 00000000..faab6ca2 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/sync.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +class SyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new sync_1.default(this._root, this._settings); + } + read() { + return this._reader.read(); + } +} +exports.default = SyncProvider; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts new file mode 100644 index 00000000..9acf4e6c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts @@ -0,0 +1,30 @@ +/// +import { EventEmitter } from 'events'; +import * as fsScandir from '@nodelib/fs.scandir'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +import Reader from './reader'; +declare type EntryEventCallback = (entry: Entry) => void; +declare type ErrorEventCallback = (error: Errno) => void; +declare type EndEventCallback = () => void; +export default class AsyncReader extends Reader { + protected readonly _settings: Settings; + protected readonly _scandir: typeof 
fsScandir.scandir; + protected readonly _emitter: EventEmitter; + private readonly _queue; + private _isFatalError; + private _isDestroyed; + constructor(_root: string, _settings: Settings); + read(): EventEmitter; + get isDestroyed(): boolean; + destroy(): void; + onEntry(callback: EntryEventCallback): void; + onError(callback: ErrorEventCallback): void; + onEnd(callback: EndEventCallback): void; + private _pushToQueue; + private _worker; + private _handleError; + private _handleEntry; + private _emitEntry; +} +export {}; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/@nodelib/fs.walk/out/readers/async.js new file mode 100644 index 00000000..ebe8dd57 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/async.js @@ -0,0 +1,97 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const events_1 = require("events"); +const fsScandir = require("@nodelib/fs.scandir"); +const fastq = require("fastq"); +const common = require("./common"); +const reader_1 = require("./reader"); +class AsyncReader extends reader_1.default { + constructor(_root, _settings) { + super(_root, _settings); + this._settings = _settings; + this._scandir = fsScandir.scandir; + this._emitter = new events_1.EventEmitter(); + this._queue = fastq(this._worker.bind(this), this._settings.concurrency); + this._isFatalError = false; + this._isDestroyed = false; + this._queue.drain = () => { + if (!this._isFatalError) { + this._emitter.emit('end'); + } + }; + } + read() { + this._isFatalError = false; + this._isDestroyed = false; + setImmediate(() => { + this._pushToQueue(this._root, this._settings.basePath); + }); + return this._emitter; + } + get isDestroyed() { + return this._isDestroyed; + } + destroy() { + if (this._isDestroyed) { + throw new Error('The reader is already destroyed'); + } + this._isDestroyed = true; + this._queue.killAndDrain(); + } + onEntry(callback) { + this._emitter.on('entry', callback); + } + onError(callback) { + this._emitter.once('error', callback); + } + onEnd(callback) { + this._emitter.once('end', callback); + } + _pushToQueue(directory, base) { + const queueItem = { directory, base }; + this._queue.push(queueItem, (error) => { + if (error !== null) { + this._handleError(error); + } + }); + } + _worker(item, done) { + this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { + if (error !== null) { + done(error, undefined); + return; + } + for (const entry of entries) { + this._handleEntry(entry, item.base); + } + done(null, undefined); + }); + } + _handleError(error) { + if (this._isDestroyed || !common.isFatalError(this._settings, error)) { + return; + } + this._isFatalError = true; + this._isDestroyed = true; + this._emitter.emit('error', error); + } + _handleEntry(entry, base) { + if (this._isDestroyed || this._isFatalError) { + return; + } + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._emitEntry(entry); + } + if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); + } + } + _emitEntry(entry) { + this._emitter.emit('entry', entry); + } +} +exports.default = AsyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts new file mode 100644 index 00000000..5985f97c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts @@ -0,0 +1,7 @@ +import type { FilterFunction } from '../settings'; +import type Settings from '../settings'; +import type { Errno } from '../types'; +export declare function isFatalError(settings: Settings, error: Errno): boolean; +export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; +export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/@nodelib/fs.walk/out/readers/common.js new file mode 100644 index 00000000..a93572f4 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/common.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; +function isFatalError(settings, error) { + if (settings.errorFilter === null) { + return true; + } + return !settings.errorFilter(error); +} +exports.isFatalError = isFatalError; +function isAppliedFilter(filter, value) { + return filter === null || filter(value); +} +exports.isAppliedFilter = isAppliedFilter; +function replacePathSegmentSeparator(filepath, separator) { + return filepath.split(/[/\\]/).join(separator); +} +exports.replacePathSegmentSeparator = replacePathSegmentSeparator; +function joinPathSegments(a, b, separator) { + if (a === '') { + return b; + } + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
+ */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts new file mode 100644 index 00000000..e1f383b2 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts @@ -0,0 +1,6 @@ +import type Settings from '../settings'; +export default class Reader { + protected readonly _root: string; + protected readonly _settings: Settings; + constructor(_root: string, _settings: Settings); +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/@nodelib/fs.walk/out/readers/reader.js new file mode 100644 index 00000000..782f07cb --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const common = require("./common"); +class Reader { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); + } +} +exports.default = Reader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts new file mode 100644 index 00000000..af410335 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts @@ -0,0 +1,15 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry } from '../types'; +import Reader from './reader'; +export default class SyncReader extends Reader { + protected readonly _scandir: typeof fsScandir.scandirSync; + private readonly _storage; + private readonly _queue; + read(): Entry[]; + private _pushToQueue; + private _handleQueue; + private _handleDirectory; + private _handleError; + private _handleEntry; + private _pushToStorage; +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/@nodelib/fs.walk/out/readers/sync.js new file mode 100644 index 00000000..9a8d5a6f --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsScandir = require("@nodelib/fs.scandir"); +const common = require("./common"); +const reader_1 = require("./reader"); +class SyncReader extends reader_1.default { + constructor() { + super(...arguments); + this._scandir = fsScandir.scandirSync; + this._storage = []; + this._queue = new Set(); + } + read() { + this._pushToQueue(this._root, this._settings.basePath); + this._handleQueue(); + return this._storage; + } + _pushToQueue(directory, base) { + this._queue.add({ directory, base }); + } + _handleQueue() { + for (const item of this._queue.values()) { + this._handleDirectory(item.directory, item.base); + } + } + _handleDirectory(directory, base) { + try { + const entries = this._scandir(directory, this._settings.fsScandirSettings); + for (const entry of entries) { + this._handleEntry(entry, base); + } + } + catch (error) { + this._handleError(error); + } + } + _handleError(error) { + if (!common.isFatalError(this._settings, error)) { + return; + } + throw error; + } + _handleEntry(entry, base) { + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._pushToStorage(entry); + } + if (entry.dirent.isDirectory() && 
common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); + } + } + _pushToStorage(entry) { + this._storage.push(entry); + } +} +exports.default = SyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/@nodelib/fs.walk/out/settings.d.ts new file mode 100644 index 00000000..d1c4b45f --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/settings.d.ts @@ -0,0 +1,30 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry, Errno } from './types'; +export declare type FilterFunction = (value: T) => boolean; +export declare type DeepFilterFunction = FilterFunction; +export declare type EntryFilterFunction = FilterFunction; +export declare type ErrorFilterFunction = FilterFunction; +export interface Options { + basePath?: string; + concurrency?: number; + deepFilter?: DeepFilterFunction; + entryFilter?: EntryFilterFunction; + errorFilter?: ErrorFilterFunction; + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly basePath?: string; + readonly concurrency: number; + readonly deepFilter: DeepFilterFunction | null; + readonly entryFilter: EntryFilterFunction | null; + readonly errorFilter: ErrorFilterFunction | null; + readonly pathSegmentSeparator: string; + readonly fsScandirSettings: fsScandir.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/@nodelib/fs.walk/out/settings.js new file mode 100644 index 00000000..d7a85c81 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/settings.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsScandir = require("@nodelib/fs.scandir"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.basePath = this._getValue(this._options.basePath, undefined); + this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); + this.deepFilter = this._getValue(this._options.deepFilter, null); + this.entryFilter = this._getValue(this._options.entryFilter, null); + this.errorFilter = this._getValue(this._options.errorFilter, null); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.fsScandirSettings = new fsScandir.Settings({ + followSymbolicLinks: this._options.followSymbolicLinks, + fs: this._options.fs, + pathSegmentSeparator: this._options.pathSegmentSeparator, + stats: this._options.stats, + throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? 
option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/@nodelib/fs.walk/out/types/index.d.ts new file mode 100644 index 00000000..6ee9bd3f --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/types/index.d.ts @@ -0,0 +1,8 @@ +/// +import type * as scandir from '@nodelib/fs.scandir'; +export declare type Entry = scandir.Entry; +export declare type Errno = NodeJS.ErrnoException; +export interface QueueItem { + directory: string; + base?: string; +} diff --git a/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/@nodelib/fs.walk/out/types/index.js new file mode 100644 index 00000000..c8ad2e54 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.walk/package.json b/node_modules/@nodelib/fs.walk/package.json new file mode 100644 index 00000000..86bfce48 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.walk", + "version": "1.2.8", + "description": "A library for efficiently walking a directory recursively", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "walk", + "scanner", + "crawler" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*", + "!out/**/tests/**" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8" +} diff --git a/node_modules/@sindresorhus/is/dist/example.d.ts b/node_modules/@sindresorhus/is/dist/example.d.ts new file mode 100644 index 00000000..e69de29b diff --git a/node_modules/@sindresorhus/is/dist/example.js b/node_modules/@sindresorhus/is/dist/example.js new file mode 100644 index 00000000..4d575fc1 --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/example.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=example.js.map \ No newline at end of file diff --git a/node_modules/@sindresorhus/is/dist/example.js.map b/node_modules/@sindresorhus/is/dist/example.js.map new file mode 100644 index 00000000..1677c0dd --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/example.js.map @@ -0,0 +1 @@ +{"version":3,"file":"example.js","sourceRoot":"","sources":["../example.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@sindresorhus/is/dist/index.d.ts b/node_modules/@sindresorhus/is/dist/index.d.ts new file mode 100644 index 00000000..fc54f3aa --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/index.d.ts @@ -0,0 +1,95 @@ +/// +export declare const enum TypeName { + null = "null", + boolean = "boolean", + undefined = "undefined", + string = "string", + number = "number", + symbol = "symbol", + Function = "Function", + Array = "Array", + Buffer = 
"Buffer", + Object = "Object", + RegExp = "RegExp", + Date = "Date", + Error = "Error", + Map = "Map", + Set = "Set", + WeakMap = "WeakMap", + WeakSet = "WeakSet", + Int8Array = "Int8Array", + Uint8Array = "Uint8Array", + Uint8ClampedArray = "Uint8ClampedArray", + Int16Array = "Int16Array", + Uint16Array = "Uint16Array", + Int32Array = "Int32Array", + Uint32Array = "Uint32Array", + Float32Array = "Float32Array", + Float64Array = "Float64Array", + ArrayBuffer = "ArrayBuffer", + SharedArrayBuffer = "SharedArrayBuffer", + DataView = "DataView", + Promise = "Promise", +} +declare function is(value: any): TypeName; +declare namespace is { + const undefined: (value: any) => boolean; + const string: (value: any) => boolean; + const number: (value: any) => boolean; + const function_: (value: any) => boolean; + const null_: (value: any) => boolean; + const class_: (value: any) => any; + const boolean: (value: any) => boolean; + const symbol: (value: any) => boolean; + const array: (arg: any) => arg is any[]; + const buffer: (obj: any) => obj is Buffer; + const nullOrUndefined: (value: any) => boolean; + const object: (value: any) => boolean; + const iterable: (value: any) => boolean; + const generator: (value: any) => boolean; + const nativePromise: (value: any) => boolean; + const promise: (value: any) => boolean; + const generatorFunction: (value: any) => boolean; + const asyncFunction: (value: any) => boolean; + const boundFunction: (value: any) => boolean; + const regExp: (value: any) => boolean; + const date: (value: any) => boolean; + const error: (value: any) => boolean; + const map: (value: any) => boolean; + const set: (value: any) => boolean; + const weakMap: (value: any) => boolean; + const weakSet: (value: any) => boolean; + const int8Array: (value: any) => boolean; + const uint8Array: (value: any) => boolean; + const uint8ClampedArray: (value: any) => boolean; + const int16Array: (value: any) => boolean; + const uint16Array: (value: any) => boolean; + const int32Array: (value: any) => boolean; + const uint32Array: (value: any) => boolean; + const float32Array: (value: any) => boolean; + const float64Array: (value: any) => boolean; + const arrayBuffer: (value: any) => boolean; + const sharedArrayBuffer: (value: any) => boolean; + const dataView: (value: any) => boolean; + const directInstanceOf: (instance: any, klass: any) => boolean; + const truthy: (value: any) => boolean; + const falsy: (value: any) => boolean; + const nan: (value: any) => boolean; + const primitive: (value: any) => boolean; + const integer: (value: any) => boolean; + const safeInteger: (value: any) => boolean; + const plainObject: (value: any) => boolean; + const typedArray: (value: any) => boolean; + const arrayLike: (value: any) => boolean; + const inRange: (value: number, range: number | number[]) => boolean; + const domElement: (value: any) => boolean; + const nodeStream: (value: any) => boolean; + const infinite: (value: any) => boolean; + const even: (rem: number) => boolean; + const odd: (rem: number) => boolean; + const empty: (value: any) => boolean; + const emptyOrWhitespace: (value: any) => boolean; + function any(...predicate: any[]): any; + function all(...predicate: any[]): any; +} +export default is; diff --git a/node_modules/@sindresorhus/is/dist/index.js b/node_modules/@sindresorhus/is/dist/index.js new file mode 100644 index 00000000..d613b67a --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/index.js @@ -0,0 +1,215 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true 
}); +const util = require("util"); +const toString = Object.prototype.toString; +const isOfType = (type) => (value) => typeof value === type; // tslint:disable-line:strict-type-predicates +const getObjectType = (value) => { + const objectName = toString.call(value).slice(8, -1); + if (objectName) { + return objectName; + } + return null; +}; +const isObjectOfType = (typeName) => (value) => { + return getObjectType(value) === typeName; +}; +function is(value) { + if (value === null) { + return "null" /* null */; + } + if (value === true || value === false) { + return "boolean" /* boolean */; + } + const type = typeof value; + if (type === 'undefined') { + return "undefined" /* undefined */; + } + if (type === 'string') { + return "string" /* string */; + } + if (type === 'number') { + return "number" /* number */; + } + if (type === 'symbol') { + return "symbol" /* symbol */; + } + if (is.function_(value)) { + return "Function" /* Function */; + } + if (Array.isArray(value)) { + return "Array" /* Array */; + } + if (Buffer.isBuffer(value)) { + return "Buffer" /* Buffer */; + } + const tagType = getObjectType(value); + if (tagType) { + return tagType; + } + if (value instanceof String || value instanceof Boolean || value instanceof Number) { + throw new TypeError('Please don\'t use object wrappers for primitive types'); + } + return "Object" /* Object */; +} +(function (is) { + const isObject = (value) => typeof value === 'object'; + // tslint:disable:variable-name + is.undefined = isOfType('undefined'); + is.string = isOfType('string'); + is.number = isOfType('number'); + is.function_ = isOfType('function'); + is.null_ = (value) => value === null; + is.class_ = (value) => is.function_(value) && value.toString().startsWith('class '); + is.boolean = (value) => value === true || value === false; + // tslint:enable:variable-name + is.symbol = isOfType('symbol'); + is.array = Array.isArray; + is.buffer = Buffer.isBuffer; + is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value); + is.object = (value) => !is.nullOrUndefined(value) && (is.function_(value) || isObject(value)); + is.iterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.iterator]); + is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw); + is.nativePromise = isObjectOfType("Promise" /* Promise */); + const hasPromiseAPI = (value) => !is.null_(value) && + isObject(value) && + is.function_(value.then) && + is.function_(value.catch); + is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value); + // TODO: Change to use `isObjectOfType` once Node.js 6 or higher is targeted + const isFunctionOfType = (type) => (value) => is.function_(value) && is.function_(value.constructor) && value.constructor.name === type; + is.generatorFunction = isFunctionOfType('GeneratorFunction'); + is.asyncFunction = isFunctionOfType('AsyncFunction'); + is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype'); + is.regExp = isObjectOfType("RegExp" /* RegExp */); + is.date = isObjectOfType("Date" /* Date */); + is.error = isObjectOfType("Error" /* Error */); + is.map = isObjectOfType("Map" /* Map */); + is.set = isObjectOfType("Set" /* Set */); + is.weakMap = isObjectOfType("WeakMap" /* WeakMap */); + is.weakSet = isObjectOfType("WeakSet" /* WeakSet */); + is.int8Array = isObjectOfType("Int8Array" /* Int8Array */); + is.uint8Array = isObjectOfType("Uint8Array" /* Uint8Array */); + is.uint8ClampedArray = 
isObjectOfType("Uint8ClampedArray" /* Uint8ClampedArray */); + is.int16Array = isObjectOfType("Int16Array" /* Int16Array */); + is.uint16Array = isObjectOfType("Uint16Array" /* Uint16Array */); + is.int32Array = isObjectOfType("Int32Array" /* Int32Array */); + is.uint32Array = isObjectOfType("Uint32Array" /* Uint32Array */); + is.float32Array = isObjectOfType("Float32Array" /* Float32Array */); + is.float64Array = isObjectOfType("Float64Array" /* Float64Array */); + is.arrayBuffer = isObjectOfType("ArrayBuffer" /* ArrayBuffer */); + is.sharedArrayBuffer = isObjectOfType("SharedArrayBuffer" /* SharedArrayBuffer */); + is.dataView = isObjectOfType("DataView" /* DataView */); + // TODO: Remove `object` checks when targeting ES2015 or higher + // See `Notes`: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/getPrototypeOf + is.directInstanceOf = (instance, klass) => is.object(instance) && is.object(klass) && Object.getPrototypeOf(instance) === klass.prototype; + is.truthy = (value) => Boolean(value); + is.falsy = (value) => !value; + is.nan = (value) => Number.isNaN(value); + const primitiveTypes = new Set([ + 'undefined', + 'string', + 'number', + 'boolean', + 'symbol' + ]); + is.primitive = (value) => is.null_(value) || primitiveTypes.has(typeof value); + is.integer = (value) => Number.isInteger(value); + is.safeInteger = (value) => Number.isSafeInteger(value); + is.plainObject = (value) => { + // From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js + let prototype; + return getObjectType(value) === "Object" /* Object */ && + (prototype = Object.getPrototypeOf(value), prototype === null || // tslint:disable-line:ban-comma-operator + prototype === Object.getPrototypeOf({})); + }; + const typedArrayTypes = new Set([ + "Int8Array" /* Int8Array */, + "Uint8Array" /* Uint8Array */, + "Uint8ClampedArray" /* Uint8ClampedArray */, + "Int16Array" /* Int16Array */, + "Uint16Array" /* Uint16Array */, + "Int32Array" /* Int32Array */, + "Uint32Array" /* Uint32Array */, + "Float32Array" /* Float32Array */, + "Float64Array" /* Float64Array */ + ]); + is.typedArray = (value) => { + const objectType = getObjectType(value); + if (objectType === null) { + return false; + } + return typedArrayTypes.has(objectType); + }; + const isValidLength = (value) => is.safeInteger(value) && value > -1; + is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length); + is.inRange = (value, range) => { + if (is.number(range)) { + return value >= Math.min(0, range) && value <= Math.max(range, 0); + } + if (is.array(range) && range.length === 2) { + // TODO: Use spread operator here when targeting Node.js 6 or higher + return value >= Math.min.apply(null, range) && value <= Math.max.apply(null, range); + } + throw new TypeError(`Invalid range: ${util.inspect(range)}`); + }; + const NODE_TYPE_ELEMENT = 1; + const DOM_PROPERTIES_TO_CHECK = [ + 'innerHTML', + 'ownerDocument', + 'style', + 'attributes', + 'nodeValue' + ]; + is.domElement = (value) => is.object(value) && value.nodeType === NODE_TYPE_ELEMENT && is.string(value.nodeName) && + !is.plainObject(value) && DOM_PROPERTIES_TO_CHECK.every(property => property in value); + is.nodeStream = (value) => !is.nullOrUndefined(value) && isObject(value) && is.function_(value.pipe); + is.infinite = (value) => value === Infinity || value === -Infinity; + const isAbsoluteMod2 = (value) => (rem) => is.integer(rem) && Math.abs(rem % 2) === value; + is.even = isAbsoluteMod2(0); + is.odd = 
isAbsoluteMod2(1); + const isWhiteSpaceString = (value) => is.string(value) && /\S/.test(value) === false; + const isEmptyStringOrArray = (value) => (is.string(value) || is.array(value)) && value.length === 0; + const isEmptyObject = (value) => !is.map(value) && !is.set(value) && is.object(value) && Object.keys(value).length === 0; + const isEmptyMapOrSet = (value) => (is.map(value) || is.set(value)) && value.size === 0; + is.empty = (value) => is.falsy(value) || isEmptyStringOrArray(value) || isEmptyObject(value) || isEmptyMapOrSet(value); + is.emptyOrWhitespace = (value) => is.empty(value) || isWhiteSpaceString(value); + const predicateOnArray = (method, predicate, args) => { + // `args` is the calling function's "arguments object". + // We have to do it this way to keep node v4 support. + // So here we convert it to an array and slice off the first item. + const values = Array.prototype.slice.call(args, 1); + if (is.function_(predicate) === false) { + throw new TypeError(`Invalid predicate: ${util.inspect(predicate)}`); + } + if (values.length === 0) { + throw new TypeError('Invalid number of values'); + } + return method.call(values, predicate); + }; + function any(predicate) { + return predicateOnArray(Array.prototype.some, predicate, arguments); + } + is.any = any; + function all(predicate) { + return predicateOnArray(Array.prototype.every, predicate, arguments); + } + is.all = all; + // tslint:enable:only-arrow-functions no-function-expression +})(is || (is = {})); +// Some few keywords are reserved, but we'll populate them for Node.js users +// See https://github.com/Microsoft/TypeScript/issues/2536 +Object.defineProperties(is, { + class: { + value: is.class_ + }, + function: { + value: is.function_ + }, + null: { + value: is.null_ + } +}); +exports.default = is; +// For CommonJS default export support +module.exports = is; +module.exports.default = is; diff --git a/node_modules/@sindresorhus/is/dist/index.js.map b/node_modules/@sindresorhus/is/dist/index.js.map new file mode 100644 index 00000000..7461e68c --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../source/index.ts"],"names":[],"mappings":";;AAAA,6BAA6B;AAE7B,MAAM,QAAQ,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC;AAC3C,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAW,CAAC;AAClF,MAAM,QAAQ,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,KAAU,EAAE,EAAE,CAAC,OAAO,KAAK,KAAK,IAAI,CAAC;AACzE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,KAAU,EAAE,EAAE,CAAC,aAAa,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC;AAEvF,YAAY,KAAU;IACrB,EAAE,CAAC,CAAC,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC;QACpB,MAAM,CAAC,MAAM,CAAC;IACf,CAAC;IAED,EAAE,CAAC,CAAC,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC;QACvC,MAAM,CAAC,SAAS,CAAC;IAClB,CAAC;IAED,MAAM,IAAI,GAAG,OAAO,KAAK,CAAC;IAE1B,EAAE,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC;QAC1B,MAAM,CAAC,WAAW,CAAC;IACpB,CAAC;IAED,EAAE,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,EAAE,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,EAAE,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACzB,MAAM,CAAC,UAAU,CAAC;IACnB,CAAC;IAED,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAC1B,MAAM,CAAC,OAAO,CAAC;IAChB,CAAC;IAED,EAAE,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAC5B,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,MAAM,OAAO,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC;IACrC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;QACb,MAAM,CAAC,OAAO,CAAC;IAChB,CAAC;IAED,EAAE,CAAC,CAAC,KAAK,YAAY,MAAM,IAAI,KAAK,YAAY,OAAO,IAAI,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC;QACpF,MAAM,IAAI,SAAS,CAAC,uDAAuD,CAAC,CAAC;IAC9E,CAAC;IAED,MAAM,CAAC,QAAQ,CAAC;AACjB,CAAC;AAED,WAAU,EAAE;IACX,MAAM,QAAQ,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,OAAO,KAAK,KAAK,QAAQ,CAAC;IAG9C,YAAS,GAAG,QAAQ,CAAC,WAAW,CAAC,CAAC;IAClC,SAAM,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAC;IAC5B,SAAM,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAC;IAC5B,YAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;IACjC,QAAK,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK,KAAK,IAAI,CAAC;IAEvC,SAAM,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,QAAQ,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IACnF,UAAO,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,CAAC;IAG5D,SAAM,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAC;IAE5B,QAAK,GAAG,KAAK,CAAC,OAAO,CAAC;IACtB,SAAM,GAAG,MAAM,CAAC,QAAQ,CAAC;IAEzB,kBAAe,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,CAAC;IACnE,SAAM,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;IAC1F,WAAQ,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,eAAe,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxF,YAAS,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,QAAQ,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAE/F,gBAAa,GAAG,cAAc,CAAC,SAAS,CAAC,CAAC;IAEvD,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CACpC,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC;QACb,QAAQ,CAAC,KAAK,CAAC;QACf,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC;QACrB,GAAA,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAEX,UAAO,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,aAAa,CAAC,KAAK,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,CAAC;IAGpF,MAAM,gBAAgB,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,KAAK,CAAC,WAAW,CAAC,IAAI,KAAK,IAAI,CAAC;IAElI,oBAAiB,GAAG,gBAAgB,CAAC,mBAAmB,CAAC,CAAC;IAC1D,gBAAa,GAAG,gBAAgB,CAAC,eAAe,CAAC,CAAC;IAElD,SAAM,GAAG,cAAc,CAAC,QAAQ,CAAC,CAAC;IAClC,OAAI,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC;IAC9B,QAAK,GAAG,cAAc,CAAC,OAAO
,CAAC,CAAC;IAChC,MAAG,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;IAC5B,MAAG,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;IAC5B,UAAO,GAAG,cAAc,CAAC,SAAS,CAAC,CAAC;IACpC,UAAO,GAAG,cAAc,CAAC,SAAS,CAAC,CAAC;IAEpC,YAAS,GAAG,cAAc,CAAC,WAAW,CAAC,CAAC;IACxC,aAAU,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAC1C,oBAAiB,GAAG,cAAc,CAAC,mBAAmB,CAAC,CAAC;IACxD,aAAU,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAC1C,cAAW,GAAG,cAAc,CAAC,aAAa,CAAC,CAAC;IAC5C,aAAU,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAC1C,cAAW,GAAG,cAAc,CAAC,aAAa,CAAC,CAAC;IAC5C,eAAY,GAAG,cAAc,CAAC,cAAc,CAAC,CAAC;IAC9C,eAAY,GAAG,cAAc,CAAC,cAAc,CAAC,CAAC;IAE9C,cAAW,GAAG,cAAc,CAAC,aAAa,CAAC,CAAC;IAC5C,oBAAiB,GAAG,cAAc,CAAC,mBAAmB,CAAC,CAAC;IAExD,SAAM,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACxC,QAAK,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC;IAE/B,MAAG,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAEvD,MAAM,cAAc,GAAG,IAAI,GAAG,CAAC;QAC9B,WAAW;QACX,QAAQ;QACR,QAAQ;QACR,SAAS;QACT,QAAQ;KACR,CAAC,CAAC;IAEU,YAAS,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,cAAc,CAAC,GAAG,CAAC,OAAO,KAAK,CAAC,CAAC;IAE7E,UAAO,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;IAClD,cAAW,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC;IAE1D,cAAW,GAAG,CAAC,KAAU,EAAE,EAAE;QAEzC,IAAI,SAAS,CAAC;QAEd,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,KAAK,QAAQ;YACvC,CAAC,SAAS,GAAG,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAE,SAAS,KAAK,IAAI;gBAC5D,SAAS,KAAK,MAAM,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC,CAAC;IAC5C,CAAC,CAAC;IAEF,MAAM,eAAe,GAAG,IAAI,GAAG,CAAC;QAC/B,WAAW;QACX,YAAY;QACZ,mBAAmB;QACnB,YAAY;QACZ,aAAa;QACb,YAAY;QACZ,aAAa;QACb,cAAc;QACd,cAAc;KACd,CAAC,CAAC;IACU,aAAU,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,eAAe,CAAC,GAAG,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC;IAEpF,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,WAAW,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC,CAAC,CAAC;IAC1D,YAAS,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;IAExG,UAAO,GAAG,CAAC,KAAa,EAAE,KAAwB,EAAE,EAAE;QAClE,EAAE,CAAC,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,MAAM,CAAC,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,KAAe,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,KAAe,EAAE,CAAC,CAAC,CAAC;QACvF,CAAC;QAED,EAAE,CAAC,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC;YAExC,MAAM,CAAC,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QACrF,CAAC;QAED,MAAM,IAAI,SAAS,CAAC,kBAAkB,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC9D,CAAC,CAAC;IAEF,MAAM,iBAAiB,GAAG,CAAC,CAAC;IAC5B,MAAM,uBAAuB,GAAG;QAC/B,WAAW;QACX,eAAe;QACf,OAAO;QACP,YAAY;QACZ,WAAW;KACX,CAAC;IAEW,aAAU,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,QAAQ,KAAK,iBAAiB,IAAI,GAAA,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC;QACxH,CAAC,GAAA,WAAW,CAAC,KAAK,CAAC,IAAI,uBAAuB,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,IAAI,KAAK,CAAC,CAAC;IAExE,WAAQ,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,CAAC,QAAQ,CAAC;IAElF,MAAM,cAAc,GAAG,CAAC,KAAa,EAAE,EAAE,CAAC,CAAC,GAAW,EAAE,EAAE,CAAC,GAAA,OAAO,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,KAAK,CAAC;IAC1F,OAAI,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;IACzB,MAAG,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;IAErC,MAAM,kBAAkB,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,KAAK,CAAC;IACvF,MAAM,oBAAoB,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,GAAA,KAAK,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC;IACnG,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,GAAA,
GAAG,CAAC,KAAK,CAAC,IAAI,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC;IACrH,MAAM,eAAe,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,GAAG,CAAC,KAAK,CAAC,IAAI,GAAA,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,CAAC;IAE1E,QAAK,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,oBAAoB,CAAC,KAAK,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,IAAI,eAAe,CAAC,KAAK,CAAC,CAAC;IACtH,oBAAiB,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,CAAC;IAG3F,MAAM,gBAAgB,GAAG,CAAC,MAAmB,EAAE,SAAc,EAAE,IAAgB,EAAE,EAAE;QAIlF,MAAM,MAAM,GAAG,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;QAEnD,EAAE,CAAC,CAAC,GAAA,SAAS,CAAC,SAAS,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC;YACpC,MAAM,IAAI,SAAS,CAAC,sBAAsB,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QACtE,CAAC;QAED,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC;YACzB,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAC;QACjD,CAAC;QAED,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IACvC,CAAC,CAAC;IAMF,aAAoB,SAAc;QACjC,MAAM,CAAC,gBAAgB,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACrE,CAAC;IAFe,MAAG,MAElB,CAAA;IAGD,aAAoB,SAAc;QACjC,MAAM,CAAC,gBAAgB,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACtE,CAAC;IAFe,MAAG,MAElB,CAAA;AAEF,CAAC,EA9KS,EAAE,KAAF,EAAE,QA8KX;AAID,MAAM,CAAC,gBAAgB,CAAC,EAAE,EAAE;IAC3B,KAAK,EAAE;QACN,KAAK,EAAE,EAAE,CAAC,MAAM;KAChB;IACD,QAAQ,EAAE;QACT,KAAK,EAAE,EAAE,CAAC,SAAS;KACnB;IACD,IAAI,EAAE;QACL,KAAK,EAAE,EAAE,CAAC,KAAK;KACf;CACD,CAAC,CAAC;AAEH,kBAAe,EAAE,CAAC;AAGlB,MAAM,CAAC,OAAO,GAAG,EAAE,CAAC;AACpB,MAAM,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/@sindresorhus/is/dist/source/index.d.ts b/node_modules/@sindresorhus/is/dist/source/index.d.ts new file mode 100644 index 00000000..f5fe7225 --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/source/index.d.ts @@ -0,0 +1,59 @@ +/// +declare function is(value: any): string; +declare namespace is { + const undefined: (value: any) => boolean; + const string: (value: any) => boolean; + const number: (value: any) => boolean; + const function_: (value: any) => boolean; + const null_: (value: any) => boolean; + const class_: (value: any) => any; + const boolean: (value: any) => boolean; + const symbol: (value: any) => boolean; + const array: (arg: any) => arg is any[]; + const buffer: (obj: any) => obj is Buffer; + const nullOrUndefined: (value: any) => boolean; + const object: (value: any) => boolean; + const iterable: (value: any) => boolean; + const generator: (value: any) => boolean; + const nativePromise: (value: any) => boolean; + const promise: (value: any) => boolean; + const generatorFunction: (value: any) => boolean; + const asyncFunction: (value: any) => boolean; + const regExp: (value: any) => boolean; + const date: (value: any) => boolean; + const error: (value: any) => boolean; + const map: (value: any) => boolean; + const set: (value: any) => boolean; + const weakMap: (value: any) => boolean; + const weakSet: (value: any) => boolean; + const int8Array: (value: any) => boolean; + const uint8Array: (value: any) => boolean; + const uint8ClampedArray: (value: any) => boolean; + const int16Array: (value: any) => boolean; + const uint16Array: (value: any) => boolean; + const int32Array: (value: any) => boolean; + const uint32Array: (value: any) => boolean; + const float32Array: (value: any) => boolean; + const float64Array: (value: any) => boolean; + const arrayBuffer: (value: any) => boolean; + const sharedArrayBuffer: (value: any) => boolean; + 
const truthy: (value: any) => boolean; + const falsy: (value: any) => boolean; + const nan: (value: any) => boolean; + const primitive: (value: any) => boolean; + const integer: (value: any) => boolean; + const safeInteger: (value: any) => boolean; + const plainObject: (value: any) => boolean; + const typedArray: (value: any) => boolean; + const arrayLike: (value: any) => boolean; + const inRange: (value: number, range: number | number[]) => boolean; + const domElement: (value: any) => boolean; + const infinite: (value: any) => boolean; + const even: (rem: number) => boolean; + const odd: (rem: number) => boolean; + const empty: (value: any) => boolean; + const emptyOrWhitespace: (value: any) => boolean; + function any(...predicate: any[]): any; + function all(...predicate: any[]): any; +} +export default is; diff --git a/node_modules/@sindresorhus/is/dist/source/index.js b/node_modules/@sindresorhus/is/dist/source/index.js new file mode 100644 index 00000000..2ff01c9c --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/source/index.js @@ -0,0 +1,182 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const util = require("util"); +const toString = Object.prototype.toString; +const getObjectType = (value) => toString.call(value).slice(8, -1); +const isOfType = (type) => (value) => typeof value === type; +const isObjectOfType = (type) => (value) => getObjectType(value) === type; +function is(value) { + if (value === null) { + return 'null'; + } + if (value === true || value === false) { + return 'boolean'; + } + const type = typeof value; + if (type === 'undefined') { + return 'undefined'; + } + if (type === 'string') { + return 'string'; + } + if (type === 'number') { + return 'number'; + } + if (type === 'symbol') { + return 'symbol'; + } + if (is.function_(value)) { + return 'Function'; + } + if (Array.isArray(value)) { + return 'Array'; + } + if (Buffer.isBuffer(value)) { + return 'Buffer'; + } + const tagType = getObjectType(value); + if (tagType) { + return tagType; + } + if (value instanceof String || value instanceof Boolean || value instanceof Number) { + throw new TypeError('Please don\'t use object wrappers for primitive types'); + } + return 'Object'; +} +(function (is) { + const isObject = (value) => typeof value === 'object'; + is.undefined = isOfType('undefined'); + is.string = isOfType('string'); + is.number = isOfType('number'); + is.function_ = isOfType('function'); + is.null_ = (value) => value === null; + is.class_ = (value) => is.function_(value) && value.toString().startsWith('class '); + is.boolean = (value) => value === true || value === false; + is.symbol = isOfType('symbol'); + is.array = Array.isArray; + is.buffer = Buffer.isBuffer; + is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value); + is.object = (value) => !is.nullOrUndefined(value) && (is.function_(value) || isObject(value)); + is.iterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.iterator]); + is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw); + is.nativePromise = isObjectOfType('Promise'); + const hasPromiseAPI = (value) => !is.null_(value) && + isObject(value) && + is.function_(value.then) && + is.function_(value.catch); + is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value); + const isFunctionOfType = (type) => (value) => is.function_(value) && is.function_(value.constructor) && value.constructor.name === type; + is.generatorFunction = 
isFunctionOfType('GeneratorFunction'); + is.asyncFunction = isFunctionOfType('AsyncFunction'); + is.regExp = isObjectOfType('RegExp'); + is.date = isObjectOfType('Date'); + is.error = isObjectOfType('Error'); + is.map = isObjectOfType('Map'); + is.set = isObjectOfType('Set'); + is.weakMap = isObjectOfType('WeakMap'); + is.weakSet = isObjectOfType('WeakSet'); + is.int8Array = isObjectOfType('Int8Array'); + is.uint8Array = isObjectOfType('Uint8Array'); + is.uint8ClampedArray = isObjectOfType('Uint8ClampedArray'); + is.int16Array = isObjectOfType('Int16Array'); + is.uint16Array = isObjectOfType('Uint16Array'); + is.int32Array = isObjectOfType('Int32Array'); + is.uint32Array = isObjectOfType('Uint32Array'); + is.float32Array = isObjectOfType('Float32Array'); + is.float64Array = isObjectOfType('Float64Array'); + is.arrayBuffer = isObjectOfType('ArrayBuffer'); + is.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer'); + is.truthy = (value) => Boolean(value); + is.falsy = (value) => !value; + is.nan = (value) => Number.isNaN(value); + const primitiveTypes = new Set([ + 'undefined', + 'string', + 'number', + 'boolean', + 'symbol' + ]); + is.primitive = (value) => is.null_(value) || primitiveTypes.has(typeof value); + is.integer = (value) => Number.isInteger(value); + is.safeInteger = (value) => Number.isSafeInteger(value); + is.plainObject = (value) => { + let prototype; + return getObjectType(value) === 'Object' && + (prototype = Object.getPrototypeOf(value), prototype === null || + prototype === Object.getPrototypeOf({})); + }; + const typedArrayTypes = new Set([ + 'Int8Array', + 'Uint8Array', + 'Uint8ClampedArray', + 'Int16Array', + 'Uint16Array', + 'Int32Array', + 'Uint32Array', + 'Float32Array', + 'Float64Array' + ]); + is.typedArray = (value) => typedArrayTypes.has(getObjectType(value)); + const isValidLength = (value) => is.safeInteger(value) && value > -1; + is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length); + is.inRange = (value, range) => { + if (is.number(range)) { + return value >= Math.min(0, range) && value <= Math.max(range, 0); + } + if (is.array(range) && range.length === 2) { + return value >= Math.min.apply(null, range) && value <= Math.max.apply(null, range); + } + throw new TypeError(`Invalid range: ${util.inspect(range)}`); + }; + const NODE_TYPE_ELEMENT = 1; + const DOM_PROPERTIES_TO_CHECK = [ + 'innerHTML', + 'ownerDocument', + 'style', + 'attributes', + 'nodeValue' + ]; + is.domElement = (value) => is.object(value) && value.nodeType === NODE_TYPE_ELEMENT && is.string(value.nodeName) && + !is.plainObject(value) && DOM_PROPERTIES_TO_CHECK.every(property => property in value); + is.infinite = (value) => value === Infinity || value === -Infinity; + const isAbsoluteMod2 = (value) => (rem) => is.integer(rem) && Math.abs(rem % 2) === value; + is.even = isAbsoluteMod2(0); + is.odd = isAbsoluteMod2(1); + const isWhiteSpaceString = (value) => is.string(value) && /\S/.test(value) === false; + const isEmptyStringOrArray = (value) => (is.string(value) || is.array(value)) && value.length === 0; + const isEmptyObject = (value) => !is.map(value) && !is.set(value) && is.object(value) && Object.keys(value).length === 0; + const isEmptyMapOrSet = (value) => (is.map(value) || is.set(value)) && value.size === 0; + is.empty = (value) => is.falsy(value) || isEmptyStringOrArray(value) || isEmptyObject(value) || isEmptyMapOrSet(value); + is.emptyOrWhitespace = (value) => is.empty(value) || isWhiteSpaceString(value); + const 
predicateOnArray = (method, predicate, args) => { + const values = Array.prototype.slice.call(args, 1); + if (is.function_(predicate) === false) { + throw new TypeError(`Invalid predicate: ${util.inspect(predicate)}`); + } + if (values.length === 0) { + throw new TypeError('Invalid number of values'); + } + return method.call(values, predicate); + }; + function any(predicate) { + return predicateOnArray(Array.prototype.some, predicate, arguments); + } + is.any = any; + function all(predicate) { + return predicateOnArray(Array.prototype.every, predicate, arguments); + } + is.all = all; +})(is || (is = {})); +Object.defineProperties(is, { + class: { + value: is.class_ + }, + function: { + value: is.function_ + }, + null: { + value: is.null_ + } +}); +exports.default = is; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@sindresorhus/is/dist/source/index.js.map b/node_modules/@sindresorhus/is/dist/source/index.js.map new file mode 100644 index 00000000..5cb0e0c8 --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/source/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../source/index.ts"],"names":[],"mappings":";;AAAA,6BAA6B;AAE7B,MAAM,QAAQ,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC;AAC3C,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAW,CAAC;AAClF,MAAM,QAAQ,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,KAAU,EAAE,EAAE,CAAC,OAAO,KAAK,KAAK,IAAI,CAAC;AACzE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,KAAU,EAAE,EAAE,CAAC,aAAa,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC;AAEvF,YAAY,KAAU;IACrB,EAAE,CAAC,CAAC,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC;QACpB,MAAM,CAAC,MAAM,CAAC;IACf,CAAC;IAED,EAAE,CAAC,CAAC,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC;QACvC,MAAM,CAAC,SAAS,CAAC;IAClB,CAAC;IAED,MAAM,IAAI,GAAG,OAAO,KAAK,CAAC;IAE1B,EAAE,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC;QAC1B,MAAM,CAAC,WAAW,CAAC;IACpB,CAAC;IAED,EAAE,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,EAAE,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,EAAE,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;QACvB,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,EAAE,CAAC,CAAC,EAAE,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACzB,MAAM,CAAC,UAAU,CAAC;IACnB,CAAC;IAED,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAC1B,MAAM,CAAC,OAAO,CAAC;IAChB,CAAC;IAED,EAAE,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAC5B,MAAM,CAAC,QAAQ,CAAC;IACjB,CAAC;IAED,MAAM,OAAO,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC;IACrC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;QACb,MAAM,CAAC,OAAO,CAAC;IAChB,CAAC;IAED,EAAE,CAAC,CAAC,KAAK,YAAY,MAAM,IAAI,KAAK,YAAY,OAAO,IAAI,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC;QACpF,MAAM,IAAI,SAAS,CAAC,uDAAuD,CAAC,CAAC;IAC9E,CAAC;IAED,MAAM,CAAC,QAAQ,CAAC;AACjB,CAAC;AAED,WAAU,EAAE;IACX,MAAM,QAAQ,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,OAAO,KAAK,KAAK,QAAQ,CAAC;IAG9C,YAAS,GAAG,QAAQ,CAAC,WAAW,CAAC,CAAC;IAClC,SAAM,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAC;IAC5B,SAAM,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAC;IAC5B,YAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;IACjC,QAAK,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK,KAAK,IAAI,CAAC;IAEvC,SAAM,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,QAAQ,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IACnF,UAAO,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,CAAC;IAG5D,SAAM,GAAG,QAAQ,CAAC,QAAQ,CAAC,CAAC;IAE5B,QAAK,GAAG,KAAK,CAAC,OAAO,CAAC;IACtB,SAAM,GAAG,MAAM,CAAC,QAAQ,CAAC;IAEzB,kBAAe,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,CAAC;IACnE,SAAM,GAAG,CAAC,KAAU,E
AAE,EAAE,CAAC,CAAC,GAAA,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;IAC1F,WAAQ,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,eAAe,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxF,YAAS,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,QAAQ,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAE/F,gBAAa,GAAG,cAAc,CAAC,SAAS,CAAC,CAAC;IAEvD,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CACpC,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC;QACb,QAAQ,CAAC,KAAK,CAAC;QACf,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC;QACrB,GAAA,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAEX,UAAO,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,aAAa,CAAC,KAAK,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,CAAC;IAGpF,MAAM,gBAAgB,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,GAAA,SAAS,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,KAAK,CAAC,WAAW,CAAC,IAAI,KAAK,IAAI,CAAC;IAElI,oBAAiB,GAAG,gBAAgB,CAAC,mBAAmB,CAAC,CAAC;IAC1D,gBAAa,GAAG,gBAAgB,CAAC,eAAe,CAAC,CAAC;IAElD,SAAM,GAAG,cAAc,CAAC,QAAQ,CAAC,CAAC;IAClC,OAAI,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC;IAC9B,QAAK,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC;IAChC,MAAG,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;IAC5B,MAAG,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;IAC5B,UAAO,GAAG,cAAc,CAAC,SAAS,CAAC,CAAC;IACpC,UAAO,GAAG,cAAc,CAAC,SAAS,CAAC,CAAC;IAEpC,YAAS,GAAG,cAAc,CAAC,WAAW,CAAC,CAAC;IACxC,aAAU,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAC1C,oBAAiB,GAAG,cAAc,CAAC,mBAAmB,CAAC,CAAC;IACxD,aAAU,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAC1C,cAAW,GAAG,cAAc,CAAC,aAAa,CAAC,CAAC;IAC5C,aAAU,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;IAC1C,cAAW,GAAG,cAAc,CAAC,aAAa,CAAC,CAAC;IAC5C,eAAY,GAAG,cAAc,CAAC,cAAc,CAAC,CAAC;IAC9C,eAAY,GAAG,cAAc,CAAC,cAAc,CAAC,CAAC;IAE9C,cAAW,GAAG,cAAc,CAAC,aAAa,CAAC,CAAC;IAC5C,oBAAiB,GAAG,cAAc,CAAC,mBAAmB,CAAC,CAAC;IAExD,SAAM,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACxC,QAAK,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC;IAE/B,MAAG,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAEvD,MAAM,cAAc,GAAG,IAAI,GAAG,CAAC;QAC9B,WAAW;QACX,QAAQ;QACR,QAAQ;QACR,SAAS;QACT,QAAQ;KACR,CAAC,CAAC;IAEU,YAAS,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,cAAc,CAAC,GAAG,CAAC,OAAO,KAAK,CAAC,CAAC;IAE7E,UAAO,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;IAClD,cAAW,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC;IAE1D,cAAW,GAAG,CAAC,KAAU,EAAE,EAAE;QAEzC,IAAI,SAAS,CAAC;QAEd,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,KAAK,QAAQ;YACvC,CAAC,SAAS,GAAG,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAE,SAAS,KAAK,IAAI;gBAC5D,SAAS,KAAK,MAAM,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC,CAAC;IAC5C,CAAC,CAAC;IAEF,MAAM,eAAe,GAAG,IAAI,GAAG,CAAC;QAC/B,WAAW;QACX,YAAY;QACZ,mBAAmB;QACnB,YAAY;QACZ,aAAa;QACb,YAAY;QACZ,aAAa;QACb,cAAc;QACd,cAAc;KACd,CAAC,CAAC;IACU,aAAU,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,eAAe,CAAC,GAAG,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC;IAEpF,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,WAAW,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC,CAAC,CAAC;IAC1D,YAAS,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,eAAe,CAAC,KAAK,CAAC,IAAI,CAAC,GAAA,SAAS,CAAC,KAAK,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;IAExG,UAAO,GAAG,CAAC,KAAa,EAAE,KAAwB,EAAE,EAAE;QAClE,EAAE,CAAC,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,MAAM,CAAC,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,KAAe,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,KAAe,EAAE,CAAC,CAAC,CAAC;QACvF,CAAC;QAED,EAAE,CAAC,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC;YAExC,MAAM,CAAC,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QACrF,CAAC;QAED,MAAM,IAAI,SAAS,CAAC,kBAAkB,I
AAI,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC9D,CAAC,CAAC;IAEF,MAAM,iBAAiB,GAAG,CAAC,CAAC;IAC5B,MAAM,uBAAuB,GAAG;QAC/B,WAAW;QACX,eAAe;QACf,OAAO;QACP,YAAY;QACZ,WAAW;KACX,CAAC;IAEW,aAAU,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,QAAQ,KAAK,iBAAiB,IAAI,GAAA,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC;QACxH,CAAC,GAAA,WAAW,CAAC,KAAK,CAAC,IAAI,uBAAuB,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,IAAI,KAAK,CAAC,CAAC;IAExE,WAAQ,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,CAAC,QAAQ,CAAC;IAElF,MAAM,cAAc,GAAG,CAAC,KAAa,EAAE,EAAE,CAAC,CAAC,GAAW,EAAE,EAAE,CAAC,GAAA,OAAO,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,KAAK,CAAC;IAC1F,OAAI,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;IACzB,MAAG,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;IAErC,MAAM,kBAAkB,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,KAAK,CAAC;IACvF,MAAM,oBAAoB,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,GAAA,KAAK,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,CAAC;IACnG,MAAM,aAAa,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,GAAA,GAAG,CAAC,KAAK,CAAC,IAAI,GAAA,MAAM,CAAC,KAAK,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC;IACrH,MAAM,eAAe,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,CAAC,GAAA,GAAG,CAAC,KAAK,CAAC,IAAI,GAAA,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,CAAC;IAE1E,QAAK,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,oBAAoB,CAAC,KAAK,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,IAAI,eAAe,CAAC,KAAK,CAAC,CAAC;IACtH,oBAAiB,GAAG,CAAC,KAAU,EAAE,EAAE,CAAC,GAAA,KAAK,CAAC,KAAK,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,CAAC;IAG3F,MAAM,gBAAgB,GAAG,CAAC,MAAmB,EAAE,SAAc,EAAE,IAAgB,EAAE,EAAE;QAIlF,MAAM,MAAM,GAAG,KAAK,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;QAEnD,EAAE,CAAC,CAAC,GAAA,SAAS,CAAC,SAAS,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC;YACpC,MAAM,IAAI,SAAS,CAAC,sBAAsB,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QACtE,CAAC;QAED,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC;YACzB,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAC;QACjD,CAAC;QAED,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IACvC,CAAC,CAAC;IAMF,aAAoB,SAAc;QACjC,MAAM,CAAC,gBAAgB,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACrE,CAAC;IAFe,MAAG,MAElB,CAAA;IAGD,aAAoB,SAAc;QACjC,MAAM,CAAC,gBAAgB,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACtE,CAAC;IAFe,MAAG,MAElB,CAAA;AAEF,CAAC,EA9KS,EAAE,KAAF,EAAE,QA8KX;AAID,MAAM,CAAC,gBAAgB,CAAC,EAAE,EAAE;IAC3B,KAAK,EAAE;QACN,KAAK,EAAE,EAAE,CAAC,MAAM;KAChB;IACD,QAAQ,EAAE;QACT,KAAK,EAAE,EAAE,CAAC,SAAS;KACnB;IACD,IAAI,EAAE;QACL,KAAK,EAAE,EAAE,CAAC,KAAK;KACf;CACD,CAAC,CAAC;AAEH,kBAAe,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/@sindresorhus/is/dist/source/tests/test.d.ts b/node_modules/@sindresorhus/is/dist/source/tests/test.d.ts new file mode 100644 index 00000000..e69de29b diff --git a/node_modules/@sindresorhus/is/dist/source/tests/test.js b/node_modules/@sindresorhus/is/dist/source/tests/test.js new file mode 100644 index 00000000..7a35b4d9 --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/source/tests/test.js @@ -0,0 +1,622 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const util = require("util"); +const ava_1 = require("ava"); +const jsdom_1 = require("jsdom"); +const __1 = require(".."); +const isNode8orHigher = Number(process.versions.node.split('.')[0]) >= 8; +class ErrorSubclassFixture extends Error { +} +const document = jsdom_1.jsdom(); +const createDomElement = (el) => document.createElement(el); +const types = new Map([ + ['undefined', { + is: __1.default.undefined, + fixtures: [ + undefined + ] + }], + ['null', { + is: __1.default.null_, + fixtures: [ + null + ] + }], + ['string', { + is: __1.default.string, + fixtures: [ + '🦄', + 'hello world', + '' + ] + }], + ['number', { + is: __1.default.number, + fixtures: [ + 6, + 1.4, + 0, + -0, + Infinity, + -Infinity + ] + }], + ['boolean', { + is: __1.default.boolean, + fixtures: [ + true, false + ] + }], + ['symbol', { + is: __1.default.symbol, + fixtures: [ + Symbol('🦄') + ] + }], + ['array', { + is: __1.default.array, + fixtures: [ + [1, 2], + new Array(2) + ] + }], + ['function', { + is: __1.default.function_, + fixtures: [ + function foo() { }, + function () { }, + () => { }, + function () { + return __awaiter(this, void 0, void 0, function* () { }); + }, + function* () { } + ] + }], + ['buffer', { + is: __1.default.buffer, + fixtures: [ + Buffer.from('🦄') + ] + }], + ['object', { + is: __1.default.object, + fixtures: [ + { x: 1 }, + Object.create({ x: 1 }) + ] + }], + ['regExp', { + is: __1.default.regExp, + fixtures: [ + /\w/, + new RegExp('\\w') + ] + }], + ['date', { + is: __1.default.date, + fixtures: [ + new Date() + ] + }], + ['error', { + is: __1.default.error, + fixtures: [ + new Error('🦄'), + new ErrorSubclassFixture() + ] + }], + ['nativePromise', { + is: __1.default.nativePromise, + fixtures: [ + Promise.resolve(), + ] + }], + ['promise', { + is: __1.default.promise, + fixtures: [ + { then() { }, catch() { } } + ] + }], + ['generator', { + is: __1.default.generator, + fixtures: [ + (function* () { yield 4; })() + ] + }], + ['generatorFunction', { + is: __1.default.generatorFunction, + fixtures: [ + function* () { yield 4; } + ] + }], + ['asyncFunction', { + is: __1.default.asyncFunction, + fixtures: [ + function () { + return __awaiter(this, void 0, void 0, function* () { }); + }, + () => __awaiter(this, void 0, void 0, function* () { }) + ] + }], + ['map', { + is: __1.default.map, + fixtures: [ + new Map() + ] + }], + ['set', { + is: __1.default.set, + fixtures: [ + new Set() + ] + }], + ['weakSet', { + is: __1.default.weakSet, + fixtures: [ + new WeakSet() + ] + }], + ['weakMap', { + is: __1.default.weakMap, + fixtures: [ + new WeakMap() + ] + }], + ['int8Array', { + is: __1.default.int8Array, + fixtures: [ + new Int8Array(0) + ] + }], + ['uint8Array', { + is: __1.default.uint8Array, + fixtures: [ + new Uint8Array(0) + ] + }], + ['uint8ClampedArray', { + is: __1.default.uint8ClampedArray, + fixtures: [ + new Uint8ClampedArray(0) + ] + }], + ['int16Array', { + is: __1.default.int16Array, + fixtures: [ + new Int16Array(0) + ] + }], + ['uint16Array', { + is: __1.default.uint16Array, + fixtures: [ + new Uint16Array(0) + ] + }], + ['int32Array', { + is: __1.default.int32Array, + fixtures: [ + new Int32Array(0) + ] + }], + ['uint32Array', { + is: __1.default.uint32Array, + fixtures: [ + new Uint32Array(0) + ] + }], + ['float32Array', { + 
is: __1.default.float32Array, + fixtures: [ + new Float32Array(0) + ] + }], + ['float64Array', { + is: __1.default.float64Array, + fixtures: [ + new Float64Array(0) + ] + }], + ['arrayBuffer', { + is: __1.default.arrayBuffer, + fixtures: [ + new ArrayBuffer(10) + ] + }], + ['nan', { + is: __1.default.nan, + fixtures: [ + NaN, + Number.NaN + ] + }], + ['nullOrUndefined', { + is: __1.default.nullOrUndefined, + fixtures: [ + null, + undefined + ] + }], + ['plainObject', { + is: __1.default.plainObject, + fixtures: [ + { x: 1 }, + Object.create(null), + new Object() + ] + }], + ['integer', { + is: __1.default.integer, + fixtures: [ + 6 + ] + }], + ['safeInteger', { + is: __1.default.safeInteger, + fixtures: [ + Math.pow(2, 53) - 1, + -Math.pow(2, 53) + 1 + ] + }], + ['domElement', { + is: __1.default.domElement, + fixtures: [ + 'div', + 'input', + 'span', + 'img', + 'canvas', + 'script' + ].map(createDomElement) + } + ], ['non-domElements', { + is: value => !__1.default.domElement(value), + fixtures: [ + document.createTextNode('data'), + document.createProcessingInstruction('xml-stylesheet', 'href="mycss.css" type="text/css"'), + document.createComment('This is a comment'), + document, + document.implementation.createDocumentType('svg:svg', '-//W3C//DTD SVG 1.1//EN', 'http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd'), + document.createDocumentFragment() + ] + }], + ['infinite', { + is: __1.default.infinite, + fixtures: [ + Infinity, + -Infinity + ] + }] +]); +const testType = (t, type, exclude) => { + const testData = types.get(type); + if (testData === undefined) { + t.fail(`is.${type} not defined`); + return; + } + const { is } = testData; + for (const [key, { fixtures }] of types) { + if (exclude && exclude.indexOf(key) !== -1) { + continue; + } + const assert = key === type ? 
t.true.bind(t) : t.false.bind(t); + for (const fixture of fixtures) { + assert(is(fixture), `Value: ${util.inspect(fixture)}`); + } + } +}; +ava_1.default('is', t => { + t.is(__1.default(null), 'null'); + t.is(__1.default(undefined), 'undefined'); +}); +ava_1.default('is.undefined', t => { + testType(t, 'undefined', ['nullOrUndefined']); +}); +ava_1.default('is.null', t => { + testType(t, 'null', ['nullOrUndefined']); +}); +ava_1.default('is.string', t => { + testType(t, 'string'); +}); +ava_1.default('is.number', t => { + testType(t, 'number', ['nan', 'integer', 'safeInteger', 'infinite']); +}); +ava_1.default('is.boolean', t => { + testType(t, 'boolean'); +}); +ava_1.default('is.symbol', t => { + testType(t, 'symbol'); +}); +ava_1.default('is.array', t => { + testType(t, 'array'); +}); +ava_1.default('is.function', t => { + testType(t, 'function', ['generatorFunction', 'asyncFunction']); +}); +ava_1.default('is.buffer', t => { + testType(t, 'buffer'); +}); +ava_1.default('is.object', t => { + const testData = types.get('object'); + if (testData === undefined) { + t.fail('is.object not defined'); + return; + } + for (const el of testData.fixtures) { + t.true(__1.default.object(el)); + } +}); +ava_1.default('is.regExp', t => { + testType(t, 'regExp'); +}); +ava_1.default('is.date', t => { + testType(t, 'date'); +}); +ava_1.default('is.error', t => { + testType(t, 'error'); +}); +if (isNode8orHigher) { + ava_1.default('is.nativePromise', t => { + testType(t, 'nativePromise'); + }); + ava_1.default('is.promise', t => { + testType(t, 'promise', ['nativePromise']); + }); +} +ava_1.default('is.generator', t => { + testType(t, 'generator'); +}); +ava_1.default('is.generatorFunction', t => { + testType(t, 'generatorFunction', ['function']); +}); +ava_1.default('is.map', t => { + testType(t, 'map'); +}); +ava_1.default('is.set', t => { + testType(t, 'set'); +}); +ava_1.default('is.weakMap', t => { + testType(t, 'weakMap'); +}); +ava_1.default('is.weakSet', t => { + testType(t, 'weakSet'); +}); +ava_1.default('is.int8Array', t => { + testType(t, 'int8Array'); +}); +ava_1.default('is.uint8Array', t => { + testType(t, 'uint8Array', ['buffer']); +}); +ava_1.default('is.uint8ClampedArray', t => { + testType(t, 'uint8ClampedArray'); +}); +ava_1.default('is.int16Array', t => { + testType(t, 'int16Array'); +}); +ava_1.default('is.uint16Array', t => { + testType(t, 'uint16Array'); +}); +ava_1.default('is.int32Array', t => { + testType(t, 'int32Array'); +}); +ava_1.default('is.uint32Array', t => { + testType(t, 'uint32Array'); +}); +ava_1.default('is.float32Array', t => { + testType(t, 'float32Array'); +}); +ava_1.default('is.float64Array', t => { + testType(t, 'float64Array'); +}); +ava_1.default('is.arrayBuffer', t => { + testType(t, 'arrayBuffer'); +}); +ava_1.default('is.dataView', t => { + testType(t, 'arrayBuffer'); +}); +ava_1.default('is.truthy', t => { + t.true(__1.default.truthy('unicorn')); + t.true(__1.default.truthy('🦄')); + t.true(__1.default.truthy(new Set())); + t.true(__1.default.truthy(Symbol('🦄'))); + t.true(__1.default.truthy(true)); +}); +ava_1.default('is.falsy', t => { + t.true(__1.default.falsy(false)); + t.true(__1.default.falsy(0)); + t.true(__1.default.falsy('')); + t.true(__1.default.falsy(null)); + t.true(__1.default.falsy(undefined)); + t.true(__1.default.falsy(NaN)); +}); +ava_1.default('is.nan', t => { + testType(t, 'nan'); +}); +ava_1.default('is.nullOrUndefined', t => { + testType(t, 'nullOrUndefined', ['undefined', 'null']); +}); +ava_1.default('is.primitive', t => { + 
const primitives = [ + undefined, + null, + '🦄', + 6, + Infinity, + -Infinity, + true, + false, + Symbol('🦄') + ]; + for (const el of primitives) { + t.true(__1.default.primitive(el)); + } +}); +ava_1.default('is.integer', t => { + testType(t, 'integer', ['number', 'safeInteger']); + t.false(__1.default.integer(1.4)); +}); +ava_1.default('is.safeInteger', t => { + testType(t, 'safeInteger', ['number', 'integer']); + t.false(__1.default.safeInteger(Math.pow(2, 53))); + t.false(__1.default.safeInteger(-Math.pow(2, 53))); +}); +ava_1.default('is.plainObject', t => { + testType(t, 'plainObject', ['object', 'promise']); +}); +ava_1.default('is.iterable', t => { + t.true(__1.default.iterable('')); + t.true(__1.default.iterable([])); + t.true(__1.default.iterable(new Map())); + t.false(__1.default.iterable(null)); + t.false(__1.default.iterable(undefined)); + t.false(__1.default.iterable(0)); + t.false(__1.default.iterable(NaN)); + t.false(__1.default.iterable(Infinity)); + t.false(__1.default.iterable({})); +}); +ava_1.default('is.class', t => { + class Foo { + } + const classDeclarations = [ + Foo, + class Bar extends Foo { + } + ]; + for (const x of classDeclarations) { + t.true(__1.default.class_(x)); + } +}); +ava_1.default('is.typedArray', t => { + const typedArrays = [ + new Int8Array(0), + new Uint8Array(0), + new Uint8ClampedArray(0), + new Uint16Array(0), + new Int32Array(0), + new Uint32Array(0), + new Float32Array(0), + new Float64Array(0) + ]; + for (const el of typedArrays) { + t.true(__1.default.typedArray(el)); + } + t.false(__1.default.typedArray(new ArrayBuffer(1))); + t.false(__1.default.typedArray([])); + t.false(__1.default.typedArray({})); +}); +ava_1.default('is.arrayLike', t => { + (() => { + t.true(__1.default.arrayLike(arguments)); + })(); + t.true(__1.default.arrayLike([])); + t.true(__1.default.arrayLike('unicorn')); + t.false(__1.default.arrayLike({})); + t.false(__1.default.arrayLike(() => { })); + t.false(__1.default.arrayLike(new Map())); +}); +ava_1.default('is.inRange', t => { + const x = 3; + t.true(__1.default.inRange(x, [0, 5])); + t.true(__1.default.inRange(x, [5, 0])); + t.true(__1.default.inRange(x, [-5, 5])); + t.true(__1.default.inRange(x, [5, -5])); + t.false(__1.default.inRange(x, [4, 8])); + t.true(__1.default.inRange(-7, [-5, -10])); + t.true(__1.default.inRange(-5, [-5, -10])); + t.true(__1.default.inRange(-10, [-5, -10])); + t.true(__1.default.inRange(x, 10)); + t.true(__1.default.inRange(0, 0)); + t.true(__1.default.inRange(-2, -3)); + t.false(__1.default.inRange(x, 2)); + t.false(__1.default.inRange(-3, -2)); + t.throws(() => { + __1.default.inRange(0, []); + }); + t.throws(() => { + __1.default.inRange(0, [5]); + }); + t.throws(() => { + __1.default.inRange(0, [1, 2, 3]); + }); +}); +ava_1.default('is.domElement', t => { + testType(t, 'domElement'); + t.false(__1.default.domElement({ nodeType: 1, nodeName: 'div' })); +}); +ava_1.default('is.infinite', t => { + testType(t, 'infinite', ['number']); +}); +ava_1.default('is.even', t => { + for (const el of [-6, 2, 4]) { + t.true(__1.default.even(el)); + } + for (const el of [-3, 1, 5]) { + t.false(__1.default.even(el)); + } +}); +ava_1.default('is.odd', t => { + for (const el of [-5, 7, 13]) { + t.true(__1.default.odd(el)); + } + for (const el of [-8, 8, 10]) { + t.false(__1.default.odd(el)); + } +}); +ava_1.default('is.empty', t => { + t.true(__1.default.empty(null)); + t.true(__1.default.empty(undefined)); + t.true(__1.default.empty(false)); + t.false(__1.default.empty(true)); + 
t.true(__1.default.empty('')); + t.false(__1.default.empty('🦄')); + t.true(__1.default.empty([])); + t.false(__1.default.empty(['🦄'])); + t.true(__1.default.empty({})); + t.false(__1.default.empty({ unicorn: '🦄' })); + const tempMap = new Map(); + t.true(__1.default.empty(tempMap)); + tempMap.set('unicorn', '🦄'); + t.false(__1.default.empty(tempMap)); + const tempSet = new Set(); + t.true(__1.default.empty(tempSet)); + tempSet.add(1); + t.false(__1.default.empty(tempSet)); +}); +ava_1.default('is.emptyOrWhitespace', t => { + t.true(__1.default.emptyOrWhitespace('')); + t.true(__1.default.emptyOrWhitespace(' ')); + t.false(__1.default.emptyOrWhitespace('🦄')); + t.false(__1.default.emptyOrWhitespace('unicorn')); +}); +ava_1.default('is.any', t => { + t.true(__1.default.any(__1.default.string, {}, true, '🦄')); + t.true(__1.default.any(__1.default.object, false, {}, 'unicorns')); + t.false(__1.default.any(__1.default.boolean, '🦄', [], 3)); + t.false(__1.default.any(__1.default.integer, true, 'lol', {})); + t.throws(() => { + __1.default.any(null, true); + }); + t.throws(() => { + __1.default.any(__1.default.string); + }); +}); +ava_1.default('is.all', t => { + t.true(__1.default.all(__1.default.object, {}, new Set(), new Map())); + t.true(__1.default.all(__1.default.boolean, true, false)); + t.false(__1.default.all(__1.default.string, '🦄', [])); + t.false(__1.default.all(__1.default.set, new Map(), {})); + t.throws(() => { + __1.default.all(null, true); + }); + t.throws(() => { + __1.default.all(__1.default.string); + }); +}); +//# sourceMappingURL=test.js.map \ No newline at end of file diff --git a/node_modules/@sindresorhus/is/dist/source/tests/test.js.map b/node_modules/@sindresorhus/is/dist/source/tests/test.js.map new file mode 100644 index 00000000..ab9ec153 --- /dev/null +++ b/node_modules/@sindresorhus/is/dist/source/tests/test.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"test.js","sourceRoot":"","sources":["../../../source/tests/test.ts"],"names":[],"mappings":";;;;;;;;;;AAAA,6BAA6B;AAC7B,6BAA+C;AAC/C,iCAA4B;AAC5B,0BAAmB;AAEnB,MAAM,eAAe,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AAKzE,0BAA2B,SAAQ,KAAK;CAAG;AAE3C,MAAM,QAAQ,GAAG,aAAK,EAAE,CAAC;AACzB,MAAM,gBAAgB,GAAG,CAAC,EAAU,EAAE,EAAE,CAAC,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC;AAOpE,MAAM,KAAK,GAAG,IAAI,GAAG,CAAe;IACnC,CAAC,WAAW,EAAE;YACb,EAAE,EAAE,WAAC,CAAC,SAAS;YACf,QAAQ,EAAE;gBACT,SAAS;aACT;SACD,CAAC;IACF,CAAC,MAAM,EAAE;YACR,EAAE,EAAE,WAAC,CAAC,KAAK;YACX,QAAQ,EAAE;gBACT,IAAI;aACJ;SACD,CAAC;IACF,CAAC,QAAQ,EAAE;YACV,EAAE,EAAE,WAAC,CAAC,MAAM;YACZ,QAAQ,EAAE;gBACT,IAAI;gBACJ,aAAa;gBACb,EAAE;aACF;SACD,CAAC;IACF,CAAC,QAAQ,EAAE;YACV,EAAE,EAAE,WAAC,CAAC,MAAM;YACZ,QAAQ,EAAE;gBACT,CAAC;gBACD,GAAG;gBACH,CAAC;gBACD,CAAC,CAAC;gBACF,QAAQ;gBACR,CAAC,QAAQ;aACT;SACD,CAAC;IACF,CAAC,SAAS,EAAE;YACX,EAAE,EAAE,WAAC,CAAC,OAAO;YACb,QAAQ,EAAE;gBACT,IAAI,EAAE,KAAK;aACX;SACD,CAAC;IACF,CAAC,QAAQ,EAAE;YACV,EAAE,EAAE,WAAC,CAAC,MAAM;YACZ,QAAQ,EAAE;gBACT,MAAM,CAAC,IAAI,CAAC;aACZ;SACD,CAAC;IACF,CAAC,OAAO,EAAE;YACT,EAAE,EAAE,WAAC,CAAC,KAAK;YACX,QAAQ,EAAE;gBACT,CAAC,CAAC,EAAE,CAAC,CAAC;gBACN,IAAI,KAAK,CAAC,CAAC,CAAC;aACZ;SACD,CAAC;IACF,CAAC,UAAU,EAAE;YACZ,EAAE,EAAE,WAAC,CAAC,SAAS;YACf,QAAQ,EAAE;gBAET,iBAAgB,CAAC;gBACjB,cAAY,CAAC;gBACb,GAAG,EAAE,GAAE,CAAC;gBACR;0EAAkB,CAAC;iBAAA;gBACnB,QAAS,CAAC,MAAS,CAAC;aAEpB;SACD,CAAC;IACF,CAAC,QAAQ,EAAE;YACV,EAAE,EAAE,WAAC,CAAC,MAAM;YACZ,QAAQ,EAAE;gBACT,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;aACjB;SACD,CAAC;IACF,CAAC,QAAQ,EAAE;YACV,EAAE,EAAE,WAAC,CAAC,MAAM;YACZ,QAAQ,EAAE;gBACT,EAAC,CAAC,EAAE,CAAC,EAAC;gBACN,MAAM,CAAC,MAAM,CAAC,EAAC,CAAC,EAAE,CAAC,EAAC,CAAC;aACrB;SACD,CAAC;IACF,CAAC,QAAQ,EAAE;YACV,EAAE,EAAE,WAAC,CAAC,MAAM;YACZ,QAAQ,EAAE;gBACT,IAAI;gBACJ,IAAI,MAAM,CAAC,KAAK,CAAC;aACjB;SACD,CAAC;IACF,CAAC,MAAM,EAAE;YACR,EAAE,EAAE,WAAC,CAAC,IAAI;YACV,QAAQ,EAAE;gBACT,IAAI,IAAI,EAAE;aACV;SACD,CAAC;IACF,CAAC,OAAO,EAAE;YACT,EAAE,EAAE,WAAC,CAAC,KAAK;YACX,QAAQ,EAAE;gBACT,IAAI,KAAK,CAAC,IAAI,CAAC;gBACf,IAAI,oBAAoB,EAAE;aAC1B;SACD,CAAC;IACF,CAAC,eAAe,EAAE;YACjB,EAAE,EAAE,WAAC,CAAC,aAAa;YACnB,QAAQ,EAAE;gBACT,OAAO,CAAC,OAAO,EAAE;aAEjB;SACD,CAAC;IACF,CAAC,SAAS,EAAE;YACX,EAAE,EAAE,WAAC,CAAC,OAAO;YACb,QAAQ,EAAE;gBACT,EAAC,IAAI,KAAI,CAAC,EAAE,KAAK,KAAI,CAAC,EAAC;aACvB;SACD,CAAC;IACF,CAAC,WAAW,EAAE;YACb,EAAE,EAAE,WAAC,CAAC,SAAS;YACf,QAAQ,EAAE;gBACT,CAAC,QAAS,CAAC,MAAK,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;aAC7B;SACD,CAAC;IACF,CAAC,mBAAmB,EAAE;YACrB,EAAE,EAAE,WAAC,CAAC,iBAAiB;YACvB,QAAQ,EAAE;gBACT,QAAS,CAAC,MAAK,MAAM,CAAC,CAAC,CAAC,CAAC;aACzB;SACD,CAAC;IACF,CAAC,eAAe,EAAE;YACjB,EAAE,EAAE,WAAC,CAAC,aAAa;YACnB,QAAQ,EAAE;gBACT;0EAAkB,CAAC;iBAAA;gBACnB,GAAS,EAAE,gDAAE,CAAC,CAAA;aACd;SACD,CAAC;IACF,CAAC,KAAK,EAAE;YACP,EAAE,EAAE,WAAC,CAAC,GAAG;YACT,QAAQ,EAAE;gBACT,IAAI,GAAG,EAAE;aACT;SACD,CAAC;IACF,CAAC,KAAK,EAAE;YACP,EAAE,EAAE,WAAC,CAAC,GAAG;YACT,QAAQ,EAAE;gBACT,IAAI,GAAG,EAAE;aACT;SACD,CAAC;IACF,CAAC,SAAS,EAAE;YACX,EAAE,EAAE,WAAC,CAAC,OAAO;YACb,QAAQ,EAAE;gBACT,IAAI,OAAO,EAAE;aACb;SACD,CAAC;IACF,CAAC,SAAS,EAAE;YACX,EAAE,EAAE,WAAC,CAAC,OAAO;YACb,QAAQ,EAAE;gBACT,IAAI,OAAO,EAAE;aACb;SACD,CAAC;IACF,CAAC,WAAW,EAAE;YACb,EAAE,EAAE,WAAC,CAAC,SAAS;YACf,QAAQ,EAAE;gBACT,IAAI,SAAS,CAAC,CAAC,CAAC;aAChB;SACD,CAAC;IACF,CAAC,YAAY,EAAE;YACd,EAAE,EAAE,WAAC,CAAC,UAAU;YAChB,QAAQ,EAAE;gBACT,IAAI,UAAU,CAAC,CAAC,CAAC;aACjB;SACD,CAAC;IACF,CAAC,mBAAmB,EAAE;YACrB,EAAE,EAAE,WAAC,CAAC,iBAAiB;YACvB,QAAQ,EAAE;gBACT,IAAI,iBAAiB,CAAC,CAAC,CAAC;aACxB;SACD,CAAC;IACF,CAAC,YAAY,EAAE;YACd,EAAE,EAAE,WAAC
,CAAC,UAAU;YAChB,QAAQ,EAAE;gBACT,IAAI,UAAU,CAAC,CAAC,CAAC;aACjB;SACD,CAAC;IACF,CAAC,aAAa,EAAE;YACf,EAAE,EAAE,WAAC,CAAC,WAAW;YACjB,QAAQ,EAAE;gBACT,IAAI,WAAW,CAAC,CAAC,CAAC;aAClB;SACD,CAAC;IACF,CAAC,YAAY,EAAE;YACd,EAAE,EAAE,WAAC,CAAC,UAAU;YAChB,QAAQ,EAAE;gBACT,IAAI,UAAU,CAAC,CAAC,CAAC;aACjB;SACD,CAAC;IACF,CAAC,aAAa,EAAE;YACf,EAAE,EAAE,WAAC,CAAC,WAAW;YACjB,QAAQ,EAAE;gBACT,IAAI,WAAW,CAAC,CAAC,CAAC;aAClB;SACD,CAAC;IACF,CAAC,cAAc,EAAE;YAChB,EAAE,EAAE,WAAC,CAAC,YAAY;YAClB,QAAQ,EAAE;gBACT,IAAI,YAAY,CAAC,CAAC,CAAC;aACnB;SACD,CAAC;IACF,CAAC,cAAc,EAAE;YAChB,EAAE,EAAE,WAAC,CAAC,YAAY;YAClB,QAAQ,EAAE;gBACT,IAAI,YAAY,CAAC,CAAC,CAAC;aACnB;SACD,CAAC;IACF,CAAC,aAAa,EAAE;YACf,EAAE,EAAE,WAAC,CAAC,WAAW;YACjB,QAAQ,EAAE;gBACT,IAAI,WAAW,CAAC,EAAE,CAAC;aACnB;SACD,CAAC;IACF,CAAC,KAAK,EAAE;YACP,EAAE,EAAE,WAAC,CAAC,GAAG;YACT,QAAQ,EAAE;gBACT,GAAG;gBACH,MAAM,CAAC,GAAG;aACV;SACD,CAAC;IACF,CAAC,iBAAiB,EAAE;YACnB,EAAE,EAAE,WAAC,CAAC,eAAe;YACrB,QAAQ,EAAE;gBACT,IAAI;gBACJ,SAAS;aACT;SACD,CAAC;IACF,CAAC,aAAa,EAAE;YACf,EAAE,EAAE,WAAC,CAAC,WAAW;YACjB,QAAQ,EAAE;gBACT,EAAC,CAAC,EAAE,CAAC,EAAC;gBACN,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC;gBACnB,IAAI,MAAM,EAAE;aACZ;SACD,CAAC;IACF,CAAC,SAAS,EAAE;YACX,EAAE,EAAE,WAAC,CAAC,OAAO;YACb,QAAQ,EAAE;gBACT,CAAC;aACD;SACD,CAAC;IACF,CAAC,aAAa,EAAE;YACf,EAAE,EAAE,WAAC,CAAC,WAAW;YACjB,QAAQ,EAAE;gBACT,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC;gBACnB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC;aACpB;SACD,CAAC;IACF,CAAC,YAAY,EAAE;YACd,EAAE,EAAE,WAAC,CAAC,UAAU;YAChB,QAAQ,EAAE;gBACT,KAAK;gBACL,OAAO;gBACP,MAAM;gBACN,KAAK;gBACL,QAAQ;gBACR,QAAQ;aACR,CAAC,GAAG,CAAC,gBAAgB,CAAC;SAAE;KACzB,EAAE,CAAC,iBAAiB,EAAE;YACtB,EAAE,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,WAAC,CAAC,UAAU,CAAC,KAAK,CAAC;YACjC,QAAQ,EAAE;gBACT,QAAQ,CAAC,cAAc,CAAC,MAAM,CAAC;gBAC/B,QAAQ,CAAC,2BAA2B,CAAC,gBAAgB,EAAE,kCAAkC,CAAC;gBAC1F,QAAQ,CAAC,aAAa,CAAC,mBAAmB,CAAC;gBAC3C,QAAQ;gBACR,QAAQ,CAAC,cAAc,CAAC,kBAAkB,CAAC,SAAS,EAAE,yBAAyB,EAAE,kDAAkD,CAAC;gBACpI,QAAQ,CAAC,sBAAsB,EAAE;aACjC;SACD,CAAC;IACF,CAAC,UAAU,EAAE;YACZ,EAAE,EAAG,WAAC,CAAC,QAAQ;YACf,QAAQ,EAAE;gBACT,QAAQ;gBACR,CAAC,QAAQ;aACT;SACD,CAAC;CACF,CAAC,CAAC;AAGH,MAAM,QAAQ,GAAG,CAAC,CAA6B,EAAE,IAAY,EAAE,OAAkB,EAAE,EAAE;IACpF,MAAM,QAAQ,GAAG,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;IAEjC,EAAE,CAAC,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC;QAC5B,CAAC,CAAC,IAAI,CAAC,MAAM,IAAI,cAAc,CAAC,CAAC;QAEjC,MAAM,CAAC;IACR,CAAC;IAED,MAAM,EAAC,EAAE,EAAC,GAAG,QAAQ,CAAC;IAEtB,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,EAAC,QAAQ,EAAC,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC;QAGvC,EAAE,CAAC,CAAC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5C,QAAQ,CAAC;QACV,CAAC;QAED,MAAM,MAAM,GAAG,GAAG,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE/D,GAAG,CAAC,CAAC,MAAM,OAAO,IAAI,QAAQ,CAAC,CAAC,CAAC;YAChC,MAAM,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,UAAU,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;QACxD,CAAC;IACF,CAAC;AACF,CAAC,CAAC;AAEF,aAAI,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE;IACd,CAAC,CAAC,EAAE,CAAC,WAAC,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,CAAC;IACtB,CAAC,CAAC,EAAE,CAAC,WAAC,CAAC,SAAS,CAAC,EAAE,WAAW,CAAC,CAAC;AAGjC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACxB,QAAQ,CAAC,CAAC,EAAE,WAAW,EAAE,CAAC,iBAAiB,CAAC,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE;IACnB,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,iBAAiB,CAAC,CAAC,CAAC;AAC1C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE;IACrB,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE;IACrB,QAAQ,CAAC,CAAC,EAAE,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS
,EAAE,aAAa,EAAE,UAAU,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE;IACtB,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE;IACrB,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,UAAU,EAAE,CAAC,CAAC,EAAE;IACpB,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;AACtB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;IACvB,QAAQ,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,mBAAmB,EAAE,eAAe,CAAC,CAAC,CAAC;AACjE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE;IACrB,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE;IACrB,MAAM,QAAQ,GAAG,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IAErC,EAAE,CAAC,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC;QAC5B,CAAC,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QAEhC,MAAM,CAAC;IACR,CAAC;IAED,GAAG,CAAC,CAAC,MAAM,EAAE,IAAI,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;QACpC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC;IACtB,CAAC;AACF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE;IACrB,QAAQ,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE;IACnB,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC;AACrB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,UAAU,EAAE,CAAC,CAAC,EAAE;IACpB,QAAQ,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;AACtB,CAAC,CAAC,CAAC;AAEH,EAAE,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC;IACrB,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE;QAC5B,QAAQ,CAAC,CAAC,EAAE,eAAe,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;IAEH,aAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE;QACtB,QAAQ,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC;IAC3C,CAAC,CAAC,CAAC;AAKJ,CAAC;AAED,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACxB,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC;AAC1B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE;IAChC,QAAQ,CAAC,CAAC,EAAE,mBAAmB,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC;AAChD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE;IAClB,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;AACpB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE;IAClB,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;AACpB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE;IACtB,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE;IACtB,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACxB,QAAQ,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC;AAC1B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,CAAC,CAAC,EAAE;IACzB,QAAQ,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACvC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE;IAChC,QAAQ,CAAC,CAAC,EAAE,mBAAmB,CAAC,CAAC;AAClC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,CAAC,CAAC,EAAE;IACzB,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE;IAC1B,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC;AAC5B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,CAAC,CAAC,EAAE;IACzB,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE;IAC1B,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC;AAC5B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iBAAiB,EAAE,CAAC,CAAC,EAAE;IAC3B,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAC7B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iBAAiB,EAAE,CAAC,CAAC,EAAE;IAC3B,QAAQ,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAC7B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE;IAC1B,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC;AAC5B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;IACvB,QAAQ,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC;AAC5B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,WAAW,EAAE,CAAC,CAAC,EAAE;IACrB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC;IAC5B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC;IACvB,CAAC,CAAC,IAAI,CAAC,WAAC,
CAAC,MAAM,CAAC,IAAI,GAAG,EAAE,CAAC,CAAC,CAAC;IAC5B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAC/B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,UAAU,EAAE,CAAC,CAAC,EAAE;IACpB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;IACvB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;IACnB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;IACpB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;IACtB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;IAC3B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;AACtB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE;IAClB,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;AACpB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,oBAAoB,EAAE,CAAC,CAAC,EAAE;IAC9B,QAAQ,CAAC,CAAC,EAAE,iBAAiB,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACxB,MAAM,UAAU,GAAG;QAClB,SAAS;QACT,IAAI;QACJ,IAAI;QACJ,CAAC;QACD,QAAQ;QACR,CAAC,QAAQ;QACT,IAAI;QACJ,KAAK;QACL,MAAM,CAAC,IAAI,CAAC;KACZ,CAAC;IAEF,GAAG,CAAC,CAAC,MAAM,EAAE,IAAI,UAAU,CAAC,CAAC,CAAC;QAC7B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC;IACzB,CAAC;AACF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE;IACtB,QAAQ,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,QAAQ,EAAE,aAAa,CAAC,CAAC,CAAC;IAClD,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;AACzB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE;IAC1B,QAAQ,CAAC,CAAC,EAAE,aAAa,EAAE,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC,CAAC;IAClD,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;AAC1C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE;IAC1B,QAAQ,CAAC,CAAC,EAAE,aAAa,EAAE,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC,CAAC;AACnD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;IACvB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,QAAQ,CAAC,IAAI,GAAG,EAAE,CAAC,CAAC,CAAC;IAC9B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC;IAC/B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;IACvB,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzB,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC9B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;AACzB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,UAAU,EAAE,CAAC,CAAC,EAAE;IACpB;KAAY;IACZ,MAAM,iBAAiB,GAAG;QACzB,GAAG;QACH,SAAU,SAAQ,GAAG;SAAG;KACxB,CAAC;IAEF,GAAG,CAAC,CAAC,MAAM,CAAC,IAAI,iBAAiB,CAAC,CAAC,CAAC;QACnC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;IACrB,CAAC;AACF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,CAAC,CAAC,EAAE;IAGzB,MAAM,WAAW,GAAG;QACnB,IAAI,SAAS,CAAC,CAAC,CAAC;QAChB,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,IAAI,iBAAiB,CAAC,CAAC,CAAC;QACxB,IAAI,WAAW,CAAC,CAAC,CAAC;QAClB,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,IAAI,WAAW,CAAC,CAAC,CAAC;QAClB,IAAI,YAAY,CAAC,CAAC,CAAC;QACnB,IAAI,YAAY,CAAC,CAAC,CAAC;KACnB,CAAC;IAEF,GAAG,CAAC,CAAC,MAAM,EAAE,IAAI,WAAW,CAAC,CAAC,CAAC;QAC9B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC;IAC1B,CAAC;IAED,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,UAAU,CAAC,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC;IAC1B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,aAAI
,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACxB,CAAC,GAAG,EAAE;QACL,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC;IAChC,CAAC,CAAC,EAAE,CAAC;IACL,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC;IACxB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC;IAE/B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,CAAC;IACzB,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,SAAS,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC,CAAC;IAC/B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,SAAS,CAAC,IAAI,GAAG,EAAE,CAAC,CAAC,CAAC;AACjC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE;IACtB,MAAM,CAAC,GAAG,CAAC,CAAC;IAEZ,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IAC7B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IAC7B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAElC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IACzB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACxB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IAC1B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACzB,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IAE3B,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;QACb,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC;IAEH,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;QACb,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;IACnB,CAAC,CAAC,CAAC;IAEH,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;QACb,WAAC,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACzB,CAAC,CAAC,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,CAAC,CAAC,EAAE;IACzB,QAAQ,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC1B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,UAAU,CAAC,EAAC,QAAQ,EAAE,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAC,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;IACvB,QAAQ,CAAC,CAAC,EAAE,UAAU,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACrC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE;IACnB,GAAG,CAAC,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC7B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;IACpB,CAAC;IAED,GAAG,CAAC,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC7B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;IACrB,CAAC;AACF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE;IAClB,GAAG,CAAC,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;QAC9B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;IACnB,CAAC;IAED,GAAG,CAAC,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;QAC9B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;IACpB,CAAC;AACF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,UAAU,EAAE,CAAC,CAAC,EAAE;IACpB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;IACtB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC;IAE3B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;IACvB,C
AAC,CAAC,KAAK,CAAC,WAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;IAEvB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;IACpB,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;IAEvB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;IACpB,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAEzB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;IACpB,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,KAAK,CAAC,EAAC,OAAO,EAAE,IAAI,EAAC,CAAC,CAAC,CAAC;IAElC,MAAM,OAAO,GAAG,IAAI,GAAG,EAAE,CAAC;IAC1B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;IAEzB,OAAO,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;IAC7B,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;IAE1B,MAAM,OAAO,GAAG,IAAI,GAAG,EAAE,CAAC;IAC1B,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;IAEzB,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IACf,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;AAC3B,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE;IAChC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,iBAAiB,CAAC,EAAE,CAAC,CAAC,CAAC;IAChC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC,CAAC;IAClC,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC,CAAC;IACnC,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,iBAAiB,CAAC,SAAS,CAAC,CAAC,CAAC;AACzC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE;IAClB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,MAAM,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC;IAE3C,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;QACb,WAAC,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;IACnB,CAAC,CAAC,CAAC;IAEH,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;QACb,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,MAAM,CAAC,CAAC;IACjB,CAAC,CAAC,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE;IAClB,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,MAAM,EAAE,EAAE,EAAE,IAAI,GAAG,EAAE,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC,CAAC;IAClD,CAAC,CAAC,IAAI,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,OAAO,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;IACtC,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,MAAM,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC;IACnC,CAAC,CAAC,KAAK,CAAC,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,GAAG,EAAE,IAAI,GAAG,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC;IAErC,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;QACb,WAAC,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;IACnB,CAAC,CAAC,CAAC;IAEH,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE;QACb,WAAC,CAAC,GAAG,CAAC,WAAC,CAAC,MAAM,CAAC,CAAC;IACjB,CAAC,CAAC,CAAC;AACJ,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@sindresorhus/is/license b/node_modules/@sindresorhus/is/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/@sindresorhus/is/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@sindresorhus/is/package.json b/node_modules/@sindresorhus/is/package.json new file mode 100644 index 00000000..c4a3a24f --- /dev/null +++ b/node_modules/@sindresorhus/is/package.json @@ -0,0 +1,62 @@ +{ + "name": "@sindresorhus/is", + "version": "0.7.0", + "description": "Type check values: `is.string('🦄') //=> true`", + "license": "MIT", + "repository": "sindresorhus/is", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "publishConfig": { + "access": "public" + }, + "main": "dist/index.js", + "engines": { + "node": ">=4" + }, + "scripts": { + "lint": "tslint --format stylish --project .", + "build": "tsc", + "test": "npm run lint && npm run build && ava dist/tests", + "prepublish": "npm run build && del dist/tests" + }, + "files": [ + "dist" + ], + "keywords": [ + "type", + "types", + "is", + "check", + "checking", + "validate", + "validation", + "utility", + "util", + "typeof", + "instanceof", + "object", + "assert", + "assertion", + "test", + "kind", + "primitive", + "verify", + "compare" + ], + "devDependencies": { + "@types/jsdom": "^2.0.31", + "@types/node": "^8.0.47", + "@types/tempy": "^0.1.0", + "ava": "*", + "del-cli": "^1.1.0", + "jsdom": "^9.12.0", + "tempy": "^0.2.1", + "tslint": "^5.8.0", + "tslint-xo": "^0.3.0", + "typescript": "^2.6.1" + }, + "types": "dist/index.d.ts" +} diff --git a/node_modules/@sindresorhus/is/readme.md b/node_modules/@sindresorhus/is/readme.md new file mode 100644 index 00000000..67fad06e --- /dev/null +++ b/node_modules/@sindresorhus/is/readme.md @@ -0,0 +1,323 @@ +# is [![Build Status](https://travis-ci.org/sindresorhus/is.svg?branch=master)](https://travis-ci.org/sindresorhus/is) + +> Type check values: `is.string('🦄') //=> true` + + + + +## Install + +``` +$ npm install @sindresorhus/is +``` + + +## Usage + +```js +const is = require('@sindresorhus/is'); + +is('🦄'); +//=> 'string' + +is(new Map()); +//=> 'Map' + +is.number(6); +//=> true +``` + + +## API + +### is(value) + +Returns the type of `value`. + +Primitives are lowercase and object types are camelcase. + +Example: + +- `'undefined'` +- `'null'` +- `'string'` +- `'symbol'` +- `'Array'` +- `'Function'` +- `'Object'` + +Note: It will throw if you try to feed it object-wrapped primitives, as that's a bad practice. For example `new String('foo')`. + +### is.{method} + +All the below methods accept a value and returns a boolean for whether the value is of the desired type. + +#### Primitives + +##### .undefined(value) +##### .null(value) +##### .string(value) +##### .number(value) +##### .boolean(value) +##### .symbol(value) + +#### Built-in types + +##### .array(value) +##### .function(value) +##### .buffer(value) +##### .object(value) + +Keep in mind that [functions are objects too](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions). + +##### .regExp(value) +##### .date(value) +##### .error(value) +##### .nativePromise(value) +##### .promise(value) + +Returns `true` for any object with a `.then()` and `.catch()` method. 
Prefer this one over `.nativePromise()` as you usually want to allow userland promise implementations too. + +##### .generator(value) + +Returns `true` for any object that implements its own `.next()` and `.throw()` methods and has a function definition for `Symbol.iterator`. + +##### .generatorFunction(value) + +##### .asyncFunction(value) + +Returns `true` for any `async` function that can be called with the `await` operator. + +```js +is.asyncFunction(async () => {}); +// => true + +is.asyncFunction(() => {}); +// => false +``` + +##### .boundFunction(value) + +Returns `true` for any `bound` function. + +```js +is.boundFunction(() => {}); +// => true + +is.boundFunction(function () {}.bind(null)); +// => true + +is.boundFunction(function () {}); +// => false +``` + +##### .map(value) +##### .set(value) +##### .weakMap(value) +##### .weakSet(value) + +#### Typed arrays + +##### .int8Array(value) +##### .uint8Array(value) +##### .uint8ClampedArray(value) +##### .int16Array(value) +##### .uint16Array(value) +##### .int32Array(value) +##### .uint32Array(value) +##### .float32Array(value) +##### .float64Array(value) + +#### Structured data + +##### .arrayBuffer(value) +##### .sharedArrayBuffer(value) +##### .dataView(value) + +#### Miscellaneous + +##### .directInstanceOf(value, class) + +Returns `true` if `value` is a direct instance of `class`. + +```js +is.directInstanceOf(new Error(), Error); +//=> true + +class UnicornError extends Error {}; + +is.directInstanceOf(new UnicornError(), Error); +//=> false +``` + +##### .truthy(value) + +Returns `true` for all values that evaluate to true in a boolean context: + +```js +is.truthy('🦄'); +//=> true + +is.truthy(undefined); +//=> false +``` + +##### .falsy(value) + +Returns `true` if `value` is one of: `false`, `0`, `''`, `null`, `undefined`, `NaN`. + +##### .nan(value) +##### .nullOrUndefined(value) +##### .primitive(value) + +JavaScript primitives are as follows: `null`, `undefined`, `string`, `number`, `boolean`, `symbol`. + +##### .integer(value) + +##### .safeInteger(value) + +Returns `true` if `value` is a [safe integer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger). + +##### .plainObject(value) + +An object is plain if it's created by either `{}`, `new Object()`, or `Object.create(null)`. + +##### .iterable(value) +##### .class(value) + +Returns `true` for instances created by a ES2015 class. + +##### .typedArray(value) + +##### .arrayLike(value) + +A `value` is array-like if it is not a function and has a `value.length` that is a safe integer greater than or equal to 0. + +```js +is.arrayLike(document.forms); +//=> true + +function () { + is.arrayLike(arguments); + //=> true +} +``` + +##### .inRange(value, range) + +Check if `value` (number) is in the given `range`. The range is an array of two values, lower bound and upper bound, in no specific order. + +```js +is.inRange(3, [0, 5]); +is.inRange(3, [5, 0]); +is.inRange(0, [-2, 2]); +``` + +##### .inRange(value, upperBound) + +Check if `value` (number) is in the range of `0` to `upperBound`. + +```js +is.inRange(3, 10); +``` + +##### .domElement(value) + +Returns `true` if `value` is a DOM Element. + +##### .nodeStream(value) + +Returns `true` if `value` is a Node.js [stream](https://nodejs.org/api/stream.html). + +```js +const fs = require('fs'); +is.nodeStream(fs.createReadStream('unicorn.png')); +//=> true +``` + +##### .infinite(value) + +Check if `value` is `Infinity` or `-Infinity`. 
+ +##### .even(value) + +Returns `true` if `value` is an even integer. + +##### .odd(value) + +Returns `true` if `value` is an odd integer. + +##### .empty(value) + +Returns `true` if `value` is falsy or an empty string, array, object, map, or set. + +##### .emptyOrWhitespace(value) + +Returns `true` if `is.empty(value)` or a string that is all whitespace. + + +##### .any(predicate, ...values) + +Returns `true` if **any** of the input `values` returns true in the `predicate`: + +```js +is.any(is.string, {}, true, '🦄'); +//=> true + +is.any(is.boolean, 'unicorns', [], new Map()); +//=> false +``` + +##### .all(predicate, ...values) + +Returns `true` if **all** of the input `values` returns true in the `predicate`: + +```js +is.all(is.object, {}, new Map(), new Set()); +//=> true + +is.all(is.string, '🦄', [], 'unicorns'); +//=> false +``` + +## FAQ + +### Why yet another type checking module? + +There are hundreds of type checking modules on npm, unfortunately, I couldn't find any that fit my needs: + +- Includes both type methods and ability to get the type +- Types of primitives returned as lowercase and object types as camelcase +- Covers all built-ins +- Unsurprising behavior +- Well-maintained +- Comprehensive test suite + +For the ones I found, pick 3 of these. + +The most common mistakes I noticed in these modules was using `instanceof` for type checking, forgetting that functions are objects, and omitting `symbol` as a primitive. + + +## Related + +- [is-stream](https://github.com/sindresorhus/is-stream) - Check if something is a Node.js stream +- [is-observable](https://github.com/sindresorhus/is-observable) - Check if a value is an Observable +- [file-type](https://github.com/sindresorhus/file-type) - Detect the file type of a Buffer/Uint8Array +- [is-ip](https://github.com/sindresorhus/is-ip) - Check if a string is an IP address +- [is-array-sorted](https://github.com/sindresorhus/is-array-sorted) - Check if an Array is sorted +- [is-error-constructor](https://github.com/sindresorhus/is-error-constructor) - Check if a value is an error constructor +- [is-empty-iterable](https://github.com/sindresorhus/is-empty-iterable) - Check if an Iterable is empty +- [is-blob](https://github.com/sindresorhus/is-blob) - Check if a value is a Blob - File-like object of immutable, raw data +- [has-emoji](https://github.com/sindresorhus/has-emoji) - Check whether a string has any emoji + + +## Created by + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Giora Guttsait](https://github.com/gioragutt) +- [Brandon Smith](https://github.com/brandon93s) + + +## License + +MIT diff --git a/node_modules/@tokenizer/token/README.md b/node_modules/@tokenizer/token/README.md new file mode 100644 index 00000000..41160492 --- /dev/null +++ b/node_modules/@tokenizer/token/README.md @@ -0,0 +1,19 @@ +[![npm version](https://badge.fury.io/js/%40tokenizer%2Ftoken.svg)](https://www.npmjs.com/package/@tokenizer/token) +[![npm downloads](http://img.shields.io/npm/dm/@tokenizer/token.svg)](https://npmcharts.com/compare/@tokenizer/token?interval=30) + +# @tokenizer/token + +TypeScript definition of an [strtok3](https://github.com/Borewit/strtok3) token. 
+ +## Licence + +(The MIT License) + +Copyright (c) 2020 Borewit + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/@tokenizer/token/index.d.ts b/node_modules/@tokenizer/token/index.d.ts new file mode 100644 index 00000000..3703b77c --- /dev/null +++ b/node_modules/@tokenizer/token/index.d.ts @@ -0,0 +1,30 @@ +/** + * Read-only token + * See https://github.com/Borewit/strtok3 for more information + */ +export interface IGetToken { + + /** + * Length of encoded token in bytes + */ + len: number; + + /** + * Decode value from buffer at offset + * @param array - Uint8Array to read the decoded value from + * @param offset - Decode offset + * @return decoded value + */ + get(array: Array, offset: number): Value; +} + +export interface IToken extends IGetToken { + /** + * Encode value to buffer + * @param array - Uint8Array to write the encoded value to + * @param offset - Buffer write offset + * @param value - Value to decode of type T + * @return offset plus number of bytes written + */ + put(array: Array, offset: number, value: Value): number +} diff --git a/node_modules/@tokenizer/token/package.json b/node_modules/@tokenizer/token/package.json new file mode 100644 index 00000000..4bb38fd2 --- /dev/null +++ b/node_modules/@tokenizer/token/package.json @@ -0,0 +1,33 @@ +{ + "name": "@tokenizer/token", + "version": "0.3.0", + "description": "TypeScript definition for strtok3 token", + "main": "", + "types": "index.d.ts", + "files": [ + "index.d.ts" + ], + "keywords": [ + "token", + "interface", + "tokenizer", + "TypeScript" + ], + "author": { + "name": "Borewit", + "url": "https://github.com/Borewit" + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/Borewit/tokenizer-token.git" + }, + "bugs": { + "url": "https://github.com/Borewit/tokenizer-token/issues" + }, + "typeScriptVersion": "3.0", + "dependencies": {}, + "devDependencies": { + "@types/node": "^13.1.0" + } +} diff --git a/node_modules/abbrev/LICENSE b/node_modules/abbrev/LICENSE new file mode 100644 index 00000000..9bcfa9d7 --- /dev/null +++ b/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/abbrev/README.md b/node_modules/abbrev/README.md new file mode 100644 index 00000000..99746fe6 --- /dev/null +++ b/node_modules/abbrev/README.md @@ -0,0 +1,23 @@ +# abbrev-js + +Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). + +Usage: + + var abbrev = require("abbrev"); + abbrev("foo", "fool", "folding", "flop"); + + // returns: + { fl: 'flop' + , flo: 'flop' + , flop: 'flop' + , fol: 'folding' + , fold: 'folding' + , foldi: 'folding' + , foldin: 'folding' + , folding: 'folding' + , foo: 'foo' + , fool: 'fool' + } + +This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. diff --git a/node_modules/abbrev/abbrev.js b/node_modules/abbrev/abbrev.js new file mode 100644 index 00000000..7b1dc5d6 --- /dev/null +++ b/node_modules/abbrev/abbrev.js @@ -0,0 +1,61 @@ +module.exports = exports = abbrev.abbrev = abbrev + +abbrev.monkeyPatch = monkeyPatch + +function monkeyPatch () { + Object.defineProperty(Array.prototype, 'abbrev', { + value: function () { return abbrev(this) }, + enumerable: false, configurable: true, writable: true + }) + + Object.defineProperty(Object.prototype, 'abbrev', { + value: function () { return abbrev(Object.keys(this)) }, + enumerable: false, configurable: true, writable: true + }) +} + +function abbrev (list) { + if (arguments.length !== 1 || !Array.isArray(list)) { + list = Array.prototype.slice.call(arguments, 0) + } + for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { + args[i] = typeof list[i] === "string" ? 
list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + args = args.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + var abbrevs = {} + , prev = "" + for (var i = 0, l = args.length ; i < l ; i ++) { + var current = args[i] + , next = args[i + 1] || "" + , nextMatches = true + , prevMatches = true + if (current === next) continue + for (var j = 0, cl = current.length ; j < cl ; j ++) { + var curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j ++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (var a = current.substr(0, j) ; j <= cl ; j ++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/node_modules/abbrev/package.json b/node_modules/abbrev/package.json new file mode 100644 index 00000000..bf4e8015 --- /dev/null +++ b/node_modules/abbrev/package.json @@ -0,0 +1,21 @@ +{ + "name": "abbrev", + "version": "1.1.1", + "description": "Like ruby's abbrev module, but in js", + "author": "Isaac Z. Schlueter ", + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": "http://github.com/isaacs/abbrev-js", + "license": "ISC", + "devDependencies": { + "tap": "^10.1" + }, + "files": [ + "abbrev.js" + ] +} diff --git a/node_modules/accepts/HISTORY.md b/node_modules/accepts/HISTORY.md new file mode 100644 index 00000000..cb5990c7 --- /dev/null +++ b/node_modules/accepts/HISTORY.md @@ -0,0 +1,243 @@ +1.3.8 / 2022-02-02 +================== + + * deps: mime-types@~2.1.34 + - deps: mime-db@~1.51.0 + * deps: negotiator@0.6.3 + +1.3.7 / 2019-04-29 +================== + + * deps: negotiator@0.6.2 + - Fix sorting charset, encoding, and language with extra parameters + +1.3.6 / 2019-04-28 +================== + + * deps: mime-types@~2.1.24 + - deps: mime-db@~1.40.0 + +1.3.5 / 2018-02-28 +================== + + * deps: mime-types@~2.1.18 + - deps: mime-db@~1.33.0 + +1.3.4 / 2017-08-22 +================== + + * deps: mime-types@~2.1.16 + - deps: mime-db@~1.29.0 + +1.3.3 / 2016-05-02 +================== + + * deps: mime-types@~2.1.11 + - deps: mime-db@~1.23.0 + * deps: negotiator@0.6.1 + - perf: improve `Accept` parsing speed + - perf: improve `Accept-Charset` parsing speed + - perf: improve `Accept-Encoding` parsing speed + - perf: improve `Accept-Language` parsing speed + +1.3.2 / 2016-03-08 +================== + + * deps: mime-types@~2.1.10 + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + - deps: mime-db@~1.22.0 + +1.3.1 / 2016-01-19 +================== + + * deps: mime-types@~2.1.9 + - deps: mime-db@~1.21.0 + +1.3.0 / 2015-09-29 +================== + + * deps: mime-types@~2.1.7 + - deps: mime-db@~1.19.0 + * deps: negotiator@0.6.0 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Lazy-load modules from main entry point + - perf: delay type concatenation until needed + - perf: enable strict mode + - perf: hoist regular expressions + - perf: remove closures getting spec properties + - 
perf: remove a closure from media type parsing + - perf: remove property delete from media type parsing + +1.2.13 / 2015-09-06 +=================== + + * deps: mime-types@~2.1.6 + - deps: mime-db@~1.18.0 + +1.2.12 / 2015-07-30 +=================== + + * deps: mime-types@~2.1.4 + - deps: mime-db@~1.16.0 + +1.2.11 / 2015-07-16 +=================== + + * deps: mime-types@~2.1.3 + - deps: mime-db@~1.15.0 + +1.2.10 / 2015-07-01 +=================== + + * deps: mime-types@~2.1.2 + - deps: mime-db@~1.14.0 + +1.2.9 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - perf: fix deopt during mapping + +1.2.8 / 2015-06-07 +================== + + * deps: mime-types@~2.1.0 + - deps: mime-db@~1.13.0 + * perf: avoid argument reassignment & argument slice + * perf: avoid negotiator recursive construction + * perf: enable strict mode + * perf: remove unnecessary bitwise operator + +1.2.7 / 2015-05-10 +================== + + * deps: negotiator@0.5.3 + - Fix media type parameter matching to be case-insensitive + +1.2.6 / 2015-05-07 +================== + + * deps: mime-types@~2.0.11 + - deps: mime-db@~1.9.1 + * deps: negotiator@0.5.2 + - Fix comparing media types with quoted values + - Fix splitting media types with quoted commas + +1.2.5 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - deps: mime-db@~1.8.0 + +1.2.4 / 2015-02-14 +================== + + * Support Node.js 0.6 + * deps: mime-types@~2.0.9 + - deps: mime-db@~1.7.0 + * deps: negotiator@0.5.1 + - Fix preference sorting to be stable for long acceptable lists + +1.2.3 / 2015-01-31 +================== + + * deps: mime-types@~2.0.8 + - deps: mime-db@~1.6.0 + +1.2.2 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - deps: mime-db@~1.5.0 + +1.2.1 / 2014-12-30 +================== + + * deps: mime-types@~2.0.5 + - deps: mime-db@~1.3.1 + +1.2.0 / 2014-12-19 +================== + + * deps: negotiator@0.5.0 + - Fix list return order when large accepted list + - Fix missing identity encoding when q=0 exists + - Remove dynamic building of Negotiator class + +1.1.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - deps: mime-db@~1.3.0 + +1.1.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - deps: mime-db@~1.2.0 + +1.1.2 / 2014-10-14 +================== + + * deps: negotiator@0.4.9 + - Fix error when media type has invalid parameter + +1.1.1 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - deps: mime-db@~1.1.0 + * deps: negotiator@0.4.8 + - Fix all negotiations to be case-insensitive + - Stable sort preferences of same quality according to client order + +1.1.0 / 2014-09-02 +================== + + * update `mime-types` + +1.0.7 / 2014-07-04 +================== + + * Fix wrong type returned from `type` when match after unknown extension + +1.0.6 / 2014-06-24 +================== + + * deps: negotiator@0.4.7 + +1.0.5 / 2014-06-20 +================== + + * fix crash when unknown extension given + +1.0.4 / 2014-06-19 +================== + + * use `mime-types` + +1.0.3 / 2014-06-11 +================== + + * deps: negotiator@0.4.6 + - Order by specificity when quality is the same + +1.0.2 / 2014-05-29 +================== + + * Fix interpretation when header not in request + * deps: pin negotiator@0.4.5 + +1.0.1 / 2014-01-18 +================== + + * Identity encoding isn't always acceptable + * deps: negotiator@~0.4.0 + +1.0.0 / 2013-12-27 +================== + + * Genesis diff --git a/node_modules/accepts/LICENSE b/node_modules/accepts/LICENSE new file mode 100644 
index 00000000..06166077 --- /dev/null +++ b/node_modules/accepts/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/accepts/README.md b/node_modules/accepts/README.md new file mode 100644 index 00000000..82680c53 --- /dev/null +++ b/node_modules/accepts/README.md @@ -0,0 +1,140 @@ +# accepts + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][github-actions-ci-image]][github-actions-ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). +Extracted from [koa](https://www.npmjs.com/package/koa) for general use. + +In addition to negotiator, it allows: + +- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` + as well as `('text/html', 'application/json')`. +- Allows type shorthands such as `json`. +- Returns `false` when no types match +- Treats non-existent headers as `*` + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install accepts +``` + +## API + +```js +var accepts = require('accepts') +``` + +### accepts(req) + +Create a new `Accepts` object for the given `req`. + +#### .charset(charsets) + +Return the first accepted charset. If nothing in `charsets` is accepted, +then `false` is returned. + +#### .charsets() + +Return the charsets that the request accepts, in the order of the client's +preference (most preferred first). + +#### .encoding(encodings) + +Return the first accepted encoding. If nothing in `encodings` is accepted, +then `false` is returned. + +#### .encodings() + +Return the encodings that the request accepts, in the order of the client's +preference (most preferred first). + +#### .language(languages) + +Return the first accepted language. If nothing in `languages` is accepted, +then `false` is returned. + +#### .languages() + +Return the languages that the request accepts, in the order of the client's +preference (most preferred first). 
+ +#### .type(types) + +Return the first accepted type (and it is returned as the same text as what +appears in the `types` array). If nothing in `types` is accepted, then `false` +is returned. + +The `types` array can contain full MIME types or file extensions. Any value +that is not a full MIME types is passed to `require('mime-types').lookup`. + +#### .types() + +Return the types that the request accepts, in the order of the client's +preference (most preferred first). + +## Examples + +### Simple type negotiation + +This simple example shows how to use `accepts` to return a different typed +respond body based on what the client wants to accept. The server lists it's +preferences in order and will get back the best match between the client and +server. + +```js +var accepts = require('accepts') +var http = require('http') + +function app (req, res) { + var accept = accepts(req) + + // the order of this list is significant; should be server preferred order + switch (accept.type(['json', 'html'])) { + case 'json': + res.setHeader('Content-Type', 'application/json') + res.write('{"hello":"world!"}') + break + case 'html': + res.setHeader('Content-Type', 'text/html') + res.write('hello, world!') + break + default: + // the fallback is text/plain, so no need to specify it above + res.setHeader('Content-Type', 'text/plain') + res.write('hello, world!') + break + } + + res.end() +} + +http.createServer(app).listen(3000) +``` + +You can test this out with the cURL program: +```sh +curl -I -H'Accept: text/html' http://localhost:3000/ +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master +[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master +[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci +[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml +[node-version-image]: https://badgen.net/npm/node/accepts +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/accepts +[npm-url]: https://npmjs.org/package/accepts +[npm-version-image]: https://badgen.net/npm/v/accepts diff --git a/node_modules/accepts/index.js b/node_modules/accepts/index.js new file mode 100644 index 00000000..e9b2f63f --- /dev/null +++ b/node_modules/accepts/index.js @@ -0,0 +1,238 @@ +/*! + * accepts + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Negotiator = require('negotiator') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = Accepts + +/** + * Create a new Accepts object for the given req. + * + * @param {object} req + * @public + */ + +function Accepts (req) { + if (!(this instanceof Accepts)) { + return new Accepts(req) + } + + this.headers = req.headers + this.negotiator = new Negotiator(req) +} + +/** + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single mime type string + * such as "application/json", the extension name + * such as "json" or an array `["json", "html", "text/plain"]`. When a list + * or array is given the _best_ match, if any is returned. 
+ * + * Examples: + * + * // Accept: text/html + * this.types('html'); + * // => "html" + * + * // Accept: text/*, application/json + * this.types('html'); + * // => "html" + * this.types('text/html'); + * // => "text/html" + * this.types('json', 'text'); + * // => "json" + * this.types('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * this.types('image/png'); + * this.types('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * this.types(['html', 'json']); + * this.types('html', 'json'); + * // => "json" + * + * @param {String|Array} types... + * @return {String|Array|Boolean} + * @public + */ + +Accepts.prototype.type = +Accepts.prototype.types = function (types_) { + var types = types_ + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i] + } + } + + // no types, return all requested types + if (!types || types.length === 0) { + return this.negotiator.mediaTypes() + } + + // no accept header, return first given type + if (!this.headers.accept) { + return types[0] + } + + var mimes = types.map(extToMime) + var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)) + var first = accepts[0] + + return first + ? types[mimes.indexOf(first)] + : false +} + +/** + * Return accepted encodings or best fit based on `encodings`. + * + * Given `Accept-Encoding: gzip, deflate` + * an array sorted by quality is returned: + * + * ['gzip', 'deflate'] + * + * @param {String|Array} encodings... + * @return {String|Array} + * @public + */ + +Accepts.prototype.encoding = +Accepts.prototype.encodings = function (encodings_) { + var encodings = encodings_ + + // support flattened arguments + if (encodings && !Array.isArray(encodings)) { + encodings = new Array(arguments.length) + for (var i = 0; i < encodings.length; i++) { + encodings[i] = arguments[i] + } + } + + // no encodings, return all requested encodings + if (!encodings || encodings.length === 0) { + return this.negotiator.encodings() + } + + return this.negotiator.encodings(encodings)[0] || false +} + +/** + * Return accepted charsets or best fit based on `charsets`. + * + * Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` + * an array sorted by quality is returned: + * + * ['utf-8', 'utf-7', 'iso-8859-1'] + * + * @param {String|Array} charsets... + * @return {String|Array} + * @public + */ + +Accepts.prototype.charset = +Accepts.prototype.charsets = function (charsets_) { + var charsets = charsets_ + + // support flattened arguments + if (charsets && !Array.isArray(charsets)) { + charsets = new Array(arguments.length) + for (var i = 0; i < charsets.length; i++) { + charsets[i] = arguments[i] + } + } + + // no charsets, return all requested charsets + if (!charsets || charsets.length === 0) { + return this.negotiator.charsets() + } + + return this.negotiator.charsets(charsets)[0] || false +} + +/** + * Return accepted languages or best fit based on `langs`. + * + * Given `Accept-Language: en;q=0.8, es, pt` + * an array sorted by quality is returned: + * + * ['es', 'pt', 'en'] + * + * @param {String|Array} langs... 
+ * @return {Array|String} + * @public + */ + +Accepts.prototype.lang = +Accepts.prototype.langs = +Accepts.prototype.language = +Accepts.prototype.languages = function (languages_) { + var languages = languages_ + + // support flattened arguments + if (languages && !Array.isArray(languages)) { + languages = new Array(arguments.length) + for (var i = 0; i < languages.length; i++) { + languages[i] = arguments[i] + } + } + + // no languages, return all requested languages + if (!languages || languages.length === 0) { + return this.negotiator.languages() + } + + return this.negotiator.languages(languages)[0] || false +} + +/** + * Convert extnames to mime. + * + * @param {String} type + * @return {String} + * @private + */ + +function extToMime (type) { + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if mime is valid. + * + * @param {String} type + * @return {String} + * @private + */ + +function validMime (type) { + return typeof type === 'string' +} diff --git a/node_modules/accepts/package.json b/node_modules/accepts/package.json new file mode 100644 index 00000000..0f2d15da --- /dev/null +++ b/node_modules/accepts/package.json @@ -0,0 +1,47 @@ +{ + "name": "accepts", + "description": "Higher-level content negotiation", + "version": "1.3.8", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/accepts", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "4.3.1", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + }, + "keywords": [ + "content", + "negotiation", + "accept", + "accepts" + ] +} diff --git a/node_modules/ansi-styles/index.d.ts b/node_modules/ansi-styles/index.d.ts new file mode 100644 index 00000000..44a907e5 --- /dev/null +++ b/node_modules/ansi-styles/index.d.ts @@ -0,0 +1,345 @@ +declare type CSSColor = + | 'aliceblue' + | 'antiquewhite' + | 'aqua' + | 'aquamarine' + | 'azure' + | 'beige' + | 'bisque' + | 'black' + | 'blanchedalmond' + | 'blue' + | 'blueviolet' + | 'brown' + | 'burlywood' + | 'cadetblue' + | 'chartreuse' + | 'chocolate' + | 'coral' + | 'cornflowerblue' + | 'cornsilk' + | 'crimson' + | 'cyan' + | 'darkblue' + | 'darkcyan' + | 'darkgoldenrod' + | 'darkgray' + | 'darkgreen' + | 'darkgrey' + | 'darkkhaki' + | 'darkmagenta' + | 'darkolivegreen' + | 'darkorange' + | 'darkorchid' + | 'darkred' + | 'darksalmon' + | 'darkseagreen' + | 'darkslateblue' + | 'darkslategray' + | 'darkslategrey' + | 'darkturquoise' + | 'darkviolet' + | 'deeppink' + | 'deepskyblue' + | 'dimgray' + | 'dimgrey' + | 'dodgerblue' + | 'firebrick' + | 'floralwhite' + | 'forestgreen' + | 'fuchsia' + | 'gainsboro' + | 'ghostwhite' + | 'gold' + | 'goldenrod' + | 'gray' + | 'green' + | 'greenyellow' + | 'grey' + | 'honeydew' + | 'hotpink' + | 'indianred' + | 'indigo' + | 'ivory' + | 'khaki' + | 'lavender' + | 'lavenderblush' + | 'lawngreen' + | 'lemonchiffon' + | 
'lightblue' + | 'lightcoral' + | 'lightcyan' + | 'lightgoldenrodyellow' + | 'lightgray' + | 'lightgreen' + | 'lightgrey' + | 'lightpink' + | 'lightsalmon' + | 'lightseagreen' + | 'lightskyblue' + | 'lightslategray' + | 'lightslategrey' + | 'lightsteelblue' + | 'lightyellow' + | 'lime' + | 'limegreen' + | 'linen' + | 'magenta' + | 'maroon' + | 'mediumaquamarine' + | 'mediumblue' + | 'mediumorchid' + | 'mediumpurple' + | 'mediumseagreen' + | 'mediumslateblue' + | 'mediumspringgreen' + | 'mediumturquoise' + | 'mediumvioletred' + | 'midnightblue' + | 'mintcream' + | 'mistyrose' + | 'moccasin' + | 'navajowhite' + | 'navy' + | 'oldlace' + | 'olive' + | 'olivedrab' + | 'orange' + | 'orangered' + | 'orchid' + | 'palegoldenrod' + | 'palegreen' + | 'paleturquoise' + | 'palevioletred' + | 'papayawhip' + | 'peachpuff' + | 'peru' + | 'pink' + | 'plum' + | 'powderblue' + | 'purple' + | 'rebeccapurple' + | 'red' + | 'rosybrown' + | 'royalblue' + | 'saddlebrown' + | 'salmon' + | 'sandybrown' + | 'seagreen' + | 'seashell' + | 'sienna' + | 'silver' + | 'skyblue' + | 'slateblue' + | 'slategray' + | 'slategrey' + | 'snow' + | 'springgreen' + | 'steelblue' + | 'tan' + | 'teal' + | 'thistle' + | 'tomato' + | 'turquoise' + | 'violet' + | 'wheat' + | 'white' + | 'whitesmoke' + | 'yellow' + | 'yellowgreen'; + +declare namespace ansiStyles { + interface ColorConvert { + /** + The RGB color space. + + @param red - (`0`-`255`) + @param green - (`0`-`255`) + @param blue - (`0`-`255`) + */ + rgb(red: number, green: number, blue: number): string; + + /** + The RGB HEX color space. + + @param hex - A hexadecimal string containing RGB data. + */ + hex(hex: string): string; + + /** + @param keyword - A CSS color name. + */ + keyword(keyword: CSSColor): string; + + /** + The HSL color space. + + @param hue - (`0`-`360`) + @param saturation - (`0`-`100`) + @param lightness - (`0`-`100`) + */ + hsl(hue: number, saturation: number, lightness: number): string; + + /** + The HSV color space. + + @param hue - (`0`-`360`) + @param saturation - (`0`-`100`) + @param value - (`0`-`100`) + */ + hsv(hue: number, saturation: number, value: number): string; + + /** + The HSV color space. + + @param hue - (`0`-`360`) + @param whiteness - (`0`-`100`) + @param blackness - (`0`-`100`) + */ + hwb(hue: number, whiteness: number, blackness: number): string; + + /** + Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color. + */ + ansi(ansi: number): string; + + /** + Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color. + */ + ansi256(ansi: number): string; + } + + interface CSPair { + /** + The ANSI terminal control sequence for starting this style. + */ + readonly open: string; + + /** + The ANSI terminal control sequence for ending this style. + */ + readonly close: string; + } + + interface ColorBase { + readonly ansi: ColorConvert; + readonly ansi256: ColorConvert; + readonly ansi16m: ColorConvert; + + /** + The ANSI terminal control sequence for ending this color. + */ + readonly close: string; + } + + interface Modifier { + /** + Resets the current color chain. + */ + readonly reset: CSPair; + + /** + Make text bold. + */ + readonly bold: CSPair; + + /** + Emitting only a small amount of light. + */ + readonly dim: CSPair; + + /** + Make text italic. (Not widely supported) + */ + readonly italic: CSPair; + + /** + Make text underline. 
(Not widely supported) + */ + readonly underline: CSPair; + + /** + Inverse background and foreground colors. + */ + readonly inverse: CSPair; + + /** + Prints the text, but makes it invisible. + */ + readonly hidden: CSPair; + + /** + Puts a horizontal line through the center of the text. (Not widely supported) + */ + readonly strikethrough: CSPair; + } + + interface ForegroundColor { + readonly black: CSPair; + readonly red: CSPair; + readonly green: CSPair; + readonly yellow: CSPair; + readonly blue: CSPair; + readonly cyan: CSPair; + readonly magenta: CSPair; + readonly white: CSPair; + + /** + Alias for `blackBright`. + */ + readonly gray: CSPair; + + /** + Alias for `blackBright`. + */ + readonly grey: CSPair; + + readonly blackBright: CSPair; + readonly redBright: CSPair; + readonly greenBright: CSPair; + readonly yellowBright: CSPair; + readonly blueBright: CSPair; + readonly cyanBright: CSPair; + readonly magentaBright: CSPair; + readonly whiteBright: CSPair; + } + + interface BackgroundColor { + readonly bgBlack: CSPair; + readonly bgRed: CSPair; + readonly bgGreen: CSPair; + readonly bgYellow: CSPair; + readonly bgBlue: CSPair; + readonly bgCyan: CSPair; + readonly bgMagenta: CSPair; + readonly bgWhite: CSPair; + + /** + Alias for `bgBlackBright`. + */ + readonly bgGray: CSPair; + + /** + Alias for `bgBlackBright`. + */ + readonly bgGrey: CSPair; + + readonly bgBlackBright: CSPair; + readonly bgRedBright: CSPair; + readonly bgGreenBright: CSPair; + readonly bgYellowBright: CSPair; + readonly bgBlueBright: CSPair; + readonly bgCyanBright: CSPair; + readonly bgMagentaBright: CSPair; + readonly bgWhiteBright: CSPair; + } +} + +declare const ansiStyles: { + readonly modifier: ansiStyles.Modifier; + readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase; + readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase; + readonly codes: ReadonlyMap; +} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier; + +export = ansiStyles; diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js new file mode 100644 index 00000000..5d82581a --- /dev/null +++ b/node_modules/ansi-styles/index.js @@ -0,0 +1,163 @@ +'use strict'; + +const wrapAnsi16 = (fn, offset) => (...args) => { + const code = fn(...args); + return `\u001B[${code + offset}m`; +}; + +const wrapAnsi256 = (fn, offset) => (...args) => { + const code = fn(...args); + return `\u001B[${38 + offset};5;${code}m`; +}; + +const wrapAnsi16m = (fn, offset) => (...args) => { + const rgb = fn(...args); + return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; +}; + +const ansi2ansi = n => n; +const rgb2rgb = (r, g, b) => [r, g, b]; + +const setLazyProperty = (object, property, get) => { + Object.defineProperty(object, property, { + get: () => { + const value = get(); + + Object.defineProperty(object, property, { + value, + enumerable: true, + configurable: true + }); + + return value; + }, + enumerable: true, + configurable: true + }); +}; + +/** @type {typeof import('color-convert')} */ +let colorConvert; +const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => { + if (colorConvert === undefined) { + colorConvert = require('color-convert'); + } + + const offset = isBackground ? 10 : 0; + const styles = {}; + + for (const [sourceSpace, suite] of Object.entries(colorConvert)) { + const name = sourceSpace === 'ansi16' ? 
'ansi' : sourceSpace; + if (sourceSpace === targetSpace) { + styles[name] = wrap(identity, offset); + } else if (typeof suite === 'object') { + styles[name] = wrap(suite[targetSpace], offset); + } + } + + return styles; +}; + +function assembleStyles() { + const codes = new Map(); + const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + + // Bright color + blackBright: [90, 39], + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } + }; + + // Alias bright black as gray (and grey) + styles.color.gray = styles.color.blackBright; + styles.bgColor.bgGray = styles.bgColor.bgBlackBright; + styles.color.grey = styles.color.blackBright; + styles.bgColor.bgGrey = styles.bgColor.bgBlackBright; + + for (const [groupName, group] of Object.entries(styles)) { + for (const [styleName, style] of Object.entries(group)) { + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m` + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + } + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false + }); + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false)); + setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false)); + setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false)); + setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true)); + setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true)); + setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true)); + + return styles; +} + +// Make the export immutable +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/ansi-styles/license b/node_modules/ansi-styles/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/ansi-styles/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit 
persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json new file mode 100644 index 00000000..75393284 --- /dev/null +++ b/node_modules/ansi-styles/package.json @@ -0,0 +1,56 @@ +{ + "name": "ansi-styles", + "version": "4.3.0", + "description": "ANSI escape codes for styling strings in the terminal", + "license": "MIT", + "repository": "chalk/ansi-styles", + "funding": "https://github.com/chalk/ansi-styles?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd", + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "color-convert": "^2.0.1" + }, + "devDependencies": { + "@types/color-convert": "^1.9.0", + "ava": "^2.3.0", + "svg-term-cli": "^2.1.1", + "tsd": "^0.11.0", + "xo": "^0.25.3" + } +} diff --git a/node_modules/ansi-styles/readme.md b/node_modules/ansi-styles/readme.md new file mode 100644 index 00000000..24883de8 --- /dev/null +++ b/node_modules/ansi-styles/readme.md @@ -0,0 +1,152 @@ +# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + + + +## Install + +``` +$ npm install ansi-styles +``` + +## Usage + +```js +const style = require('ansi-styles'); + +console.log(`${style.green.open}Hello world!${style.green.close}`); + + +// Color conversion between 16/256/truecolor +// NOTE: If conversion goes to 16 colors or 256 colors, the original color +// may be degraded to fit that color palette. This means terminals +// that do not support 16 million colors will best-match the +// original color. +console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); +console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); +console.log(style.color.ansi16m.hex('#abcdef') + 'Hello world!' + style.color.close); +``` + +## API + +Each style has an `open` and `close` property. 
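As a brief editorial illustration of the `open`/`close` pairs described above (a sketch, not part of the vendored package nor of this project's committed code), the same API could be used to color the Express startup message that this project logs; `style.green` is taken from the package's own example, while the port value here is a hypothetical stand-in for `process.env.PORT`:

```js
const style = require('ansi-styles');

// Wrap a plain string in the green open/close ANSI escape codes.
const port = 8000; // hypothetical value; the project normally reads process.env.PORT
console.log(`${style.green.open}Application started on http://localhost:${port}${style.green.close}`);
```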
+ +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `blackBright` (alias: `gray`, `grey`) +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` (alias: `bgGray`, `bgGrey`) +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + +## Advanced usage + +By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `style.modifier` +- `style.color` +- `style.bgColor` + +###### Example + +```js +console.log(style.color.green.open); +``` + +Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. + +###### Example + +```js +console.log(style.codes.get(36)); +//=> 39 +``` + +## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) + +`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. + +The following color spaces from `color-convert` are supported: + +- `rgb` +- `hex` +- `keyword` +- `hsl` +- `hsv` +- `hwb` +- `ansi` +- `ansi256` + +To use these, call the associated conversion function with the intended output, for example: + +```js +style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code +style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code + +style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code +style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code + +style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code +style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code +``` + +## Related + +- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + +## For enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of `ansi-styles` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/anymatch/LICENSE b/node_modules/anymatch/LICENSE new file mode 100644 index 00000000..491766ca --- /dev/null +++ b/node_modules/anymatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com) + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/anymatch/README.md b/node_modules/anymatch/README.md new file mode 100644 index 00000000..1dd67f53 --- /dev/null +++ b/node_modules/anymatch/README.md @@ -0,0 +1,87 @@ +anymatch [![Build Status](https://travis-ci.org/micromatch/anymatch.svg?branch=master)](https://travis-ci.org/micromatch/anymatch) [![Coverage Status](https://img.shields.io/coveralls/micromatch/anymatch.svg?branch=master)](https://coveralls.io/r/micromatch/anymatch?branch=master) +====== +Javascript module to match a string against a regular expression, glob, string, +or function that takes the string as an argument and returns a truthy or falsy +value. The matcher can also be an array of any or all of these. Useful for +allowing a very flexible user-defined config to define things like file paths. + +__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__ + + +Usage +----- +```sh +npm install anymatch +``` + +#### anymatch(matchers, testString, [returnIndex], [options]) +* __matchers__: (_Array|String|RegExp|Function_) +String to be directly matched, string with glob patterns, regular expression +test, function that takes the testString as an argument and returns a truthy +value if it should be matched, or an array of any number and mix of these types. +* __testString__: (_String|Array_) The string to test against the matchers. If +passed as an array, the first element of the array will be used as the +`testString` for non-function matchers, while the entire array will be applied +as the arguments for function matchers. +* __options__: (_Object_ [optional]_) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options. + * __returnIndex__: (_Boolean [optional]_) If true, return the array index of +the first matcher that that testString matched, or -1 if no match, instead of a +boolean result. 
+ +```js +const anymatch = require('anymatch'); + +const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ] ; + +anymatch(matchers, 'path/to/file.js'); // true +anymatch(matchers, 'path/anyjs/baz.js'); // true +anymatch(matchers, 'path/to/foo.js'); // true +anymatch(matchers, 'path/to/bar.js'); // true +anymatch(matchers, 'bar.js'); // false + +// returnIndex = true +anymatch(matchers, 'foo.js', {returnIndex: true}); // 2 +anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1 + +// any picomatc + +// using globs to match directories and their children +anymatch('node_modules', 'node_modules'); // true +anymatch('node_modules', 'node_modules/somelib/index.js'); // false +anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true +anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false +anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true + +const matcher = anymatch(matchers); +['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ] +anymatch master* ❯ + +``` + +#### anymatch(matchers) +You can also pass in only your matcher(s) to get a curried function that has +already been bound to the provided matching criteria. This can be used as an +`Array#filter` callback. + +```js +var matcher = anymatch(matchers); + +matcher('path/to/file.js'); // true +matcher('path/anyjs/baz.js', true); // 1 + +['foo.js', 'bar.js'].filter(matcher); // ['foo.js'] +``` + +Changelog +---------- +[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases) + +- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only. +- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information). +- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch) +for glob pattern matching. Issues with glob pattern matching should be +reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues). 
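One behavior the README leaves implicit is negation: as the package's `index.js` further below shows (the `BANG` handling), string matchers prefixed with `!` exclude paths. A minimal, hypothetical sketch of that usage (editorial, not taken from this repository's code, and assuming picomatch's usual globstar semantics):

```js
const anymatch = require('anymatch');

// '!'-prefixed globs act as negations, as implemented via the BANG check in index.js.
const matcher = anymatch(['**/*.js', '!**/node_modules/**']);

matcher('path/to/app.js');                       // true  — matches the positive glob
matcher('path/to/node_modules/foo/index.js');    // false — excluded by the negated glob
```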
+ +License +------- +[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE) diff --git a/node_modules/anymatch/index.d.ts b/node_modules/anymatch/index.d.ts new file mode 100644 index 00000000..3ef7eaad --- /dev/null +++ b/node_modules/anymatch/index.d.ts @@ -0,0 +1,20 @@ +type AnymatchFn = (testString: string) => boolean; +type AnymatchPattern = string|RegExp|AnymatchFn; +type AnymatchMatcher = AnymatchPattern|AnymatchPattern[] +type AnymatchTester = { + (testString: string|any[], returnIndex: true): number; + (testString: string|any[]): boolean; +} + +type PicomatchOptions = {dot: boolean}; + +declare const anymatch: { + (matchers: AnymatchMatcher): AnymatchTester; + (matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester; + (matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number; + (matchers: AnymatchMatcher, testString: string|any[]): boolean; +} + +export {AnymatchMatcher as Matcher} +export {AnymatchTester as Tester} +export default anymatch diff --git a/node_modules/anymatch/index.js b/node_modules/anymatch/index.js new file mode 100644 index 00000000..8eb73e9c --- /dev/null +++ b/node_modules/anymatch/index.js @@ -0,0 +1,104 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { value: true }); + +const picomatch = require('picomatch'); +const normalizePath = require('normalize-path'); + +/** + * @typedef {(testString: string) => boolean} AnymatchFn + * @typedef {string|RegExp|AnymatchFn} AnymatchPattern + * @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher + */ +const BANG = '!'; +const DEFAULT_OPTIONS = {returnIndex: false}; +const arrify = (item) => Array.isArray(item) ? item : [item]; + +/** + * @param {AnymatchPattern} matcher + * @param {object} options + * @returns {AnymatchFn} + */ +const createPattern = (matcher, options) => { + if (typeof matcher === 'function') { + return matcher; + } + if (typeof matcher === 'string') { + const glob = picomatch(matcher, options); + return (string) => matcher === string || glob(string); + } + if (matcher instanceof RegExp) { + return (string) => matcher.test(string); + } + return (string) => false; +}; + +/** + * @param {Array} patterns + * @param {Array} negPatterns + * @param {String|Array} args + * @param {Boolean} returnIndex + * @returns {boolean|number} + */ +const matchPatterns = (patterns, negPatterns, args, returnIndex) => { + const isList = Array.isArray(args); + const _path = isList ? args[0] : args; + if (!isList && typeof _path !== 'string') { + throw new TypeError('anymatch: second argument must be a string: got ' + + Object.prototype.toString.call(_path)) + } + const path = normalizePath(_path, false); + + for (let index = 0; index < negPatterns.length; index++) { + const nglob = negPatterns[index]; + if (nglob(path)) { + return returnIndex ? -1 : false; + } + } + + const applied = isList && [path].concat(args.slice(1)); + for (let index = 0; index < patterns.length; index++) { + const pattern = patterns[index]; + if (isList ? pattern(...applied) : pattern(path)) { + return returnIndex ? index : true; + } + } + + return returnIndex ? 
-1 : false; +}; + +/** + * @param {AnymatchMatcher} matchers + * @param {Array|string} testString + * @param {object} options + * @returns {boolean|number|Function} + */ +const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => { + if (matchers == null) { + throw new TypeError('anymatch: specify first argument'); + } + const opts = typeof options === 'boolean' ? {returnIndex: options} : options; + const returnIndex = opts.returnIndex || false; + + // Early cache for matchers. + const mtchers = arrify(matchers); + const negatedGlobs = mtchers + .filter(item => typeof item === 'string' && item.charAt(0) === BANG) + .map(item => item.slice(1)) + .map(item => picomatch(item, opts)); + const patterns = mtchers + .filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG)) + .map(matcher => createPattern(matcher, opts)); + + if (testString == null) { + return (testString, ri = false) => { + const returnIndex = typeof ri === 'boolean' ? ri : false; + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); + } + } + + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); +}; + +anymatch.default = anymatch; +module.exports = anymatch; diff --git a/node_modules/anymatch/package.json b/node_modules/anymatch/package.json new file mode 100644 index 00000000..2cb2307e --- /dev/null +++ b/node_modules/anymatch/package.json @@ -0,0 +1,48 @@ +{ + "name": "anymatch", + "version": "3.1.3", + "description": "Matches strings against configurable strings, globs, regular expressions, and/or functions", + "files": [ + "index.js", + "index.d.ts" + ], + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "author": { + "name": "Elan Shanker", + "url": "https://github.com/es128" + }, + "license": "ISC", + "homepage": "https://github.com/micromatch/anymatch", + "repository": { + "type": "git", + "url": "https://github.com/micromatch/anymatch" + }, + "keywords": [ + "match", + "any", + "string", + "file", + "fs", + "list", + "glob", + "regex", + "regexp", + "regular", + "expression", + "function" + ], + "scripts": { + "test": "nyc mocha", + "mocha": "mocha" + }, + "devDependencies": { + "mocha": "^6.1.3", + "nyc": "^14.0.0" + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/arch/LICENSE b/node_modules/arch/LICENSE new file mode 100644 index 00000000..c7e68527 --- /dev/null +++ b/node_modules/arch/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/arch/README.md b/node_modules/arch/README.md new file mode 100644 index 00000000..8ab14a62 --- /dev/null +++ b/node_modules/arch/README.md @@ -0,0 +1,71 @@ +# arch [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/arch/master.svg +[travis-url]: https://travis-ci.org/feross/arch +[npm-image]: https://img.shields.io/npm/v/arch.svg +[npm-url]: https://npmjs.org/package/arch +[downloads-image]: https://img.shields.io/npm/dm/arch.svg +[downloads-url]: https://npmjs.org/package/arch +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### Better `os.arch()` for node and the browser -- detect OS architecture + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/arch2.svg)](https://saucelabs.com/u/arch2) + +This module is used by [WebTorrent Desktop](http://webtorrent.io/desktop) to +determine if the user is on a 32-bit vs. 64-bit operating system to offer the +right app installer. + +In Node.js, the `os.arch()` method (and `process.arch` property) returns a string +identifying the operating system CPU architecture **for which the Node.js binary +was compiled**. + +This is not the same as the **operating system CPU architecture**. For example, +you can run Node.js 32-bit on a 64-bit OS. In that situation, `os.arch()` will +return a misleading 'x86' (32-bit) value, instead of 'x64' (64-bit). + +Use this package to get the actual operating system CPU architecture. + +**BONUS: This package works in the browser too.** + +## install + +``` +npm install arch +``` + +## usage + +```js +var arch = require('arch') +console.log(arch()) // always returns 'x64' or 'x86' +``` + +In the browser, there is no spec that defines where this information lives, so we +check all known locations including `navigator.userAgent`, `navigator.platform`, +and `navigator.cpuClass` to make a best guess. + +If there is no *affirmative indication* that the architecture is 64-bit, then +32-bit will be assumed. This makes this package perfect for determining what +installer executable to offer to desktop app users. If there is ambiguity, then +the user will get the 32-bit installer, which will work fine even for a user with +a 64-bit OS. + +For reference, `x64` means 64-bit and `x86` means 32-bit. + +Here is some history behind these naming conventions: + +- https://en.wikipedia.org/wiki/X86 +- https://en.wikipedia.org/wiki/IA-32 +- https://en.wikipedia.org/wiki/X86-64 + +## Node.js proposal - `os.sysarch()` + +Note: There is +[a proposal](https://github.com/nodejs/node-v0.x-archive/issues/2862#issuecomment-103942051) +to add this functionality to Node.js as `os.sysarch()`. + +## license + +MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). diff --git a/node_modules/arch/browser.js b/node_modules/arch/browser.js new file mode 100644 index 00000000..eca04d05 --- /dev/null +++ b/node_modules/arch/browser.js @@ -0,0 +1,43 @@ +module.exports = function arch () { + /** + * User agent strings that indicate a 64-bit OS. 
+ * See: http://stackoverflow.com/a/13709431/292185 + */ + var userAgent = navigator.userAgent + if ([ + 'x86_64', + 'x86-64', + 'Win64', + 'x64;', + 'amd64', + 'AMD64', + 'WOW64', + 'x64_64' + ].some(function (str) { + return userAgent.indexOf(str) > -1 + })) { + return 'x64' + } + + /** + * Platform strings that indicate a 64-bit OS. + * See: http://stackoverflow.com/a/19883965/292185 + */ + var platform = navigator.platform + if (platform === 'MacIntel' || platform === 'Linux x86_64') { + return 'x64' + } + + /** + * CPU class strings that indicate a 64-bit OS. + * See: http://stackoverflow.com/a/6267019/292185 + */ + if (navigator.cpuClass === 'x64') { + return 'x64' + } + + /** + * If none of the above, assume the architecture is 32-bit. + */ + return 'x86' +} diff --git a/node_modules/arch/index.d.ts b/node_modules/arch/index.d.ts new file mode 100644 index 00000000..acbce662 --- /dev/null +++ b/node_modules/arch/index.d.ts @@ -0,0 +1,4 @@ + +declare function arch(): 'x64' | 'x86'; + +export = arch; diff --git a/node_modules/arch/index.js b/node_modules/arch/index.js new file mode 100644 index 00000000..7d90b8ad --- /dev/null +++ b/node_modules/arch/index.js @@ -0,0 +1,60 @@ +/*! arch. MIT License. Feross Aboukhadijeh */ +var cp = require('child_process') +var fs = require('fs') +var path = require('path') + +/** + * Returns the operating system's CPU architecture. This is different than + * `process.arch` or `os.arch()` which returns the architecture the Node.js (or + * Electron) binary was compiled for. + */ +module.exports = function arch () { + /** + * The running binary is 64-bit, so the OS is clearly 64-bit. + */ + if (process.arch === 'x64') { + return 'x64' + } + + /** + * All recent versions of Mac OS are 64-bit. + */ + if (process.platform === 'darwin') { + return 'x64' + } + + /** + * On Windows, the most reliable way to detect a 64-bit OS from within a 32-bit + * app is based on the presence of a WOW64 file: %SystemRoot%\SysNative. + * See: https://twitter.com/feross/status/776949077208510464 + */ + if (process.platform === 'win32') { + var useEnv = false + try { + useEnv = !!(process.env.SYSTEMROOT && fs.statSync(process.env.SYSTEMROOT)) + } catch (err) {} + + var sysRoot = useEnv ? process.env.SYSTEMROOT : 'C:\\Windows' + + // If %SystemRoot%\SysNative exists, we are in a WOW64 FS Redirected application. + var isWOW64 = false + try { + isWOW64 = !!fs.statSync(path.join(sysRoot, 'sysnative')) + } catch (err) {} + + return isWOW64 ? 'x64' : 'x86' + } + + /** + * On Linux, use the `getconf` command to get the architecture. + */ + if (process.platform === 'linux') { + var output = cp.execSync('getconf LONG_BIT', { encoding: 'utf8' }) + return output === '64\n' ? 'x64' : 'x86' + } + + /** + * If none of the above, assume the architecture is 32-bit. 
+ */ + return 'x86' +} diff --git a/node_modules/arch/package.json b/node_modules/arch/package.json new file mode 100644 index 00000000..70a0446e --- /dev/null +++ b/node_modules/arch/package.json @@ -0,0 +1,60 @@ +{ + "name": "arch", + "description": "Better `os.arch()` for node and the browser -- detect OS architecture", + "version": "2.2.0", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "browser": "browser.js", + "types": "./index.d.ts", + "bugs": { + "url": "https://github.com/feross/arch/issues" + }, + "devDependencies": { + "airtap": "^3.0.0", + "standard": "*", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/arch", + "keywords": [ + "browser", + "browserify", + "arch", + "cpu info", + "cpus", + "architecture", + "navigator.platform", + "x64", + "x86", + "64 bit", + "32 bit" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/arch.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "airtap -- test/*.js", + "test-browser-local": "airtap --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/archive-type/index.js b/node_modules/archive-type/index.js new file mode 100644 index 00000000..c8dcd5b3 --- /dev/null +++ b/node_modules/archive-type/index.js @@ -0,0 +1,18 @@ +'use strict'; +const fileType = require('file-type'); + +const exts = new Set([ + '7z', + 'bz2', + 'gz', + 'rar', + 'tar', + 'zip', + 'xz', + 'gz' +]); + +module.exports = input => { + const ret = fileType(input); + return exts.has(ret && ret.ext) ? ret : null; +}; diff --git a/node_modules/archive-type/license b/node_modules/archive-type/license new file mode 100644 index 00000000..a8ecbbe9 --- /dev/null +++ b/node_modules/archive-type/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Kevin Mårtensson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
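The `archive-type/index.js` shown above simply delegates to `file-type` and returns the detection result only when the extension is one of the archive formats in its `exts` set, otherwise `null`. A small hypothetical usage sketch (editorial, not project code; the file names are assumptions):

```js
const fs = require('fs');
const archiveType = require('archive-type');

// Read the file into a Buffer and let archive-type inspect its magic bytes.
const buf = fs.readFileSync('example.zip'); // hypothetical file

console.log(archiveType(buf));                           // => { ext: 'zip', mime: 'application/zip' }
console.log(archiveType(fs.readFileSync('photo.png')));  // => null (detected, but not an archive format)
```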
diff --git a/node_modules/archive-type/node_modules/file-type/index.js b/node_modules/archive-type/node_modules/file-type/index.js new file mode 100644 index 00000000..66f2c445 --- /dev/null +++ b/node_modules/archive-type/node_modules/file-type/index.js @@ -0,0 +1,545 @@ +'use strict'; + +module.exports = input => { + const buf = new Uint8Array(input); + + if (!(buf && buf.length > 1)) { + return null; + } + + const check = (header, opts) => { + opts = Object.assign({ + offset: 0 + }, opts); + + for (let i = 0; i < header.length; i++) { + if (header[i] !== buf[i + opts.offset]) { + return false; + } + } + + return true; + }; + + if (check([0xFF, 0xD8, 0xFF])) { + return { + ext: 'jpg', + mime: 'image/jpeg' + }; + } + + if (check([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A])) { + return { + ext: 'png', + mime: 'image/png' + }; + } + + if (check([0x47, 0x49, 0x46])) { + return { + ext: 'gif', + mime: 'image/gif' + }; + } + + if (check([0x57, 0x45, 0x42, 0x50], {offset: 8})) { + return { + ext: 'webp', + mime: 'image/webp' + }; + } + + if (check([0x46, 0x4C, 0x49, 0x46])) { + return { + ext: 'flif', + mime: 'image/flif' + }; + } + + // Needs to be before `tif` check + if ( + (check([0x49, 0x49, 0x2A, 0x0]) || check([0x4D, 0x4D, 0x0, 0x2A])) && + check([0x43, 0x52], {offset: 8}) + ) { + return { + ext: 'cr2', + mime: 'image/x-canon-cr2' + }; + } + + if ( + check([0x49, 0x49, 0x2A, 0x0]) || + check([0x4D, 0x4D, 0x0, 0x2A]) + ) { + return { + ext: 'tif', + mime: 'image/tiff' + }; + } + + if (check([0x42, 0x4D])) { + return { + ext: 'bmp', + mime: 'image/bmp' + }; + } + + if (check([0x49, 0x49, 0xBC])) { + return { + ext: 'jxr', + mime: 'image/vnd.ms-photo' + }; + } + + if (check([0x38, 0x42, 0x50, 0x53])) { + return { + ext: 'psd', + mime: 'image/vnd.adobe.photoshop' + }; + } + + // Needs to be before the `zip` check + if ( + check([0x50, 0x4B, 0x3, 0x4]) && + check([0x6D, 0x69, 0x6D, 0x65, 0x74, 0x79, 0x70, 0x65, 0x61, 0x70, 0x70, 0x6C, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6F, 0x6E, 0x2F, 0x65, 0x70, 0x75, 0x62, 0x2B, 0x7A, 0x69, 0x70], {offset: 30}) + ) { + return { + ext: 'epub', + mime: 'application/epub+zip' + }; + } + + // Needs to be before `zip` check + // Assumes signed `.xpi` from addons.mozilla.org + if ( + check([0x50, 0x4B, 0x3, 0x4]) && + check([0x4D, 0x45, 0x54, 0x41, 0x2D, 0x49, 0x4E, 0x46, 0x2F, 0x6D, 0x6F, 0x7A, 0x69, 0x6C, 0x6C, 0x61, 0x2E, 0x72, 0x73, 0x61], {offset: 30}) + ) { + return { + ext: 'xpi', + mime: 'application/x-xpinstall' + }; + } + + if ( + check([0x50, 0x4B]) && + (buf[2] === 0x3 || buf[2] === 0x5 || buf[2] === 0x7) && + (buf[3] === 0x4 || buf[3] === 0x6 || buf[3] === 0x8) + ) { + return { + ext: 'zip', + mime: 'application/zip' + }; + } + + if (check([0x75, 0x73, 0x74, 0x61, 0x72], {offset: 257})) { + return { + ext: 'tar', + mime: 'application/x-tar' + }; + } + + if ( + check([0x52, 0x61, 0x72, 0x21, 0x1A, 0x7]) && + (buf[6] === 0x0 || buf[6] === 0x1) + ) { + return { + ext: 'rar', + mime: 'application/x-rar-compressed' + }; + } + + if (check([0x1F, 0x8B, 0x8])) { + return { + ext: 'gz', + mime: 'application/gzip' + }; + } + + if (check([0x42, 0x5A, 0x68])) { + return { + ext: 'bz2', + mime: 'application/x-bzip2' + }; + } + + if (check([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])) { + return { + ext: '7z', + mime: 'application/x-7z-compressed' + }; + } + + if (check([0x78, 0x01])) { + return { + ext: 'dmg', + mime: 'application/x-apple-diskimage' + }; + } + + if ( + ( + check([0x0, 0x0, 0x0]) && + (buf[3] === 0x18 || buf[3] === 0x20) && + check([0x66, 0x74, 0x79, 
0x70], {offset: 4}) + ) || + check([0x33, 0x67, 0x70, 0x35]) || + ( + check([0x0, 0x0, 0x0, 0x1C, 0x66, 0x74, 0x79, 0x70, 0x6D, 0x70, 0x34, 0x32]) && + check([0x6D, 0x70, 0x34, 0x31, 0x6D, 0x70, 0x34, 0x32, 0x69, 0x73, 0x6F, 0x6D], {offset: 16}) + ) || + check([0x0, 0x0, 0x0, 0x1C, 0x66, 0x74, 0x79, 0x70, 0x69, 0x73, 0x6F, 0x6D]) || + check([0x0, 0x0, 0x0, 0x1C, 0x66, 0x74, 0x79, 0x70, 0x6D, 0x70, 0x34, 0x32, 0x0, 0x0, 0x0, 0x0]) + ) { + return { + ext: 'mp4', + mime: 'video/mp4' + }; + } + + if (check([0x0, 0x0, 0x0, 0x1C, 0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x56])) { + return { + ext: 'm4v', + mime: 'video/x-m4v' + }; + } + + if (check([0x4D, 0x54, 0x68, 0x64])) { + return { + ext: 'mid', + mime: 'audio/midi' + }; + } + + // https://github.com/threatstack/libmagic/blob/master/magic/Magdir/matroska + if (check([0x1A, 0x45, 0xDF, 0xA3])) { + const sliced = buf.subarray(4, 4 + 4096); + const idPos = sliced.findIndex((el, i, arr) => arr[i] === 0x42 && arr[i + 1] === 0x82); + + if (idPos >= 0) { + const docTypePos = idPos + 3; + const findDocType = type => Array.from(type).every((c, i) => sliced[docTypePos + i] === c.charCodeAt(0)); + + if (findDocType('matroska')) { + return { + ext: 'mkv', + mime: 'video/x-matroska' + }; + } + + if (findDocType('webm')) { + return { + ext: 'webm', + mime: 'video/webm' + }; + } + } + } + + if (check([0x0, 0x0, 0x0, 0x14, 0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20]) || + check([0x66, 0x72, 0x65, 0x65], {offset: 4}) || + check([0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20], {offset: 4}) || + check([0x6D, 0x64, 0x61, 0x74], {offset: 4}) || // MJPEG + check([0x77, 0x69, 0x64, 0x65], {offset: 4})) { + return { + ext: 'mov', + mime: 'video/quicktime' + }; + } + + if ( + check([0x52, 0x49, 0x46, 0x46]) && + check([0x41, 0x56, 0x49], {offset: 8}) + ) { + return { + ext: 'avi', + mime: 'video/x-msvideo' + }; + } + + if (check([0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9])) { + return { + ext: 'wmv', + mime: 'video/x-ms-wmv' + }; + } + + if (check([0x0, 0x0, 0x1, 0xBA])) { + return { + ext: 'mpg', + mime: 'video/mpeg' + }; + } + + if ( + check([0x49, 0x44, 0x33]) || + check([0xFF, 0xFB]) + ) { + return { + ext: 'mp3', + mime: 'audio/mpeg' + }; + } + + if ( + check([0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41], {offset: 4}) || + check([0x4D, 0x34, 0x41, 0x20]) + ) { + return { + ext: 'm4a', + mime: 'audio/m4a' + }; + } + + // Needs to be before `ogg` check + if (check([0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64], {offset: 28})) { + return { + ext: 'opus', + mime: 'audio/opus' + }; + } + + if (check([0x4F, 0x67, 0x67, 0x53])) { + return { + ext: 'ogg', + mime: 'audio/ogg' + }; + } + + if (check([0x66, 0x4C, 0x61, 0x43])) { + return { + ext: 'flac', + mime: 'audio/x-flac' + }; + } + + if ( + check([0x52, 0x49, 0x46, 0x46]) && + check([0x57, 0x41, 0x56, 0x45], {offset: 8}) + ) { + return { + ext: 'wav', + mime: 'audio/x-wav' + }; + } + + if (check([0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A])) { + return { + ext: 'amr', + mime: 'audio/amr' + }; + } + + if (check([0x25, 0x50, 0x44, 0x46])) { + return { + ext: 'pdf', + mime: 'application/pdf' + }; + } + + if (check([0x4D, 0x5A])) { + return { + ext: 'exe', + mime: 'application/x-msdownload' + }; + } + + if ( + (buf[0] === 0x43 || buf[0] === 0x46) && + check([0x57, 0x53], {offset: 1}) + ) { + return { + ext: 'swf', + mime: 'application/x-shockwave-flash' + }; + } + + if (check([0x7B, 0x5C, 0x72, 0x74, 0x66])) { + return { + ext: 'rtf', + mime: 'application/rtf' + }; + } + + if (check([0x00, 0x61, 0x73, 0x6D])) 
{ + return { + ext: 'wasm', + mime: 'application/wasm' + }; + } + + if ( + check([0x77, 0x4F, 0x46, 0x46]) && + ( + check([0x00, 0x01, 0x00, 0x00], {offset: 4}) || + check([0x4F, 0x54, 0x54, 0x4F], {offset: 4}) + ) + ) { + return { + ext: 'woff', + mime: 'application/font-woff' + }; + } + + if ( + check([0x77, 0x4F, 0x46, 0x32]) && + ( + check([0x00, 0x01, 0x00, 0x00], {offset: 4}) || + check([0x4F, 0x54, 0x54, 0x4F], {offset: 4}) + ) + ) { + return { + ext: 'woff2', + mime: 'application/font-woff' + }; + } + + if ( + check([0x4C, 0x50], {offset: 34}) && + ( + check([0x00, 0x00, 0x01], {offset: 8}) || + check([0x01, 0x00, 0x02], {offset: 8}) || + check([0x02, 0x00, 0x02], {offset: 8}) + ) + ) { + return { + ext: 'eot', + mime: 'application/octet-stream' + }; + } + + if (check([0x00, 0x01, 0x00, 0x00, 0x00])) { + return { + ext: 'ttf', + mime: 'application/font-sfnt' + }; + } + + if (check([0x4F, 0x54, 0x54, 0x4F, 0x00])) { + return { + ext: 'otf', + mime: 'application/font-sfnt' + }; + } + + if (check([0x00, 0x00, 0x01, 0x00])) { + return { + ext: 'ico', + mime: 'image/x-icon' + }; + } + + if (check([0x46, 0x4C, 0x56, 0x01])) { + return { + ext: 'flv', + mime: 'video/x-flv' + }; + } + + if (check([0x25, 0x21])) { + return { + ext: 'ps', + mime: 'application/postscript' + }; + } + + if (check([0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])) { + return { + ext: 'xz', + mime: 'application/x-xz' + }; + } + + if (check([0x53, 0x51, 0x4C, 0x69])) { + return { + ext: 'sqlite', + mime: 'application/x-sqlite3' + }; + } + + if (check([0x4E, 0x45, 0x53, 0x1A])) { + return { + ext: 'nes', + mime: 'application/x-nintendo-nes-rom' + }; + } + + if (check([0x43, 0x72, 0x32, 0x34])) { + return { + ext: 'crx', + mime: 'application/x-google-chrome-extension' + }; + } + + if ( + check([0x4D, 0x53, 0x43, 0x46]) || + check([0x49, 0x53, 0x63, 0x28]) + ) { + return { + ext: 'cab', + mime: 'application/vnd.ms-cab-compressed' + }; + } + + // Needs to be before `ar` check + if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A, 0x64, 0x65, 0x62, 0x69, 0x61, 0x6E, 0x2D, 0x62, 0x69, 0x6E, 0x61, 0x72, 0x79])) { + return { + ext: 'deb', + mime: 'application/x-deb' + }; + } + + if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E])) { + return { + ext: 'ar', + mime: 'application/x-unix-archive' + }; + } + + if (check([0xED, 0xAB, 0xEE, 0xDB])) { + return { + ext: 'rpm', + mime: 'application/x-rpm' + }; + } + + if ( + check([0x1F, 0xA0]) || + check([0x1F, 0x9D]) + ) { + return { + ext: 'Z', + mime: 'application/x-compress' + }; + } + + if (check([0x4C, 0x5A, 0x49, 0x50])) { + return { + ext: 'lz', + mime: 'application/x-lzip' + }; + } + + if (check([0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1])) { + return { + ext: 'msi', + mime: 'application/x-msi' + }; + } + + if (check([0x06, 0x0E, 0x2B, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0D, 0x01, 0x02, 0x01, 0x01, 0x02])) { + return { + ext: 'mxf', + mime: 'application/mxf' + }; + } + + if (check([0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52])) { + return { + ext: 'blend', + mime: 'application/x-blender' + }; + } + + return null; +}; diff --git a/node_modules/archive-type/node_modules/file-type/license b/node_modules/archive-type/node_modules/file-type/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/archive-type/node_modules/file-type/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation 
files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/archive-type/node_modules/file-type/package.json b/node_modules/archive-type/node_modules/file-type/package.json new file mode 100644 index 00000000..9175dbc3 --- /dev/null +++ b/node_modules/archive-type/node_modules/file-type/package.json @@ -0,0 +1,107 @@ +{ + "name": "file-type", + "version": "4.4.0", + "description": "Detect the file type of a Buffer/Uint8Array", + "license": "MIT", + "repository": "sindresorhus/file-type", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "mime", + "file", + "type", + "archive", + "image", + "img", + "pic", + "picture", + "flash", + "photo", + "video", + "type", + "detect", + "check", + "is", + "exif", + "exe", + "binary", + "buffer", + "uint8array", + "jpg", + "png", + "gif", + "webp", + "flif", + "cr2", + "tif", + "bmp", + "jxr", + "psd", + "zip", + "tar", + "rar", + "gz", + "bz2", + "7z", + "dmg", + "mp4", + "m4v", + "mid", + "mkv", + "webm", + "mov", + "avi", + "mpg", + "mp3", + "m4a", + "ogg", + "opus", + "flac", + "wav", + "amr", + "pdf", + "epub", + "exe", + "swf", + "rtf", + "woff", + "woff2", + "eot", + "ttf", + "otf", + "ico", + "flv", + "ps", + "xz", + "sqlite", + "xpi", + "cab", + "deb", + "ar", + "rpm", + "Z", + "lz", + "msi", + "mxf", + "wasm", + "webassembly", + "blend" + ], + "devDependencies": { + "ava": "*", + "read-chunk": "^2.0.0", + "xo": "*" + } +} diff --git a/node_modules/archive-type/node_modules/file-type/readme.md b/node_modules/archive-type/node_modules/file-type/readme.md new file mode 100644 index 00000000..d09f1de4 --- /dev/null +++ b/node_modules/archive-type/node_modules/file-type/readme.md @@ -0,0 +1,154 @@ +# file-type [![Build Status](https://travis-ci.org/sindresorhus/file-type.svg?branch=master)](https://travis-ci.org/sindresorhus/file-type) + +> Detect the file type of a Buffer/Uint8Array + +The file type is detected by checking the [magic number](http://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer. 
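
The detection described in that sentence is nothing more than comparing bytes at known offsets against known signatures, exactly as the `index.js` earlier in this diff does with its `check` helper. The sketch below is an editorial illustration only, reusing two signatures visible in that source (PNG and PDF); it is not the package's actual implementation:

```js
// Minimal magic-number sniffing (illustration, not file-type's implementation).
const startsWith = (buf, signature, offset = 0) =>
	signature.every((byte, i) => buf[offset + i] === byte);

const sniff = buf => {
	// PNG signature: 89 50 4E 47 0D 0A 1A 0A
	if (startsWith(buf, [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A])) {
		return {ext: 'png', mime: 'image/png'};
	}
	// PDF signature: "%PDF" (25 50 44 46), the same bytes checked in index.js above
	if (startsWith(buf, [0x25, 0x50, 0x44, 0x46])) {
		return {ext: 'pdf', mime: 'application/pdf'};
	}
	return null;
};

sniff(Buffer.from([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0, 0]));
//=> {ext: 'png', mime: 'image/png'}
```

The real module applies the same pattern to every signature in the list further down, which is why it only ever needs the first 4100 bytes of a file.
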
+ + +## Install + +``` +$ npm install --save file-type +``` + + +## Usage + +##### Node.js + +```js +const readChunk = require('read-chunk'); +const fileType = require('file-type'); +const buffer = readChunk.sync('unicorn.png', 0, 4100); + +fileType(buffer); +//=> {ext: 'png', mime: 'image/png'} +``` + +Or from a remote location: + +```js +const http = require('http'); +const fileType = require('file-type'); +const url = 'http://assets-cdn.github.com/images/spinners/octocat-spinner-32.gif'; + +http.get(url, res => { + res.once('data', chunk => { + res.destroy(); + console.log(fileType(chunk)); + //=> {ext: 'gif', mime: 'image/gif'} + }); +}); +``` + +##### Browser + +```js +const xhr = new XMLHttpRequest(); +xhr.open('GET', 'unicorn.png'); +xhr.responseType = 'arraybuffer'; + +xhr.onload = () => { + fileType(new Uint8Array(this.response)); + //=> {ext: 'png', mime: 'image/png'} +}; + +xhr.send(); +``` + + +## API + +### fileType(input) + +Returns an `Object` with: + +- `ext` - One of the [supported file types](#supported-file-types) +- `mime` - The [MIME type](http://en.wikipedia.org/wiki/Internet_media_type) + +Or `null` when no match. + +#### input + +Type: `Buffer` `Uint8Array` + +It only needs the first 4100 bytes. + + +## Supported file types + +- [`jpg`](https://en.wikipedia.org/wiki/JPEG) +- [`png`](https://en.wikipedia.org/wiki/Portable_Network_Graphics) +- [`gif`](https://en.wikipedia.org/wiki/GIF) +- [`webp`](https://en.wikipedia.org/wiki/WebP) +- [`flif`](https://en.wikipedia.org/wiki/Free_Lossless_Image_Format) +- [`cr2`](http://fileinfo.com/extension/cr2) +- [`tif`](https://en.wikipedia.org/wiki/Tagged_Image_File_Format) +- [`bmp`](https://en.wikipedia.org/wiki/BMP_file_format) +- [`jxr`](https://en.wikipedia.org/wiki/JPEG_XR) +- [`psd`](https://en.wikipedia.org/wiki/Adobe_Photoshop#File_format) +- [`zip`](https://en.wikipedia.org/wiki/Zip_(file_format)) +- [`tar`](https://en.wikipedia.org/wiki/Tar_(computing)#File_format) +- [`rar`](https://en.wikipedia.org/wiki/RAR_(file_format)) +- [`gz`](https://en.wikipedia.org/wiki/Gzip) +- [`bz2`](https://en.wikipedia.org/wiki/Bzip2) +- [`7z`](https://en.wikipedia.org/wiki/7z) +- [`dmg`](https://en.wikipedia.org/wiki/Apple_Disk_Image) +- [`mp4`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#Filename_extensions) +- [`m4v`](https://en.wikipedia.org/wiki/M4V) +- [`mid`](https://en.wikipedia.org/wiki/MIDI) +- [`mkv`](https://en.wikipedia.org/wiki/Matroska) +- [`webm`](https://en.wikipedia.org/wiki/WebM) +- [`mov`](https://en.wikipedia.org/wiki/QuickTime_File_Format) +- [`avi`](https://en.wikipedia.org/wiki/Audio_Video_Interleave) +- [`wmv`](https://en.wikipedia.org/wiki/Windows_Media_Video) +- [`mpg`](https://en.wikipedia.org/wiki/MPEG-1) +- [`mp3`](https://en.wikipedia.org/wiki/MP3) +- [`m4a`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#.MP4_versus_.M4A) +- [`ogg`](https://en.wikipedia.org/wiki/Ogg) +- [`opus`](https://en.wikipedia.org/wiki/Opus_(audio_format)) +- [`flac`](https://en.wikipedia.org/wiki/FLAC) +- [`wav`](https://en.wikipedia.org/wiki/WAV) +- [`amr`](https://en.wikipedia.org/wiki/Adaptive_Multi-Rate_audio_codec) +- [`pdf`](https://en.wikipedia.org/wiki/Portable_Document_Format) +- [`epub`](https://en.wikipedia.org/wiki/EPUB) +- [`exe`](https://en.wikipedia.org/wiki/.exe) +- [`swf`](https://en.wikipedia.org/wiki/SWF) +- [`rtf`](https://en.wikipedia.org/wiki/Rich_Text_Format) +- [`woff`](https://en.wikipedia.org/wiki/Web_Open_Font_Format) +- [`woff2`](https://en.wikipedia.org/wiki/Web_Open_Font_Format) +- 
[`eot`](https://en.wikipedia.org/wiki/Embedded_OpenType) +- [`ttf`](https://en.wikipedia.org/wiki/TrueType) +- [`otf`](https://en.wikipedia.org/wiki/OpenType) +- [`ico`](https://en.wikipedia.org/wiki/ICO_(file_format)) +- [`flv`](https://en.wikipedia.org/wiki/Flash_Video) +- [`ps`](https://en.wikipedia.org/wiki/Postscript) +- [`xz`](https://en.wikipedia.org/wiki/Xz) +- [`sqlite`](https://www.sqlite.org/fileformat2.html) +- [`nes`](http://fileinfo.com/extension/nes) +- [`crx`](https://developer.chrome.com/extensions/crx) +- [`xpi`](https://en.wikipedia.org/wiki/XPInstall) +- [`cab`](https://en.wikipedia.org/wiki/Cabinet_(file_format)) +- [`deb`](https://en.wikipedia.org/wiki/Deb_(file_format)) +- [`ar`](https://en.wikipedia.org/wiki/Ar_(Unix)) +- [`rpm`](http://fileinfo.com/extension/rpm) +- [`Z`](http://fileinfo.com/extension/z) +- [`lz`](https://en.wikipedia.org/wiki/Lzip) +- [`msi`](https://en.wikipedia.org/wiki/Windows_Installer) +- [`mxf`](https://en.wikipedia.org/wiki/Material_Exchange_Format) +- [`wasm`](https://en.wikipedia.org/wiki/WebAssembly) +- [`blend`](https://wiki.blender.org/index.php/Dev:Source/Architecture/File_Format) + +*SVG isn't included as it requires the whole file to be read, but you can get it [here](https://github.com/sindresorhus/is-svg).* + +*Pull request welcome for additional commonly used file types.* + + +## Related + +- [file-type-cli](https://github.com/sindresorhus/file-type-cli) - CLI for this module + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/archive-type/package.json b/node_modules/archive-type/package.json new file mode 100644 index 00000000..b57b03a2 --- /dev/null +++ b/node_modules/archive-type/package.json @@ -0,0 +1,45 @@ +{ + "name": "archive-type", + "version": "4.0.0", + "description": "Detect the archive type of a Buffer/Uint8Array", + "license": "MIT", + "repository": "kevva/archive-type", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "https://github.com/kevva" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "7zip", + "archive", + "buffer", + "bz2", + "bzip2", + "check", + "detect", + "gz", + "gzip", + "mime", + "rar", + "zip", + "file", + "type" + ], + "dependencies": { + "file-type": "^4.2.0" + }, + "devDependencies": { + "ava": "*", + "pify": "^2.3.0", + "xo": "*" + } +} diff --git a/node_modules/archive-type/readme.md b/node_modules/archive-type/readme.md new file mode 100644 index 00000000..75495115 --- /dev/null +++ b/node_modules/archive-type/readme.md @@ -0,0 +1,62 @@ +# archive-type [![Build Status](https://travis-ci.org/kevva/archive-type.svg?branch=master)](https://travis-ci.org/kevva/archive-type) + +> Detect the archive type of a Buffer/Uint8Array + + +## Install + +``` +$ npm install --save archive-type +``` + + +## Usage + +```js +const archiveType = require('archive-type'); +const readChunk = require('read-chunk'); +const buffer = readChunk.sync('unicorn.zip', 0, 262); + +archiveType(buffer); +//=> {ext: 'zip', mime: 'application/zip'} +``` + + +## API + +### archiveType(input) + +Returns an `Object` with: + +- `ext` - One of the [supported file types](#supported-file-types) +- `mime` - The [MIME type](http://en.wikipedia.org/wiki/Internet_media_type) + +Or `null` when no match. + +#### input + +Type: `Buffer` `Uint8Array` + +It only needs the first 262 bytes. 
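
A hedged addition to the usage shown above: because only the first 262 bytes matter, you can avoid loading a whole archive into memory even without the `read-chunk` helper from the example. The snippet below is an illustration with plain `fs`, not part of the package's documentation:

```js
// Read just the first 262 bytes of a file and hand them to archive-type.
const fs = require('fs');
const archiveType = require('archive-type');

const firstBytes = (path, length = 262) => {
	const fd = fs.openSync(path, 'r');
	const buffer = Buffer.alloc(length);
	const bytesRead = fs.readSync(fd, buffer, 0, length, 0);
	fs.closeSync(fd);
	return buffer.subarray(0, bytesRead);
};

archiveType(firstBytes('unicorn.zip'));
//=> {ext: 'zip', mime: 'application/zip'}  (or null for non-archives)
```
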
+ + +## Supported file types + +- `7z` +- `bz2` +- `gz` +- `rar` +- `tar` +- `zip` +- `xz` +- `gz` + + +## Related + +- [archive-type-cli](https://github.com/kevva/archive-type-cli) - CLI for this module + + +## License + +MIT © [Kevin Mårtensson](https://github.com/kevva) diff --git a/node_modules/array-flatten/LICENSE b/node_modules/array-flatten/LICENSE new file mode 100644 index 00000000..983fbe8a --- /dev/null +++ b/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-flatten/README.md b/node_modules/array-flatten/README.md new file mode 100644 index 00000000..91fa5b63 --- /dev/null +++ b/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. + +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/node_modules/array-flatten/array-flatten.js b/node_modules/array-flatten/array-flatten.js new file mode 100644 index 00000000..089117b3 --- /dev/null +++ b/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. 
+ * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. + * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. + * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/node_modules/array-flatten/package.json b/node_modules/array-flatten/package.json new file mode 100644 index 00000000..1a24e2a1 --- /dev/null +++ b/node_modules/array-flatten/package.json @@ -0,0 +1,39 @@ +{ + "name": "array-flatten", + "version": "1.1.1", + "description": "Flatten an array of nested arrays into a single flat array", + "main": "array-flatten.js", + "files": [ + "array-flatten.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "homepage": "https://github.com/blakeembrey/array-flatten", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + } +} diff --git a/node_modules/array-union/index.d.ts b/node_modules/array-union/index.d.ts new file mode 100644 index 00000000..379fc1d2 --- /dev/null +++ b/node_modules/array-union/index.d.ts @@ -0,0 +1,25 @@ +/** +Create an array of unique values, in order, from the input arrays. 
+ +@example +``` +import arrayUnion = require('array-union'); + +arrayUnion([1, 1, 2, 3], [2, 3]); +//=> [1, 2, 3] + +arrayUnion(['foo', 'foo', 'bar']); +//=> ['foo', 'bar'] + +arrayUnion(['🐱', '🦄', '🐻'], ['🦄', '🌈']); +//=> ['🐱', '🦄', '🐻', '🌈'] + +arrayUnion(['🐱', '🦄'], ['🐻', '🦄'], ['🐶', '🌈', '🌈']); +//=> ['🐱', '🦄', '🐻', '🐶', '🌈'] +``` +*/ +declare function arrayUnion( + ...arguments: readonly ArgumentsType[] +): ArgumentsType; + +export = arrayUnion; diff --git a/node_modules/array-union/index.js b/node_modules/array-union/index.js new file mode 100644 index 00000000..a5a3df85 --- /dev/null +++ b/node_modules/array-union/index.js @@ -0,0 +1,3 @@ +const arrayUnion = (...arguments_) => [...new Set(arguments_.flat())]; + +export default arrayUnion; diff --git a/node_modules/array-union/license b/node_modules/array-union/license new file mode 100644 index 00000000..fa7ceba3 --- /dev/null +++ b/node_modules/array-union/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
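
Since the `index.d.ts` above only shows inputs and outputs, a short editorial note on the one-line `index.js`: `Array.prototype.flat()` with no argument flattens exactly one level (the list of argument arrays), and a `Set` keeps first-insertion order, which is where the "unique values, in order" guarantee comes from. A small sketch of the same technique, for illustration only:

```js
// The union in two explicit steps: flatten one level, then dedupe via Set.
const inputs = [['foo', 'foo', 'bar'], ['bar', 'baz']];

const flattened = inputs.flat();        // ['foo', 'foo', 'bar', 'bar', 'baz']
const unique = [...new Set(flattened)]; // Set keeps the first occurrence of each value
console.log(unique);                    // ['foo', 'bar', 'baz']
```
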
diff --git a/node_modules/array-union/package.json b/node_modules/array-union/package.json new file mode 100644 index 00000000..e7f7468b --- /dev/null +++ b/node_modules/array-union/package.json @@ -0,0 +1,46 @@ +{ + "name": "array-union", + "version": "3.0.1", + "description": "Create an array of unique values, in order, from the input arrays", + "license": "MIT", + "repository": "sindresorhus/array-union", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "array", + "set", + "uniq", + "unique", + "duplicate", + "remove", + "union", + "combine", + "merge" + ], + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.14.0", + "xo": "^0.38.2" + }, + "tsd": { + "compilerOptions": { + "esModuleInterop": true + } + } +} diff --git a/node_modules/array-union/readme.md b/node_modules/array-union/readme.md new file mode 100644 index 00000000..b0b67fbd --- /dev/null +++ b/node_modules/array-union/readme.md @@ -0,0 +1,39 @@ +# array-union + +> Create an array of unique values, in order, from the input arrays + +## Install + +``` +$ npm install array-union +``` + +## Usage + +```js +import arrayUnion from 'array-union'; + +arrayUnion([1, 1, 2, 3], [2, 3]); +//=> [1, 2, 3] + +arrayUnion(['foo', 'foo', 'bar']); +//=> ['foo', 'bar'] + +arrayUnion(['🐱', '🦄', '🐻'], ['🦄', '🌈']); +//=> ['🐱', '🦄', '🐻', '🌈'] + +arrayUnion(['🐱', '🦄'], ['🐻', '🦄'], ['🐶', '🌈', '🌈']); +//=> ['🐱', '🦄', '🐻', '🐶', '🌈'] +``` + +--- + +
diff --git a/node_modules/async/CHANGELOG.md b/node_modules/async/CHANGELOG.md new file mode 100644 index 00000000..8a9a1bfd --- /dev/null +++ b/node_modules/async/CHANGELOG.md @@ -0,0 +1,348 @@ +# v3.2.4 +- Fix a bug in `priorityQueue` where it didn't wait for the result. (#1725) +- Fix a bug where `unshiftAsync` was included in `priorityQueue`. (#1790) + +# v3.2.3 +- Fix bugs in comment parsing in `autoInject`. (#1767, #1780) + +# v3.2.2 +- Fix potential prototype pollution exploit + +# v3.2.1 +- Use `queueMicrotask` if available to the environment (#1761) +- Minor perf improvement in `priorityQueue` (#1727) +- More examples in documentation (#1726) +- Various doc fixes (#1708, #1712, #1717, #1740, #1739, #1749, #1756) +- Improved test coverage (#1754) + +# v3.2.0 +- Fix a bug in Safari related to overwriting `func.name` +- Remove built-in browserify configuration (#1653) +- Varios doc fixes (#1688, #1703, #1704) + +# v3.1.1 +- Allow redefining `name` property on wrapped functions. + +# v3.1.0 + +- Added `q.pushAsync` and `q.unshiftAsync`, analagous to `q.push` and `q.unshift`, except they always do not accept a callback, and reject if processing the task errors. (#1659) +- Promises returned from `q.push` and `q.unshift` when a callback is not passed now resolve even if an error ocurred. (#1659) +- Fixed a parsing bug in `autoInject` with complicated function bodies (#1663) +- Added ES6+ configuration for Browserify bundlers (#1653) +- Various doc fixes (#1664, #1658, #1665, #1652) + +# v3.0.1 + +## Bug fixes +- Fixed a regression where arrays passed to `queue` and `cargo` would be completely flattened. (#1645) +- Clarified Async's browser support (#1643) + + +# v3.0.0 + +The `async`/`await` release! + +There are a lot of new features and subtle breaking changes in this major version, but the biggest feature is that most Async methods return a Promise if you omit the callback, meaning you can `await` them from within an `async` function. + +```js +const results = await async.mapLimit(urls, 5, async url => { + const resp = await fetch(url) + return resp.body +}) +``` + +## Breaking Changes +- Most Async methods return a Promise when the final callback is omitted, making them `await`-able! (#1572) +- We are now making heavy use of ES2015 features, this means we have dropped out-of-the-box support for Node 4 and earlier, and many old versions of browsers. (#1541, #1553) +- In `queue`, `priorityQueue`, `cargo` and `cargoQueue`, the "event"-style methods, like `q.drain` and `q.saturated` are now methods that register a callback, rather than properties you assign a callback to. They are now of the form `q.drain(callback)`. If you do not pass a callback a Promise will be returned for the next occurrence of the event, making them `await`-able, e.g. `await q.drain()`. (#1586, #1641) +- Calling `callback(false)` will cancel an async method, preventing further iteration and callback calls. This is useful for preventing memory leaks when you break out of an async flow by calling an outer callback. (#1064, #1542) +- `during` and `doDuring` have been removed, and instead `whilst`, `doWhilst`, `until` and `doUntil` now have asynchronous `test` functions. (#850, #1557) +- `limits` of less than 1 now cause an error to be thrown in queues and collection methods. (#1249, #1552) +- `memoize` no longer memoizes errors (#1465, #1466) +- `applyEach`/`applyEachSeries` have a simpler interface, to make them more easily type-able. It always returns a function that takes in a single callback argument. 
If that callback is omitted, a promise is returned, making it awaitable. (#1228, #1640) + +## New Features +- Async generators are now supported in all the Collection methods. (#1560) +- Added `cargoQueue`, a queue with both `concurrency` and `payload` size parameters. (#1567) +- Queue objects returned from `queue` now have a `Symbol.iterator` method, meaning they can be iterated over to inspect the current list of items in the queue. (#1459, #1556) +- A ESM-flavored `async.mjs` is included in the `async` package. This is described in the `package.json` `"module"` field, meaning it should be automatically used by Webpack and other compatible bundlers. + +## Bug fixes +- Better handle arbitrary error objects in `asyncify` (#1568, #1569) + +## Other +- Removed Lodash as a dependency (#1283, #1528) +- Miscellaneous docs fixes (#1393, #1501, #1540, #1543, #1558, #1563, #1564, #1579, #1581) +- Miscellaneous test fixes (#1538) + +------- + +# v2.6.1 +- Updated lodash to prevent `npm audit` warnings. (#1532, #1533) +- Made `async-es` more optimized for webpack users (#1517) +- Fixed a stack overflow with large collections and a synchronous iterator (#1514) +- Various small fixes/chores (#1505, #1511, #1527, #1530) + +# v2.6.0 +- Added missing aliases for many methods. Previously, you could not (e.g.) `require('async/find')` or use `async.anyLimit`. (#1483) +- Improved `queue` performance. (#1448, #1454) +- Add missing sourcemap (#1452, #1453) +- Various doc updates (#1448, #1471, #1483) + +# v2.5.0 +- Added `concatLimit`, the `Limit` equivalent of [`concat`](https://caolan.github.io/async/docs.html#concat) ([#1426](https://github.com/caolan/async/issues/1426), [#1430](https://github.com/caolan/async/pull/1430)) +- `concat` improvements: it now preserves order, handles falsy values and the `iteratee` callback takes a variable number of arguments ([#1437](https://github.com/caolan/async/issues/1437), [#1436](https://github.com/caolan/async/pull/1436)) +- Fixed an issue in `queue` where there was a size discrepancy between `workersList().length` and `running()` ([#1428](https://github.com/caolan/async/issues/1428), [#1429](https://github.com/caolan/async/pull/1429)) +- Various doc fixes ([#1422](https://github.com/caolan/async/issues/1422), [#1424](https://github.com/caolan/async/pull/1424)) + +# v2.4.1 +- Fixed a bug preventing functions wrapped with `timeout()` from being re-used. ([#1418](https://github.com/caolan/async/issues/1418), [#1419](https://github.com/caolan/async/issues/1419)) + +# v2.4.0 +- Added `tryEach`, for running async functions in parallel, where you only expect one to succeed. 
([#1365](https://github.com/caolan/async/issues/1365), [#687](https://github.com/caolan/async/issues/687)) +- Improved performance, most notably in `parallel` and `waterfall` ([#1395](https://github.com/caolan/async/issues/1395)) +- Added `queue.remove()`, for removing items in a `queue` ([#1397](https://github.com/caolan/async/issues/1397), [#1391](https://github.com/caolan/async/issues/1391)) +- Fixed using `eval`, preventing Async from running in pages with Content Security Policy ([#1404](https://github.com/caolan/async/issues/1404), [#1403](https://github.com/caolan/async/issues/1403)) +- Fixed errors thrown in an `asyncify`ed function's callback being caught by the underlying Promise ([#1408](https://github.com/caolan/async/issues/1408)) +- Fixed timing of `queue.empty()` ([#1367](https://github.com/caolan/async/issues/1367)) +- Various doc fixes ([#1314](https://github.com/caolan/async/issues/1314), [#1394](https://github.com/caolan/async/issues/1394), [#1412](https://github.com/caolan/async/issues/1412)) + +# v2.3.0 +- Added support for ES2017 `async` functions. Wherever you can pass a Node-style/CPS function that uses a callback, you can also pass an `async` function. Previously, you had to wrap `async` functions with `asyncify`. The caveat is that it will only work if `async` functions are supported natively in your environment, transpiled implementations can't be detected. ([#1386](https://github.com/caolan/async/issues/1386), [#1390](https://github.com/caolan/async/issues/1390)) +- Small doc fix ([#1392](https://github.com/caolan/async/issues/1392)) + +# v2.2.0 +- Added `groupBy`, and the `Series`/`Limit` equivalents, analogous to [`_.groupBy`](http://lodash.com/docs#groupBy) ([#1364](https://github.com/caolan/async/issues/1364)) +- Fixed `transform` bug when `callback` was not passed ([#1381](https://github.com/caolan/async/issues/1381)) +- Added note about `reflect` to `parallel` docs ([#1385](https://github.com/caolan/async/issues/1385)) + +# v2.1.5 +- Fix `auto` bug when function names collided with Array.prototype ([#1358](https://github.com/caolan/async/issues/1358)) +- Improve some error messages ([#1349](https://github.com/caolan/async/issues/1349)) +- Avoid stack overflow case in queue +- Fixed an issue in `some`, `every` and `find` where processing would continue after the result was determined. +- Cleanup implementations of `some`, `every` and `find` + +# v2.1.3 +- Make bundle size smaller +- Create optimized hotpath for `filter` in array case. + +# v2.1.2 +- Fixed a stackoverflow bug with `detect`, `some`, `every` on large inputs ([#1293](https://github.com/caolan/async/issues/1293)). 
+ +# v2.1.0 + +- `retry` and `retryable` now support an optional `errorFilter` function that determines if the `task` should retry on the error ([#1256](https://github.com/caolan/async/issues/1256), [#1261](https://github.com/caolan/async/issues/1261)) +- Optimized array iteration in `race`, `cargo`, `queue`, and `priorityQueue` ([#1253](https://github.com/caolan/async/issues/1253)) +- Added alias documentation to doc site ([#1251](https://github.com/caolan/async/issues/1251), [#1254](https://github.com/caolan/async/issues/1254)) +- Added [BootStrap scrollspy](http://getbootstrap.com/javascript/#scrollspy) to docs to highlight in the sidebar the current method being viewed ([#1289](https://github.com/caolan/async/issues/1289), [#1300](https://github.com/caolan/async/issues/1300)) +- Various minor doc fixes ([#1263](https://github.com/caolan/async/issues/1263), [#1264](https://github.com/caolan/async/issues/1264), [#1271](https://github.com/caolan/async/issues/1271), [#1278](https://github.com/caolan/async/issues/1278), [#1280](https://github.com/caolan/async/issues/1280), [#1282](https://github.com/caolan/async/issues/1282), [#1302](https://github.com/caolan/async/issues/1302)) + +# v2.0.1 + +- Significantly optimized all iteration based collection methods such as `each`, `map`, `filter`, etc ([#1245](https://github.com/caolan/async/issues/1245), [#1246](https://github.com/caolan/async/issues/1246), [#1247](https://github.com/caolan/async/issues/1247)). + +# v2.0.0 + +Lots of changes here! + +First and foremost, we have a slick new [site for docs](https://caolan.github.io/async/). Special thanks to [**@hargasinski**](https://github.com/hargasinski) for his work converting our old docs to `jsdoc` format and implementing the new website. Also huge ups to [**@ivanseidel**](https://github.com/ivanseidel) for designing our new logo. It was a long process for both of these tasks, but I think these changes turned out extraordinary well. + +The biggest feature is modularization. You can now `require("async/series")` to only require the `series` function. Every Async library function is available this way. You still can `require("async")` to require the entire library, like you could do before. + +We also provide Async as a collection of ES2015 modules. You can now `import {each} from 'async-es'` or `import waterfall from 'async-es/waterfall'`. If you are using only a few Async functions, and are using a ES bundler such as Rollup, this can significantly lower your build size. + +Major thanks to [**@Kikobeats**](github.com/Kikobeats), [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for doing the majority of the modularization work, as well as [**@jdalton**](github.com/jdalton) and [**@Rich-Harris**](github.com/Rich-Harris) for advisory work on the general modularization strategy. + +Another one of the general themes of the 2.0 release is standardization of what an "async" function is. We are now more strictly following the node-style continuation passing style. That is, an async function is a function that: + +1. Takes a variable number of arguments +2. The last argument is always a callback +3. The callback can accept any number of arguments +4. The first argument passed to the callback will be treated as an error result, if the argument is truthy +5. Any number of result arguments can be passed after the "error" argument +6. The callback is called once and exactly once, either on the same tick or later tick of the JavaScript event loop. 
+ +There were several cases where Async accepted some functions that did not strictly have these properties, most notably `auto`, `every`, `some`, `filter`, `reject` and `detect`. + +Another theme is performance. We have eliminated internal deferrals in all cases where they make sense. For example, in `waterfall` and `auto`, there was a `setImmediate` between each task -- these deferrals have been removed. A `setImmediate` call can add up to 1ms of delay. This might not seem like a lot, but it can add up if you are using many Async functions in the course of processing a HTTP request, for example. Nearly all asynchronous functions that do I/O already have some sort of deferral built in, so the extra deferral is unnecessary. The trade-off of this change is removing our built-in stack-overflow defense. Many synchronous callback calls in series can quickly overflow the JS call stack. If you do have a function that is sometimes synchronous (calling its callback on the same tick), and are running into stack overflows, wrap it with `async.ensureAsync()`. + +Another big performance win has been re-implementing `queue`, `cargo`, and `priorityQueue` with [doubly linked lists](https://en.wikipedia.org/wiki/Doubly_linked_list) instead of arrays. This has lead to queues being an order of [magnitude faster on large sets of tasks](https://github.com/caolan/async/pull/1205). + +## New Features + +- Async is now modularized. Individual functions can be `require()`d from the main package. (`require('async/auto')`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) +- Async is also available as a collection of ES2015 modules in the new `async-es` package. (`import {forEachSeries} from 'async-es'`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) +- Added `race`, analogous to `Promise.race()`. It will run an array of async tasks in parallel and will call its callback with the result of the first task to respond. ([#568](https://github.com/caolan/async/issues/568), [#1038](https://github.com/caolan/async/issues/1038)) +- Collection methods now accept ES2015 iterators. Maps, Sets, and anything that implements the iterator spec can now be passed directly to `each`, `map`, `parallel`, etc.. ([#579](https://github.com/caolan/async/issues/579), [#839](https://github.com/caolan/async/issues/839), [#1074](https://github.com/caolan/async/issues/1074)) +- Added `mapValues`, for mapping over the properties of an object and returning an object with the same keys. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) +- Added `timeout`, a wrapper for an async function that will make the task time-out after the specified time. ([#1007](https://github.com/caolan/async/issues/1007), [#1027](https://github.com/caolan/async/issues/1027)) +- Added `reflect` and `reflectAll`, analagous to [`Promise.reflect()`](http://bluebirdjs.com/docs/api/reflect.html), a wrapper for async tasks that always succeeds, by gathering results and errors into an object. ([#942](https://github.com/caolan/async/issues/942), [#1012](https://github.com/caolan/async/issues/1012), [#1095](https://github.com/caolan/async/issues/1095)) +- `constant` supports dynamic arguments -- it will now always use its last argument as the callback. 
([#1016](https://github.com/caolan/async/issues/1016), [#1052](https://github.com/caolan/async/issues/1052)) +- `setImmediate` and `nextTick` now support arguments to partially apply to the deferred function, like the node-native versions do. ([#940](https://github.com/caolan/async/issues/940), [#1053](https://github.com/caolan/async/issues/1053)) +- `auto` now supports resolving cyclic dependencies using [Kahn's algorithm](https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm) ([#1140](https://github.com/caolan/async/issues/1140)). +- Added `autoInject`, a relative of `auto` that automatically spreads a task's dependencies as arguments to the task function. ([#608](https://github.com/caolan/async/issues/608), [#1055](https://github.com/caolan/async/issues/1055), [#1099](https://github.com/caolan/async/issues/1099), [#1100](https://github.com/caolan/async/issues/1100)) +- You can now limit the concurrency of `auto` tasks. ([#635](https://github.com/caolan/async/issues/635), [#637](https://github.com/caolan/async/issues/637)) +- Added `retryable`, a relative of `retry` that wraps an async function, making it retry when called. ([#1058](https://github.com/caolan/async/issues/1058)) +- `retry` now supports specifying a function that determines the next time interval, useful for exponential backoff, logging and other retry strategies. ([#1161](https://github.com/caolan/async/issues/1161)) +- `retry` will now pass all of the arguments the task function was resolved with to the callback ([#1231](https://github.com/caolan/async/issues/1231)). +- Added `q.unsaturated` -- callback called when a `queue`'s number of running workers falls below a threshold. ([#868](https://github.com/caolan/async/issues/868), [#1030](https://github.com/caolan/async/issues/1030), [#1033](https://github.com/caolan/async/issues/1033), [#1034](https://github.com/caolan/async/issues/1034)) +- Added `q.error` -- a callback called whenever a `queue` task calls its callback with an error. ([#1170](https://github.com/caolan/async/issues/1170)) +- `applyEach` and `applyEachSeries` now pass results to the final callback. ([#1088](https://github.com/caolan/async/issues/1088)) + +## Breaking changes + +- Calling a callback more than once is considered an error, and an error will be thrown. This had an explicit breaking change in `waterfall`. If you were relying on this behavior, you should more accurately represent your control flow as an event emitter or stream. ([#814](https://github.com/caolan/async/issues/814), [#815](https://github.com/caolan/async/issues/815), [#1048](https://github.com/caolan/async/issues/1048), [#1050](https://github.com/caolan/async/issues/1050)) +- `auto` task functions now always take the callback as the last argument. If a task has dependencies, the `results` object will be passed as the first argument. To migrate old task functions, wrap them with [`_.flip`](https://lodash.com/docs#flip) ([#1036](https://github.com/caolan/async/issues/1036), [#1042](https://github.com/caolan/async/issues/1042)) +- Internal `setImmediate` calls have been refactored away. This may make existing flows vulnerable to stack overflows if you use many synchronous functions in series. Use `ensureAsync` to work around this. ([#696](https://github.com/caolan/async/issues/696), [#704](https://github.com/caolan/async/issues/704), [#1049](https://github.com/caolan/async/issues/1049), [#1050](https://github.com/caolan/async/issues/1050)) +- `map` used to return an object when iterating over an object. 
`map` now always returns an array, like in other libraries. The previous object behavior has been split out into `mapValues`. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) +- `filter`, `reject`, `some`, `every`, `detect` and their families like `{METHOD}Series` and `{METHOD}Limit` now expect an error as the first callback argument, rather than just a simple boolean. Pass `null` as the first argument, or use `fs.access` instead of `fs.exists`. ([#118](https://github.com/caolan/async/issues/118), [#774](https://github.com/caolan/async/issues/774), [#1028](https://github.com/caolan/async/issues/1028), [#1041](https://github.com/caolan/async/issues/1041)) +- `{METHOD}` and `{METHOD}Series` are now implemented in terms of `{METHOD}Limit`. This is a major internal simplification, and is not expected to cause many problems, but it does subtly affect how functions execute internally. ([#778](https://github.com/caolan/async/issues/778), [#847](https://github.com/caolan/async/issues/847)) +- `retry`'s callback is now optional. Previously, omitting the callback would partially apply the function, meaning it could be passed directly as a task to `series` or `auto`. The partially applied "control-flow" behavior has been separated out into `retryable`. ([#1054](https://github.com/caolan/async/issues/1054), [#1058](https://github.com/caolan/async/issues/1058)) +- The test function for `whilst`, `until`, and `during` used to be passed non-error args from the iteratee function's callback, but this led to weirdness where the first call of the test function would be passed no args. We have made it so the test function is never passed extra arguments, and only the `doWhilst`, `doUntil`, and `doDuring` functions pass iteratee callback arguments to the test function ([#1217](https://github.com/caolan/async/issues/1217), [#1224](https://github.com/caolan/async/issues/1224)) +- The `q.tasks` array has been renamed `q._tasks` and is now implemented as a doubly linked list (DLL). Any code that used to interact with this array will need to be updated to either use the provided helpers or support DLLs ([#1205](https://github.com/caolan/async/issues/1205)). +- The timing of the `q.saturated()` callback in a `queue` has been modified to better reflect when tasks pushed to the queue will start queueing. ([#724](https://github.com/caolan/async/issues/724), [#1078](https://github.com/caolan/async/issues/1078)) +- Removed `iterator` method in favour of [ES2015 iterator protocol](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators ) which natively supports arrays ([#1237](https://github.com/caolan/async/issues/1237)) +- Dropped support for Component, Jam, SPM, and Volo ([#1175](https://github.com/caolan/async/issues/1175), #[#176](https://github.com/caolan/async/issues/176)) + +## Bug Fixes + +- Improved handling of no dependency cases in `auto` & `autoInject` ([#1147](https://github.com/caolan/async/issues/1147)). +- Fixed a bug where the callback generated by `asyncify` with `Promises` could resolve twice ([#1197](https://github.com/caolan/async/issues/1197)). +- Fixed several documented optional callbacks not actually being optional ([#1223](https://github.com/caolan/async/issues/1223)). + +## Other + +- Added `someSeries` and `everySeries` for symmetry, as well as a complete set of `any`/`anyLimit`/`anySeries` and `all`/`/allLmit`/`allSeries` aliases. +- Added `find` as an alias for `detect. 
(as well as `findLimit` and `findSeries`). +- Various doc fixes ([#1005](https://github.com/caolan/async/issues/1005), [#1008](https://github.com/caolan/async/issues/1008), [#1010](https://github.com/caolan/async/issues/1010), [#1015](https://github.com/caolan/async/issues/1015), [#1021](https://github.com/caolan/async/issues/1021), [#1037](https://github.com/caolan/async/issues/1037), [#1039](https://github.com/caolan/async/issues/1039), [#1051](https://github.com/caolan/async/issues/1051), [#1102](https://github.com/caolan/async/issues/1102), [#1107](https://github.com/caolan/async/issues/1107), [#1121](https://github.com/caolan/async/issues/1121), [#1123](https://github.com/caolan/async/issues/1123), [#1129](https://github.com/caolan/async/issues/1129), [#1135](https://github.com/caolan/async/issues/1135), [#1138](https://github.com/caolan/async/issues/1138), [#1141](https://github.com/caolan/async/issues/1141), [#1153](https://github.com/caolan/async/issues/1153), [#1216](https://github.com/caolan/async/issues/1216), [#1217](https://github.com/caolan/async/issues/1217), [#1232](https://github.com/caolan/async/issues/1232), [#1233](https://github.com/caolan/async/issues/1233), [#1236](https://github.com/caolan/async/issues/1236), [#1238](https://github.com/caolan/async/issues/1238)) + +Thank you [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for taking the lead on version 2 of async. + +------------------------------------------ + +# v1.5.2 +- Allow using `"constructor"` as an argument in `memoize` ([#998](https://github.com/caolan/async/issues/998)) +- Give a better error messsage when `auto` dependency checking fails ([#994](https://github.com/caolan/async/issues/994)) +- Various doc updates ([#936](https://github.com/caolan/async/issues/936), [#956](https://github.com/caolan/async/issues/956), [#979](https://github.com/caolan/async/issues/979), [#1002](https://github.com/caolan/async/issues/1002)) + +# v1.5.1 +- Fix issue with `pause` in `queue` with concurrency enabled ([#946](https://github.com/caolan/async/issues/946)) +- `while` and `until` now pass the final result to callback ([#963](https://github.com/caolan/async/issues/963)) +- `auto` will properly handle concurrency when there is no callback ([#966](https://github.com/caolan/async/issues/966)) +- `auto` will no. properly stop execution when an error occurs ([#988](https://github.com/caolan/async/issues/988), [#993](https://github.com/caolan/async/issues/993)) +- Various doc fixes ([#971](https://github.com/caolan/async/issues/971), [#980](https://github.com/caolan/async/issues/980)) + +# v1.5.0 + +- Added `transform`, analogous to [`_.transform`](http://lodash.com/docs#transform) ([#892](https://github.com/caolan/async/issues/892)) +- `map` now returns an object when an object is passed in, rather than array with non-numeric keys. `map` will begin always returning an array with numeric indexes in the next major release. ([#873](https://github.com/caolan/async/issues/873)) +- `auto` now accepts an optional `concurrency` argument to limit the number o. running tasks ([#637](https://github.com/caolan/async/issues/637)) +- Added `queue#workersList()`, to retrieve the lis. of currently running tasks. 
([#891](https://github.com/caolan/async/issues/891)) +- Various code simplifications ([#896](https://github.com/caolan/async/issues/896), [#904](https://github.com/caolan/async/issues/904)) +- Various doc fixes :scroll: ([#890](https://github.com/caolan/async/issues/890), [#894](https://github.com/caolan/async/issues/894), [#903](https://github.com/caolan/async/issues/903), [#905](https://github.com/caolan/async/issues/905), [#912](https://github.com/caolan/async/issues/912)) + +# v1.4.2 + +- Ensure coverage files don't get published on npm ([#879](https://github.com/caolan/async/issues/879)) + +# v1.4.1 + +- Add in overlooked `detectLimit` method ([#866](https://github.com/caolan/async/issues/866)) +- Removed unnecessary files from npm releases ([#861](https://github.com/caolan/async/issues/861)) +- Removed usage of a reserved word to prevent :boom: in older environments ([#870](https://github.com/caolan/async/issues/870)) + +# v1.4.0 + +- `asyncify` now supports promises ([#840](https://github.com/caolan/async/issues/840)) +- Added `Limit` versions of `filter` and `reject` ([#836](https://github.com/caolan/async/issues/836)) +- Add `Limit` versions of `detect`, `some` and `every` ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) +- `some`, `every` and `detect` now short circuit early ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) +- Improve detection of the global object ([#804](https://github.com/caolan/async/issues/804)), enabling use in WebWorkers +- `whilst` now called with arguments from iterator ([#823](https://github.com/caolan/async/issues/823)) +- `during` now gets called with arguments from iterator ([#824](https://github.com/caolan/async/issues/824)) +- Code simplifications and optimizations aplenty ([diff](https://github.com/caolan/async/compare/v1.3.0...v1.4.0)) + + +# v1.3.0 + +New Features: +- Added `constant` +- Added `asyncify`/`wrapSync` for making sync functions work with callbacks. ([#671](https://github.com/caolan/async/issues/671), [#806](https://github.com/caolan/async/issues/806)) +- Added `during` and `doDuring`, which are like `whilst` with an async truth test. ([#800](https://github.com/caolan/async/issues/800)) +- `retry` now accepts an `interval` parameter to specify a delay between retries. ([#793](https://github.com/caolan/async/issues/793)) +- `async` should work better in Web Workers due to better `root` detection ([#804](https://github.com/caolan/async/issues/804)) +- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` ([#642](https://github.com/caolan/async/issues/642)) +- Various internal updates ([#786](https://github.com/caolan/async/issues/786), [#801](https://github.com/caolan/async/issues/801), [#802](https://github.com/caolan/async/issues/802), [#803](https://github.com/caolan/async/issues/803)) +- Various doc fixes ([#790](https://github.com/caolan/async/issues/790), [#794](https://github.com/caolan/async/issues/794)) + +Bug Fixes: +- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. ([#740](https://github.com/caolan/async/issues/740), [#744](https://github.com/caolan/async/issues/744), [#783](https://github.com/caolan/async/issues/783)) + + +# v1.2.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. 
Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) + + +# v1.2.0 + +New Features: + +- Added `timesLimit` ([#743](https://github.com/caolan/async/issues/743)) +- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. ([#747](https://github.com/caolan/async/issues/747), [#772](https://github.com/caolan/async/issues/772)) + +Bug Fixes: + +- Fixed a regression in `each` and family with empty arrays that have additional properties. ([#775](https://github.com/caolan/async/issues/775), [#777](https://github.com/caolan/async/issues/777)) + + +# v1.1.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) + + +# v1.1.0 + +New Features: + +- `cargo` now supports all of the same methods and event callbacks as `queue`. +- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. ([#769](https://github.com/caolan/async/issues/769)) +- Optimized `map`, `eachOf`, and `waterfall` families of functions +- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array ([#667](https://github.com/caolan/async/issues/667)). +- The callback is now optional for the composed results of `compose` and `seq`. ([#618](https://github.com/caolan/async/issues/618)) +- Reduced file size by 4kb, (minified version by 1kb) +- Added code coverage through `nyc` and `coveralls` ([#768](https://github.com/caolan/async/issues/768)) + +Bug Fixes: + +- `forever` will no longer stack overflow with a synchronous iterator ([#622](https://github.com/caolan/async/issues/622)) +- `eachLimit` and other limit functions will stop iterating once an error occurs ([#754](https://github.com/caolan/async/issues/754)) +- Always pass `null` in callbacks when there is no error ([#439](https://github.com/caolan/async/issues/439)) +- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue ([#668](https://github.com/caolan/async/issues/668)) +- `each` and family will properly handle an empty array ([#578](https://github.com/caolan/async/issues/578)) +- `eachSeries` and family will finish if the underlying array is modified during execution ([#557](https://github.com/caolan/async/issues/557)) +- `queue` will throw if a non-function is passed to `q.push()` ([#593](https://github.com/caolan/async/issues/593)) +- Doc fixes ([#629](https://github.com/caolan/async/issues/629), [#766](https://github.com/caolan/async/issues/766)) + + +# v1.0.0 + +No known breaking changes, we are simply complying with semver from here on out. + +Changes: + +- Start using a changelog! 
+- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) ([#168](https://github.com/caolan/async/issues/168) [#704](https://github.com/caolan/async/issues/704) [#321](https://github.com/caolan/async/issues/321)) +- Detect deadlocks in `auto` ([#663](https://github.com/caolan/async/issues/663)) +- Better support for require.js ([#527](https://github.com/caolan/async/issues/527)) +- Throw if queue created with concurrency `0` ([#714](https://github.com/caolan/async/issues/714)) +- Fix unneeded iteration in `queue.resume()` ([#758](https://github.com/caolan/async/issues/758)) +- Guard against timer mocking overriding `setImmediate` ([#609](https://github.com/caolan/async/issues/609) [#611](https://github.com/caolan/async/issues/611)) +- Miscellaneous doc fixes ([#542](https://github.com/caolan/async/issues/542) [#596](https://github.com/caolan/async/issues/596) [#615](https://github.com/caolan/async/issues/615) [#628](https://github.com/caolan/async/issues/628) [#631](https://github.com/caolan/async/issues/631) [#690](https://github.com/caolan/async/issues/690) [#729](https://github.com/caolan/async/issues/729)) +- Use single noop function internally ([#546](https://github.com/caolan/async/issues/546)) +- Optimize internal `_each`, `_map` and `_keys` functions. diff --git a/node_modules/async/LICENSE b/node_modules/async/LICENSE new file mode 100644 index 00000000..b18aed69 --- /dev/null +++ b/node_modules/async/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010-2018 Caolan McMahon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/async/README.md b/node_modules/async/README.md new file mode 100644 index 00000000..77f645e2 --- /dev/null +++ b/node_modules/async/README.md @@ -0,0 +1,59 @@ +![Async Logo](https://raw.githubusercontent.com/caolan/async/master/logo/async-logo_readme.jpg) + +![Github Actions CI status](https://github.com/caolan/async/actions/workflows/ci.yml/badge.svg) +[![NPM version](https://img.shields.io/npm/v/async.svg)](https://www.npmjs.com/package/async) +[![Coverage Status](https://coveralls.io/repos/caolan/async/badge.svg?branch=master)](https://coveralls.io/r/caolan/async?branch=master) +[![Join the chat at https://gitter.im/caolan/async](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/caolan/async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![jsDelivr Hits](https://data.jsdelivr.com/v1/package/npm/async/badge?style=rounded)](https://www.jsdelivr.com/package/npm/async) + + + +Async is a utility module which provides straight-forward, powerful functions for working with [asynchronous JavaScript](http://caolan.github.io/async/v3/global.html). Although originally designed for use with [Node.js](https://nodejs.org/) and installable via `npm i async`, it can also be used directly in the browser. A ESM/MJS version is included in the main `async` package that should automatically be used with compatible bundlers such as Webpack and Rollup. + +A pure ESM version of Async is available as [`async-es`](https://www.npmjs.com/package/async-es). + +For Documentation, visit + +*For Async v1.5.x documentation, go [HERE](https://github.com/caolan/async/blob/v1.5.2/README.md)* + + +```javascript +// for use with Node-style callbacks... +var async = require("async"); + +var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; +var configs = {}; + +async.forEachOf(obj, (value, key, callback) => { + fs.readFile(__dirname + value, "utf8", (err, data) => { + if (err) return callback(err); + try { + configs[key] = JSON.parse(data); + } catch (e) { + return callback(e); + } + callback(); + }); +}, err => { + if (err) console.error(err.message); + // configs is now a map of JSON data + doSomethingWith(configs); +}); +``` + +```javascript +var async = require("async"); + +// ...or ES2017 async functions +async.mapLimit(urls, 5, async function(url) { + const response = await fetch(url) + return response.body +}, (err, results) => { + if (err) throw err + // results is now an array of the response bodies + console.log(results) +}) +``` diff --git a/node_modules/async/all.js b/node_modules/async/all.js new file mode 100644 index 00000000..148db683 --- /dev/null +++ b/node_modules/async/all.js @@ -0,0 +1,119 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. 
+ * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(every, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/allLimit.js b/node_modules/async/allLimit.js new file mode 100644 index 00000000..25b2c089 --- /dev/null +++ b/node_modules/async/allLimit.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everyLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/allSeries.js b/node_modules/async/allSeries.js new file mode 100644 index 00000000..147c3dc5 --- /dev/null +++ b/node_modules/async/allSeries.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everySeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/any.js b/node_modules/async/any.js new file mode 100644 index 00000000..2046cf64 --- /dev/null +++ b/node_modules/async/any.js @@ -0,0 +1,122 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
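
The `everyLimit` and `everySeries` variants above are documented without examples. A brief sketch reusing the `fileExists` helper and directory layout from the `every` docs:

```javascript
var async = require("async");
var fs = require("fs");

// Same helper as in the `every` example.
function fileExists(file, callback) {
    fs.access(file, fs.constants.F_OK, (err) => {
        callback(null, !err);
    });
}

var fileList = ["dir1/file1.txt", "dir2/file3.txt", "dir3/file5.txt"];

// At most 2 existence checks run at a time.
async.everyLimit(fileList, 2, fileExists, function (err, allExist) {
    console.log(allExist); // true if every file exists
});

// One check at a time, in array order.
async.everySeries(fileList, fileExists, function (err, allExist) {
    console.log(allExist);
});
```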
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(some, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/anyLimit.js b/node_modules/async/anyLimit.js new file mode 100644 index 00000000..c8a295a8 --- /dev/null +++ b/node_modules/async/anyLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. 
+ * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/anySeries.js b/node_modules/async/anySeries.js new file mode 100644 index 00000000..ee0654ba --- /dev/null +++ b/node_modules/async/anySeries.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/apply.js b/node_modules/async/apply.js new file mode 100644 index 00000000..5246833a --- /dev/null +++ b/node_modules/async/apply.js @@ -0,0 +1,55 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (fn, ...args) { + return (...callArgs) => fn(...args, ...callArgs); +}; + +module.exports = exports["default"]; /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. 
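
Likewise, `someLimit` and `someSeries` above ship without examples of their own. A minimal sketch, again assuming the `fileExists` helper from the `some` docs:

```javascript
var async = require("async");
var fs = require("fs");

function fileExists(file, callback) {
    fs.access(file, fs.constants.F_OK, (err) => {
        callback(null, !err);
    });
}

// At most 2 checks in flight at once; stops as soon as one returns true.
async.someLimit(["dir1/missing.txt", "dir3/file5.txt"], 2, fileExists, function (err, anyExists) {
    console.log(anyExists); // true, since dir3/file5.txt exists
});

// One check at a time, in array order.
async.someSeries(["dir1/missing.txt", "dir3/file5.txt"], fileExists, function (err, anyExists) {
    console.log(anyExists);
});
```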
+ * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ \ No newline at end of file diff --git a/node_modules/async/applyEach.js b/node_modules/async/applyEach.js new file mode 100644 index 00000000..b08c6701 --- /dev/null +++ b/node_modules/async/applyEach.js @@ -0,0 +1,57 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _applyEach = require('./internal/applyEach.js'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _map = require('./map.js'); + +var _map2 = _interopRequireDefault(_map); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. 
+ * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ +exports.default = (0, _applyEach2.default)(_map2.default); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/applyEachSeries.js b/node_modules/async/applyEachSeries.js new file mode 100644 index 00000000..6a19ca3f --- /dev/null +++ b/node_modules/async/applyEachSeries.js @@ -0,0 +1,37 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _applyEach = require('./internal/applyEach.js'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _mapSeries = require('./mapSeries.js'); + +var _mapSeries2 = _interopRequireDefault(_mapSeries); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. + */ +exports.default = (0, _applyEach2.default)(_mapSeries2.default); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/asyncify.js b/node_modules/async/asyncify.js new file mode 100644 index 00000000..3c3bf886 --- /dev/null +++ b/node_modules/async/asyncify.js @@ -0,0 +1,118 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncify; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. 
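
`applyEachSeries` has no example of its own; a hedged sketch mirroring the `applyEach` call shape, where the two stub functions below are assumptions standing in for real async tasks:

```javascript
var async = require("async");

// Stubs standing in for the enableSearch / updateSchema functions
// referenced in the applyEach example (hypothetical implementations).
function enableSearch(bucket, callback) {
    setTimeout(() => callback(null, "search enabled for " + bucket), 10);
}
function updateSchema(bucket, callback) {
    setTimeout(() => callback(null, "schema updated for " + bucket), 10);
}

// Same call shape as applyEach, but the functions run one at a time.
const appliedFn = async.applyEachSeries([enableSearch, updateSchema], "bucket");

appliedFn((err, results) => {
    console.log(results);
    // results[0] is the result of enableSearch, results[1] of updateSchema
});
```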
+ * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. + * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if ((0, _wrapAsync.isAsync)(func)) { + return function (...args /*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback); + }; + } + + return (0, _initialParams2.default)(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback); + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + (0, _setImmediate2.default)(e => { + throw e; + }, err); + } +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/auto.js b/node_modules/async/auto.js new file mode 100644 index 00000000..c4a85d43 --- /dev/null +++ b/node_modules/async/auto.js @@ -0,0 +1,333 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = auto; + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. 
+ * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = (0, _once2.default)(callback || (0, _promiseCallback.promiseCallback)()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + '` has a non-existent dependency `' + dependencyName + '` in ' + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return; + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while (readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = (0, _onlyOnce2.default)((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return; + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return; + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = (0, _wrapAsync2.default)(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error('async.auto cannot execute tasks due to a recursive dependency'); + } + } + + function 
getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[_promiseCallback.PROMISE_SYMBOL]; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/autoInject.js b/node_modules/async/autoInject.js new file mode 100644 index 00000000..393baad6 --- /dev/null +++ b/node_modules/async/autoInject.js @@ -0,0 +1,182 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = autoInject; + +var _auto = require('./auto.js'); + +var _auto2 = _interopRequireDefault(_auto); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; +var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; +var FN_ARG_SPLIT = /,/; +var FN_ARG = /(=.+)?(\s*)$/; + +function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index + 1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = endIndex === -1 ? string.length : endIndex; + } else if (endBlockComment !== -1 && string[index] === '/' && string[index + 1] === '*') { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; +} + +function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src); + let [, args] = match; + return args.replace(/\s/g, '').split(FN_ARG_SPLIT).map(arg => arg.replace(FN_ARG, '').trim()); +} + +/** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. 
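
The optional `concurrency` argument that `auto` accepts is not exercised in the examples above. A minimal sketch with the task bodies reduced to placeholders:

```javascript
var async = require("async");

// Limit `auto` to one task at a time by passing the optional concurrency argument.
async.auto({
    get_data: function (callback) {
        callback(null, "data");
    },
    make_folder: function (callback) {
        callback(null, "folder");
    },
    write_file: ["get_data", "make_folder", function (results, callback) {
        callback(null, "filename");
    }]
}, 1, function (err, results) {
    console.log(results); // tasks ran one at a time because concurrency is 1
});
```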
+ * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ +function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = (0, _wrapAsync.isAsync)(taskFn); + var hasNoDeps = !fnIsAsync && taskFn.length === 1 || fnIsAsync && taskFn.length === 0; + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if (taskFn.length === 0 && !fnIsAsync && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + (0, _wrapAsync2.default)(taskFn)(...newArgs); + } + }); + + return (0, _auto2.default)(newTasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/bower.json b/node_modules/async/bower.json new file mode 100644 index 00000000..390c6502 --- /dev/null +++ b/node_modules/async/bower.json @@ -0,0 +1,17 @@ +{ + "name": "async", + "main": "dist/async.js", + "ignore": [ + "bower_components", + "lib", + "test", + "node_modules", + "perf", + "support", + "**/.*", + "*.config.js", + "*.json", + "index.js", + "Makefile" + ] +} diff --git a/node_modules/async/cargo.js b/node_modules/async/cargo.js new file mode 100644 index 00000000..aa385f8a --- /dev/null +++ b/node_modules/async/cargo.js @@ -0,0 +1,63 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = cargo; + +var _queue = require('./internal/queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. 
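
A compact sketch of the `cargo` behaviour described above, with a payload of 2; the task names are illustrative:

```javascript
var async = require("async");

// A cargo that hands the worker up to 2 queued tasks per call.
var cargo = async.cargo(function (tasks, callback) {
    tasks.forEach(function (task) {
        console.log("processing " + task.name);
    });
    callback();
}, 2);

// Tasks pushed individually are delivered to the worker in batches.
cargo.push({ name: "foo" }, function (err) {
    console.log("finished processing foo");
});
cargo.push({ name: "bar" }, function (err) {
    console.log("finished processing bar");
});
cargo.push({ name: "baz" }, function (err) {
    console.log("finished processing baz");
});
```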
+ * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, Infinity, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concat, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/concatLimit.js b/node_modules/async/concatLimit.js new file mode 100644 index 00000000..3d170f17 --- /dev/null +++ b/node_modules/async/concatLimit.js @@ -0,0 +1,60 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +exports.default = (0, _awaitify2.default)(concatLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/concatSeries.js b/node_modules/async/concatSeries.js new file mode 100644 index 00000000..84add3b0 --- /dev/null +++ b/node_modules/async/concatSeries.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concatSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/constant.js b/node_modules/async/constant.js new file mode 100644 index 00000000..07596538 --- /dev/null +++ b/node_modules/async/constant.js @@ -0,0 +1,55 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (...args) { + return function (...ignoredArgs /*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; +}; + +module.exports = exports["default"]; /** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. 
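
`concatLimit` and `concatSeries` above are documented without examples. A minimal sketch, assuming the dir1/dir2/dir3 layout used throughout these docs:

```javascript
var async = require("async");
var fs = require("fs");

var directoryList = ["dir1", "dir2", "dir3"];

// Read at most 2 directories at a time and flatten the file lists.
async.concatLimit(directoryList, 2, fs.readdir, function (err, files) {
    console.log(files);
    // e.g. [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', 'file5.txt' ]
});

// One directory at a time, preserving input order in the result.
async.concatSeries(directoryList, fs.readdir, function (err, files) {
    console.log(files);
});
```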
+ * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ \ No newline at end of file diff --git a/node_modules/async/detect.js b/node_modules/async/detect.js new file mode 100644 index 00000000..05b2e5c6 --- /dev/null +++ b/node_modules/async/detect.js @@ -0,0 +1,96 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detect, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/detectLimit.js b/node_modules/async/detectLimit.js new file mode 100644 index 00000000..db6961ec --- /dev/null +++ b/node_modules/async/detectLimit.js @@ -0,0 +1,48 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detectLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/detectSeries.js b/node_modules/async/detectSeries.js new file mode 100644 index 00000000..b9131b4a --- /dev/null +++ b/node_modules/async/detectSeries.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(1), coll, iteratee, callback); +} + +exports.default = (0, _awaitify2.default)(detectSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/dir.js b/node_modules/async/dir.js new file mode 100644 index 00000000..950d0a22 --- /dev/null +++ b/node_modules/async/dir.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _consoleFunc = require('./internal/consoleFunc.js'); + +var _consoleFunc2 = _interopRequireDefault(_consoleFunc); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. 
+ * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ +exports.default = (0, _consoleFunc2.default)('dir'); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/dist/async.js b/node_modules/async/dist/async.js new file mode 100644 index 00000000..8d5e7825 --- /dev/null +++ b/node_modules/async/dist/async.js @@ -0,0 +1,6059 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (factory((global.async = {}))); +}(this, (function (exports) { 'use strict'; + + /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ + function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); + } + + function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; + } + + /* istanbul ignore file */ + + var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; + var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; + var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + + function fallback(fn) { + setTimeout(fn, 0); + } + + function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); + } + + var _defer; + + if (hasQueueMicrotask) { + _defer = queueMicrotask; + } else if (hasSetImmediate) { + _defer = setImmediate; + } else if (hasNextTick) { + _defer = process.nextTick; + } else { + _defer = fallback; + } + + var setImmediate$1 = wrap(_defer); + + /** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. 
+ * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. + * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ + function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } + + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); + } + + function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); + } + + function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } + } + + function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; + } + + function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; + } + + function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; + } + + function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; + } + + // conditionally promisify a function. + // only return a promise if a callback is omitted + function awaitify (asyncFn, arity = asyncFn.length) { + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? 
cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } + + return awaitable + } + + function applyEach (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; + } + + function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); + } + + function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; + } + + // A temporary value used to identify if the loop should be broken. + // See #1064, #1293 + const breakLoop = {}; + + function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper + } + + function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); + } + + function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? {value: coll[i], key: i} : null; + } + } + + function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } + } + + function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? {value: obj[key], key} : null; + }; + } + + function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } + + var iterator = getIterator(coll); + return iterator ? 
createES2015Iterator(iterator) : createObjectIterator(coll); + } + + function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; + } + + // for async generators + function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) + + if (err === false) { + done = true; + canceled = true; + return + } + + if (result === breakLoop || (done && running <= 0)) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } + + replenish(); + } + + var eachOfLimit = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } + + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; + }; + + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). 
+ * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); + } + + var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); + + // eachOf implementation optimized for array-likes + function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } + } + + // a generic version of eachOf which can handle array, object, and iterator cases. + function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$2(coll, Infinity, iteratee, callback); + } + + /** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. 
+ * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ + function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); + } + + var eachOf$1 = awaitify(eachOf, 3); + + /** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) + } + var map$1 = awaitify(map, 3); + + /** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. 
+ * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ + var applyEach$1 = applyEach(map$1); + + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$2(coll, 1, iteratee, callback) + } + var eachOfSeries$1 = awaitify(eachOfSeries, 3); + + /** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) + } + var mapSeries$1 = awaitify(mapSeries, 3); + + /** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. 
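+ * @example
+ *
+ * // Illustrative sketch, not from the upstream docs: mirrors the applyEach
+ * // example above, but enableSearch and updateSchema (hypothetical async
+ * // functions) are called one after the other instead of in parallel.
+ * const appliedFn = async.applyEachSeries([enableSearch, updateSchema], 'bucket')
+ *
+ * appliedFn((err, results) => {
+ *     // results[0] is the result of enableSearch
+ *     // results[1] is the result of updateSchema
+ * });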
+ */ + var applyEachSeries = applyEach(mapSeries$1); + + const PROMISE_SYMBOL = Symbol('promiseCallback'); + + function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); + + return callback + } + + /** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } + + function getDependents(taskName) { + var result = []; + 
Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[PROMISE_SYMBOL] + } + + var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; + var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; + var FN_ARG_SPLIT = /,/; + var FN_ARG = /(=.+)?(\s*)$/; + + function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index+1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = (endIndex === -1) ? string.length : endIndex; + } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; + } + + function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); + } + + /** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ + function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); + + return auto(newTasks, callback); + } + + // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation + // used for queues. This implementation assumes that the node provided by the user can be modified + // to adjust the next and last properties. We implement only the minimal functionality + // for queue support. 
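+ // Illustrative sketch, not part of the library source: the queue below keeps
+ // its pending task items ({data, callback} objects) in one of these lists.
+ //
+ //     const list = new DLL();
+ //     list.push({data: 'a'});   // first node becomes both head and tail
+ //     list.push({data: 'b'});
+ //     list.shift().data;        // 'a' -- nodes come back out in FIFO order
+ //     list.toArray();           // ['b'] -- iteration yields the data values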
+ class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty () { + while(this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } + + shift() { + return this.head && this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this] + } + + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } + } + + function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; + } + + function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on (event, handler) { + events[event].push(handler); + } + + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } + + var item = q._createTaskItem( + data, + rejectOnError ? 
promiseCallback : + (callback || promiseCallback) + ); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } + + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); + + }; + + var isProcessing = false; + var q = { + _tasks: new DLL(), + _createTaskItem (data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). 
+ if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; + } + + /** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. 
+ * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function reduce(coll, memo, iteratee, callback) { + callback = once(callback); + var _iteratee = wrapAsync(iteratee); + return eachOfSeries$1(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); + } + var reduce$1 = awaitify(reduce, 4); + + /** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ + function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } + + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); + + return cb[PROMISE_SYMBOL] + }; + } + + /** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. 
Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ + function compose(...args) { + return seq(...args.reverse()); + } + + /** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) + } + var mapLimit$1 = awaitify(mapLimit, 4); + + /** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
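 + *
 + * A possible usage sketch (assuming `fs.readdir` as the iteratee, as in the
 + * concat example further below), reading at most two directories at a time:
 + *
 + *     async.concatLimit(['dir1', 'dir2', 'dir3'], 2, fs.readdir, (err, files) => {
 + *         if (err) return console.error(err);
 + *         console.log(files); // a single flattened array of file names
 + *     });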
+ * @returns A Promise, if no callback is passed + */ + function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); + } + var concatLimit$1 = awaitify(concatLimit, 4); + + /** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ + function concat(coll, 
iteratee, callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) + } + var concat$1 = awaitify(concat, 3); + + /** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) + } + var concatSeries$1 = awaitify(concatSeries, 3); + + /** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ + function constant(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; + } + + function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; + } + + /** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. 
+ * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) + } + var detect$1 = awaitify(detect, 3); + + /** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
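 + *
 + * A short illustrative sketch (`fileExists` as defined in the detect example
 + * above), checking at most two files at a time:
 + *
 + *     async.detectLimit(['file3.txt', 'file2.txt', 'dir1/file1.txt'], 2, fileExists,
 + *         (err, result) => {
 + *             console.log(result); // the first file found to exist
 + *         });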
+ * @returns {Promise} a promise, if a callback is omitted + */ + function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) + } + var detectLimit$1 = awaitify(detectLimit, 4); + + /** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ + function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) + } + + var detectSeries$1 = awaitify(detectSeries, 3); + + function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }) + } + + /** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ + var dir = consoleFunc('dir'); + + /** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. 
Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); + } + + var doWhilst$1 = awaitify(doWhilst, 3); + + /** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); + } + + function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); + } + + /** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * } + * } + * + */ + function eachLimit(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + + var each = awaitify(eachLimit, 3); + + /** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + var eachLimit$2 = awaitify(eachLimit$1, 4); + + /** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. 
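 + *
 + * A minimal usage sketch (`deleteFile` and `fileList` as defined in the each
 + * example above), deleting the files strictly one after another:
 + *
 + *     async.eachSeries(fileList, deleteFile, err => {
 + *         if (err) console.log(err);
 + *         else console.log('All files have been deleted successfully');
 + *     });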
+ * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachSeries(coll, iteratee, callback) { + return eachLimit$2(coll, 1, iteratee, callback) + } + var eachSeries$1 = awaitify(eachSeries, 3); + + /** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ + function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; + } + + /** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function every(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) + } + var every$1 = awaitify(every, 3); + + /** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) + } + var everyLimit$1 = awaitify(everyLimit, 4); + + /** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. 
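 + *
 + * A short usage sketch (`fileExists` and `fileList` as in the every example
 + * above), testing the files one at a time:
 + *
 + *     async.everySeries(fileList, fileExists, (err, allExist) => {
 + *         console.log(allExist); // true only if every file exists
 + *     });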
+ * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) + } + var everySeries$1 = awaitify(everySeries, 3); + + function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); + } + + function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); + } + + function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); + } + + /** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) + } + var filter$1 = awaitify(filter, 3); + + /** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ + function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit(limit), coll, iteratee, callback) + } + var filterLimit$1 = awaitify(filterLimit, 4); + + /** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ + function filterSeries (coll, iteratee, callback) { + return _filter(eachOfSeries$1, coll, iteratee, callback) + } + var filterSeries$1 = awaitify(filterSeries, 3); + + /** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ + function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); + } + var forever$1 = awaitify(forever, 2); + + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); + } + + var groupByLimit$1 = awaitify(groupByLimit, 4); + + /** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. 
That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) + } + + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). 
+ * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) + } + + /** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ + var log = consoleFunc('log'); + + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); + } + + var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + + /** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) + } + + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. 
+ * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) + } + + /** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. + * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ + function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + } + + /* istanbul ignore file */ + + /** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... 
- any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ + var _defer$1; + + if (hasNextTick) { + _defer$1 = process.nextTick; + } else if (hasSetImmediate) { + _defer$1 = setImmediate; + } else { + _defer$1 = fallback; + } + + var nextTick = wrap(_defer$1); + + var parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); + }, 3); + + /** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function parallel$1(tasks, callback) { + return parallel(eachOf$1, tasks, callback); + } + + /** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + */ + function parallelLimit(tasks, limit, callback) { + return parallel(eachOfLimit(limit), tasks, callback); + } + + /** + * A queue of tasks for the worker function to complete. 
+ * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. 
+ * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ + + /** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. + * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. + * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ + function queue$1 (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); + } + + // Binary min-heap implementation used for priority queue. 
+ // Implementation is stable, i.e. push time is considered for equal priorities + class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty () { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } + } + + function leftChi(i) { + return (i<<1)+1; + } + + function parent(i) { + return ((i+1)>>1)-1; + } + + function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } + } + + /** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. + * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. 
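+ *
+ * @example
+ *
+ * // Illustrative sketch (not part of the upstream docs); `sendNotification`
+ * // is a hypothetical async worker used only for demonstration.
+ * const pq = async.priorityQueue(function(task, callback) {
+ *     sendNotification(task, callback);
+ * }, 2);
+ *
+ * // lower priority numbers are processed first
+ * pq.push({msg: 'server down'}, 1);
+ * pq.push({msg: 'weekly digest'}, 10);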
+ */ + function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue$1(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new Heap(); + q._createTaskItem = ({data, priority}, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return {data: tasks, priority}; + } + return tasks.map(data => { return {data, priority}; }); + } + + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function(data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; + } + + /** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ + function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } + } + + var race$1 = awaitify(race, 2); + + /** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
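+ *
+ * @example
+ *
+ * // Illustrative sketch (not part of the upstream docs): concatenating from
+ * // the right, so the accumulated result is '321'.
+ * async.reduceRight([1, 2, 3], '', function(memo, item, callback) {
+ *     callback(null, memo + item);
+ * }, function(err, result) {
+ *     // result is now '321'
+ * });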
+ * @returns {Promise} a promise, if no callback is passed + */ + function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); + } + + /** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ + function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); + } + + /** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ + function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; + } + + function reject(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); + } + + /** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function reject$1 (coll, iteratee, callback) { + return reject(eachOf$1, coll, iteratee, callback) + } + var reject$2 = awaitify(reject$1, 3); + + /** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function rejectLimit (coll, limit, iteratee, callback) { + return reject(eachOfLimit(limit), coll, iteratee, callback) + } + var rejectLimit$1 = awaitify(rejectLimit, 4); + + /** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
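+ *
+ * @example
+ *
+ * // Illustrative sketch (not part of the upstream docs), reusing the
+ * // `fileList` and `fileExists` helpers from the `reject` example above;
+ * // the files are checked one at a time instead of in parallel.
+ * async.rejectSeries(fileList, fileExists, function(err, results) {
+ *     // results again contains only the non-existing files
+ * });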
+ * @returns {Promise} a promise, if no callback is passed + */ + function rejectSeries (coll, iteratee, callback) { + return reject(eachOfSeries$1, coll, iteratee, callback) + } + var rejectSeries$1 = awaitify(rejectSeries, 3); + + function constant$1(value) { + return function () { + return value; + } + } + + /** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... 
milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ + const DEFAULT_TIMES = 5; + const DEFAULT_INTERVAL = 0; + + function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant$1(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = wrapAsync(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[PROMISE_SYMBOL] + } + + function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant$1(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } + } + + /** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. 
+ * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ + function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); + + return callback[PROMISE_SYMBOL] + }); + } + + /** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). 
+ * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function series(tasks, callback) { + return parallel(eachOfSeries$1, tasks, callback); + } + + /** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) + } + var some$1 = awaitify(some, 3); + + /** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
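+ *
+ * @example
+ *
+ * // Illustrative sketch (not part of the upstream docs), reusing the
+ * // `fileExists` helper from the `some` example above; at most 2 files
+ * // are checked at a time.
+ * async.someLimit(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], 2, fileExists,
+ *     function(err, result) {
+ *         // result is true since dir3/file5.txt exists
+ *     }
+ * );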
+ * @returns {Promise} a promise, if no callback provided + */ + function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) + } + var someLimit$1 = awaitify(someLimit, 4); + + /** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) + } + var someSeries$1 = awaitify(someSeries, 3); + + /** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + } + } + var sortBy$1 = awaitify(sortBy, 3); + + /** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... + * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ + function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); + + return initialParams((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); + } + + function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; + } + + /** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); + } + + /** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. 
+ * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ + function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) + } + + /** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) + } + + /** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. + * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? [] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); + + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] + } + + /** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. 
+ * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ + function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); + } + + var tryEach$1 = awaitify(tryEach); + + /** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. + * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ + function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; + } + + /** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ + function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); + } + var whilst$1 = awaitify(whilst, 3); + + /** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ + function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); + } + + /** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ + function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } + + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); + } + + var waterfall$1 = awaitify(waterfall); + + /** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. 
Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ + + var index = { + apply, + applyEach: applyEach$1, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo, + cargoQueue: cargo$1, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$2, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$2, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel: parallel$1, + parallelLimit, + priorityQueue, + queue: queue$1, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$2, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, + + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$2, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: eachOfLimit$2, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 + }; + + exports.default = index; + exports.apply = apply; + exports.applyEach = applyEach$1; + exports.applyEachSeries = applyEachSeries; + exports.asyncify = asyncify; + exports.auto = auto; + exports.autoInject = autoInject; + exports.cargo = cargo; + exports.cargoQueue = cargo$1; + exports.compose = compose; + exports.concat = concat$1; + exports.concatLimit = concatLimit$1; + exports.concatSeries = concatSeries$1; + exports.constant = constant; + exports.detect = detect$1; + exports.detectLimit = detectLimit$1; + exports.detectSeries = detectSeries$1; + exports.dir = dir; + exports.doUntil = doUntil; + exports.doWhilst = doWhilst$1; + exports.each = each; + exports.eachLimit = eachLimit$2; + exports.eachOf = eachOf$1; + exports.eachOfLimit = eachOfLimit$2; + exports.eachOfSeries = eachOfSeries$1; + exports.eachSeries = eachSeries$1; + exports.ensureAsync = ensureAsync; + exports.every = every$1; + exports.everyLimit = everyLimit$1; + exports.everySeries = everySeries$1; + exports.filter = filter$1; + exports.filterLimit = filterLimit$1; + exports.filterSeries = filterSeries$1; + exports.forever = forever$1; + 
exports.groupBy = groupBy; + exports.groupByLimit = groupByLimit$1; + exports.groupBySeries = groupBySeries; + exports.log = log; + exports.map = map$1; + exports.mapLimit = mapLimit$1; + exports.mapSeries = mapSeries$1; + exports.mapValues = mapValues; + exports.mapValuesLimit = mapValuesLimit$1; + exports.mapValuesSeries = mapValuesSeries; + exports.memoize = memoize; + exports.nextTick = nextTick; + exports.parallel = parallel$1; + exports.parallelLimit = parallelLimit; + exports.priorityQueue = priorityQueue; + exports.queue = queue$1; + exports.race = race$1; + exports.reduce = reduce$1; + exports.reduceRight = reduceRight; + exports.reflect = reflect; + exports.reflectAll = reflectAll; + exports.reject = reject$2; + exports.rejectLimit = rejectLimit$1; + exports.rejectSeries = rejectSeries$1; + exports.retry = retry; + exports.retryable = retryable; + exports.seq = seq; + exports.series = series; + exports.setImmediate = setImmediate$1; + exports.some = some$1; + exports.someLimit = someLimit$1; + exports.someSeries = someSeries$1; + exports.sortBy = sortBy$1; + exports.timeout = timeout; + exports.times = times; + exports.timesLimit = timesLimit; + exports.timesSeries = timesSeries; + exports.transform = transform; + exports.tryEach = tryEach$1; + exports.unmemoize = unmemoize; + exports.until = until; + exports.waterfall = waterfall$1; + exports.whilst = whilst$1; + exports.all = every$1; + exports.allLimit = everyLimit$1; + exports.allSeries = everySeries$1; + exports.any = some$1; + exports.anyLimit = someLimit$1; + exports.anySeries = someSeries$1; + exports.find = detect$1; + exports.findLimit = detectLimit$1; + exports.findSeries = detectSeries$1; + exports.flatMap = concat$1; + exports.flatMapLimit = concatLimit$1; + exports.flatMapSeries = concatSeries$1; + exports.forEach = each; + exports.forEachSeries = eachSeries$1; + exports.forEachLimit = eachLimit$2; + exports.forEachOf = eachOf$1; + exports.forEachOfSeries = eachOfSeries$1; + exports.forEachOfLimit = eachOfLimit$2; + exports.inject = reduce$1; + exports.foldl = reduce$1; + exports.foldr = reduceRight; + exports.select = filter$1; + exports.selectLimit = filterLimit$1; + exports.selectSeries = filterSeries$1; + exports.wrapSync = asyncify; + exports.during = whilst$1; + exports.doDuring = doWhilst$1; + + Object.defineProperty(exports, '__esModule', { value: true }); + +}))); diff --git a/node_modules/async/dist/async.min.js b/node_modules/async/dist/async.min.js new file mode 100644 index 00000000..a12963b6 --- /dev/null +++ b/node_modules/async/dist/async.min.js @@ -0,0 +1 @@ +(function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t(e.async={})})(this,function(e){'use strict';function t(e,...t){return(...n)=>e(...t,...n)}function n(e){return function(...t){var n=t.pop();return e.call(this,t,n)}}function a(e){setTimeout(e,0)}function i(e){return(t,...n)=>e(()=>t(...n))}function r(e){return u(e)?function(...t){const n=t.pop(),a=e.apply(this,t);return s(a,n)}:n(function(t,n){var a;try{a=e.apply(this,t)}catch(t){return n(t)}return a&&"function"==typeof a.then?s(a,n):void n(null,a)})}function s(e,t){return e.then(e=>{l(t,null,e)},e=>{l(t,e&&e.message?e:new Error(e))})}function l(e,t,n){try{e(t,n)}catch(e){_e(t=>{throw t},e)}}function u(e){return"AsyncFunction"===e[Symbol.toStringTag]}function d(e){return"AsyncGenerator"===e[Symbol.toStringTag]}function p(e){return"function"==typeof e[Symbol.asyncIterator]}function 
c(e){if("function"!=typeof e)throw new Error("expected a function");return u(e)?r(e):e}function o(e,t=e.length){if(!t)throw new Error("arity is undefined");return function(...n){return"function"==typeof n[t-1]?e.apply(this,n):new Promise((a,i)=>{n[t-1]=(e,...t)=>e?i(e):void a(1{c(e).apply(i,n.concat(t))},a)});return a}}function f(e,t,n,a){t=t||[];var i=[],r=0,s=c(n);return e(t,(e,t,n)=>{var a=r++;s(e,(e,t)=>{i[a]=t,n(e)})},e=>{a(e,i)})}function y(e){return e&&"number"==typeof e.length&&0<=e.length&&0==e.length%1}function m(e){function t(...t){if(null!==e){var n=e;e=null,n.apply(this,t)}}return Object.assign(t,e),t}function g(e){return e[Symbol.iterator]&&e[Symbol.iterator]()}function k(e){var t=-1,n=e.length;return function(){return++t=t||d||l||(d=!0,e.next().then(({value:e,done:t})=>{if(!(u||l))return d=!1,t?(l=!0,void(0>=p&&a(null))):void(p++,n(e,c,r),c++,i())}).catch(s))}function r(e,t){return p-=1,u?void 0:e?s(e):!1===e?(l=!0,void(u=!0)):t===be||l&&0>=p?(l=!0,a(null)):void i()}function s(e){u||(d=!1,l=!0,a(e))}let l=!1,u=!1,d=!1,p=0,c=0;i()}function O(e,t,n){function a(e,t){!1===e&&(l=!0);!0===l||(e?n(e):(++r===s||t===be)&&n(null))}n=m(n);var i=0,r=0,{length:s}=e,l=!1;for(0===s&&n(null);i{t=e,n=a}),e}function A(e,t,n){function a(e,t){g.push(()=>l(e,t))}function i(){if(!h){if(0===g.length&&0===o)return n(null,p);for(;g.length&&oe()),i()}function l(e,t){if(!f){var a=L((t,...a)=>{if(o--,!1===t)return void(h=!0);if(2>a.length&&([a]=a),t){var i={};if(Object.keys(p).forEach(e=>{i[e]=p[e]}),i[e]=a,f=!0,y=Object.create(null),h)return;n(t,i)}else p[e]=a,s(e)});o++;var i=c(t[t.length-1]);1{const i=e[a];Array.isArray(i)&&0<=i.indexOf(t)&&n.push(a)}),n}"number"!=typeof t&&(n=t,t=null),n=m(n||b());var d=Object.keys(e).length;if(!d)return n(null);t||(t=d);var p={},o=0,h=!1,f=!1,y=Object.create(null),g=[],k=[],v={};return Object.keys(e).forEach(t=>{var n=e[t];if(!Array.isArray(n))return a(t,[n]),void k.push(t);var i=n.slice(0,n.length-1),s=i.length;return 0===s?(a(t,n),void k.push(t)):void(v[t]=s,i.forEach(l=>{if(!e[l])throw new Error("async.auto task `"+t+"` has a non-existent dependency `"+l+"` in "+i.join(", "));r(l,()=>{s--,0===s&&a(t,n)})}))}),function(){for(var e,t=0;k.length;)e=k.pop(),t++,u(e).forEach(e=>{0==--v[e]&&k.push(e)});if(t!==d)throw new Error("async.auto cannot execute tasks due to a recursive dependency")}(),i(),n[Ce]}function I(e){let t="",n=0,a=e.indexOf("*/");for(;ne.replace(Ne,"").trim())}function j(e,t){var n={};return Object.keys(e).forEach(t=>{function a(e,t){var n=i.map(t=>e[t]);n.push(t),c(r)(...n)}var i,r=e[t],s=u(r),l=!s&&1===r.length||s&&0===r.length;if(Array.isArray(r))i=[...r],r=i.pop(),n[t]=i.concat(0{r(e,n),t(...a)};f[e].push(n)}function r(e,t){return e?t?void(f[e]=f[e].filter(e=>e!==t)):f[e]=[]:Object.keys(f).forEach(e=>f[e]=[])}function s(e,...t){f[e].forEach(e=>e(...t))}function l(e,t,n,a){function i(e,...t){return e?n?s(e):r():1>=t.length?r(t[0]):void r(t)}if(null!=a&&"function"!=typeof a)throw new Error("task callback must be a function");k.started=!0;var r,s,l=k._createTaskItem(e,n?i:a||i);if(t?k._tasks.unshift(l):k._tasks.push(l),y||(y=!0,_e(()=>{y=!1,k.process()})),n||!a)return new Promise((e,t)=>{r=e,s=t})}function u(e){return function(t,...n){o-=1;for(var a=0,r=e.length;as("drain")),!0)}if(null==t)t=1;else if(0===t)throw new RangeError("Concurrency must not be zero");var p=c(e),o=0,h=[];const f={error:[],drain:[],saturated:[],unsaturated:[],empty:[]};var y=!1;const m=e=>t=>t?void(r(e),a(e,t)):new Promise((t,n)=>{i(e,(e,a)=>e?n(e):void t(a))});var 
g=!1,k={_tasks:new Ve,_createTaskItem(e,t){return{data:e,callback:t}},*[Symbol.iterator](){yield*k._tasks[Symbol.iterator]()},concurrency:t,payload:n,buffer:t/4,started:!1,paused:!1,push(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!1,!1,t)):l(e,!1,!1,t)},pushAsync(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!1,!0,t)):l(e,!1,!0,t)},kill(){r(),k._tasks.empty()},unshift(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!0,!1,t)):l(e,!0,!1,t)},unshiftAsync(e,t){return Array.isArray(e)?d(e)?void 0:e.map(e=>l(e,!0,!0,t)):l(e,!0,!0,t)},remove(e){k._tasks.remove(e)},process(){var e=Math.min;if(!g){for(g=!0;!k.paused&&o{t.apply(n,e.concat((e,...t)=>{a(e,t)}))},(e,t)=>a(e,...t)),a[Ce]}}function P(...e){return C(...e.reverse())}function R(...e){return function(...t){var n=t.pop();return n(null,...e)}}function z(e,t){return(n,a,i,r)=>{var s,l=!1;const u=c(i);n(a,(n,a,i)=>{u(n,(a,r)=>a||!1===a?i(a):e(r)&&!s?(l=!0,s=t(!0,n),i(null,be)):void i())},e=>e?r(e):void r(null,l?s:t(!1)))}}function N(e){return(t,...n)=>c(t)(...n,(t,...n)=>{"object"==typeof console&&(t?console.error&&console.error(t):console[e]&&n.forEach(t=>console[e](t)))})}function V(e,t,n){const a=c(t);return Xe(e,(...e)=>{const t=e.pop();a(...e,(e,n)=>t(e,!n))},n)}function Y(e){return(t,n,a)=>e(t,a)}function q(e){return u(e)?e:function(...t){var n=t.pop(),a=!0;t.push((...e)=>{a?_e(()=>n(...e)):n(...e)}),e.apply(this,t),a=!1}}function D(e,t,n,a){var r=Array(t.length);e(t,(e,t,a)=>{n(e,(e,n)=>{r[t]=!!n,a(e)})},e=>{if(e)return a(e);for(var n=[],s=0;s{n(e,(n,r)=>n?a(n):void(r&&i.push({index:t,value:e}),a(n)))},e=>e?a(e):void a(null,i.sort((e,t)=>e.index-t.index).map(e=>e.value)))}function U(e,t,n,a){var i=y(t)?D:Q;return i(e,t,c(n),a)}function G(e,t,n){return ut(e,1/0,t,n)}function W(e,t,n){return ut(e,1,t,n)}function H(e,t,n){return pt(e,1/0,t,n)}function J(e,t,n){return pt(e,1,t,n)}function K(e,t=e=>e){var a=Object.create(null),r=Object.create(null),s=c(e),l=n((e,n)=>{var u=t(...e);u in a?_e(()=>n(null,...a[u])):u in r?r[u].push(n):(r[u]=[n],s(...e,(e,...t)=>{e||(a[u]=t);var n=r[u];delete r[u];for(var s=0,d=n.length;s{n(e[0],t)},t,1)}function ee(e){return(e<<1)+1}function te(e){return(e+1>>1)-1}function ne(e,t){return e.priority===t.priority?e.pushCount({data:e,priority:t})):{data:e,priority:t}}var a=$(e,t),{push:i,pushAsync:r}=a;return a._tasks=new ht,a._createTaskItem=({data:e,priority:t},n)=>({data:e,priority:t,callback:n}),a.push=function(e,t=0,a){return i(n(e,t),a)},a.pushAsync=function(e,t=0,a){return r(n(e,t),a)},delete a.unshift,delete a.unshiftAsync,a}function ie(e,t,n,a){var i=[...e].reverse();return qe(i,t,n,a)}function re(e){var t=c(e);return n(function(e,n){return e.push((e,...t)=>{let a={};if(e&&(a.error=e),0=t.length&&([i]=t),a.value=i}n(null,a)}),t.apply(this,e)})}function se(e){var t;return Array.isArray(e)?t=e.map(re):(t={},Object.keys(e).forEach(n=>{t[n]=re.call(this,e[n])})),t}function le(e,t,n,a){const i=c(n);return U(e,t,(e,t)=>{i(e,(e,n)=>{t(e,!n)})},a)}function ue(e){return function(){return e}}function de(e,t,n){function a(){r((e,...t)=>{!1===e||(e&&s++arguments.length&&"function"==typeof e?(n=t||b(),t=e):(pe(i,e),n=n||b()),"function"!=typeof t)throw new Error("Invalid arguments for async.retry");var r=c(t),s=1;return a(),n[Ce]}function pe(e,n){if("object"==typeof n)e.times=+n.times||kt,e.intervalFunc="function"==typeof n.interval?n.interval:ue(+n.interval||vt),e.errorFilter=n.errorFilter;else if("number"==typeof n||"string"==typeof n)e.times=+n||kt;else throw new Error("Invalid arguments for 
async.retry")}function ce(e,t){t||(t=e,e=null);let a=e&&e.arity||t.length;u(t)&&(a+=1);var i=c(t);return n((t,n)=>{function r(e){i(...t,e)}return(t.length{var s,l=!1;n.push((...e)=>{l||(r(...e),clearTimeout(s))}),s=setTimeout(function(){var t=e.name||"anonymous",n=new Error("Callback function \""+t+"\" timed out.");n.code="ETIMEDOUT",a&&(n.info=a),l=!0,r(n)},t),i(...n)})}function fe(e){for(var t=Array(e);e--;)t[e]=e;return t}function ye(e,t,n,a){var i=c(n);return De(fe(e),t,i,a)}function me(e,t,n){return ye(e,1/0,t,n)}function ge(e,t,n){return ye(e,1,t,n)}function ke(e,t,n,a){3>=arguments.length&&"function"==typeof t&&(a=n,n=t,t=Array.isArray(e)?[]:{}),a=m(a||b());var i=c(n);return Me(e,(e,n,a)=>{i(t,e,n,a)},e=>a(e,t)),a[Ce]}function ve(e){return(...t)=>(e.unmemoized||e)(...t)}function Se(e,t,n){const a=c(e);return _t(e=>a((t,n)=>e(t,!n)),t,n)}var xe,Le="function"==typeof queueMicrotask&&queueMicrotask,Ee="function"==typeof setImmediate&&setImmediate,Oe="object"==typeof process&&"function"==typeof process.nextTick;xe=Le?queueMicrotask:Ee?setImmediate:Oe?process.nextTick:a;var _e=i(xe);const be={};var Ae=e=>(t,n,a)=>{function i(e,t){if(!u)if(c-=1,e)l=!0,a(e);else if(!1===e)l=!0,u=!0;else{if(t===be||l&&0>=c)return l=!0,a(null);o||r()}}function r(){for(o=!0;c=c&&a(null));c+=1,n(t.value,t.key,L(i))}o=!1}if(a=m(a),0>=e)throw new RangeError("concurrency limit cannot be less than 1");if(!t)return a(null);if(d(t))return E(t,e,n,a);if(p(t))return E(t[Symbol.asyncIterator](),e,n,a);var s=x(t),l=!1,u=!1,c=0,o=!1;r()},Ie=o(function(e,t,n,a){return Ae(t)(e,c(n),a)},4),Me=o(function(e,t,n){var a=y(e)?O:_;return a(e,c(t),n)},3),je=o(function(e,t,n){return f(Me,e,t,n)},3),we=h(je),Be=o(function(e,t,n){return Ie(e,1,t,n)},3),Te=o(function(e,t,n){return f(Be,e,t,n)},3),Fe=h(Te);const Ce=Symbol("promiseCallback");var Pe=/^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/,Re=/^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/,ze=/,/,Ne=/(=.+)?(\s*)$/;class Ve{constructor(){this.head=this.tail=null,this.length=0}removeLink(e){return e.prev?e.prev.next=e.next:this.head=e.next,e.next?e.next.prev=e.prev:this.tail=e.prev,e.prev=e.next=null,this.length-=1,e}empty(){for(;this.head;)this.shift();return this}insertAfter(e,t){t.prev=e,t.next=e.next,e.next?e.next.prev=t:this.tail=t,e.next=t,this.length+=1}insertBefore(e,t){t.prev=e.prev,t.next=e,e.prev?e.prev.next=t:this.head=t,e.prev=t,this.length+=1}unshift(e){this.head?this.insertBefore(this.head,e):w(this,e)}push(e){this.tail?this.insertAfter(this.tail,e):w(this,e)}shift(){return this.head&&this.removeLink(this.head)}pop(){return this.tail&&this.removeLink(this.tail)}toArray(){return[...this]}*[Symbol.iterator](){for(var e=this.head;e;)yield e.data,e=e.next}remove(e){for(var t=this.head;t;){var{next:n}=t;e(t)&&this.removeLink(t),t=n}return this}}var Ye,qe=o(function(e,t,n,a){a=m(a);var r=c(n);return Be(e,(e,n,a)=>{r(t,e,(e,n)=>{t=n,a(e)})},e=>a(e,t))},4),De=o(function(e,t,n,a){return f(Ae(t),e,n,a)},4),Qe=o(function(e,t,n,a){var i=c(n);return De(e,t,(e,t)=>{i(e,(e,...n)=>e?t(e):t(e,n))},(e,t)=>{for(var n=[],r=0;re,(e,t)=>t)(Me,e,t,n)},3),He=o(function(e,t,n,a){return z(e=>e,(e,t)=>t)(Ae(t),e,n,a)},4),Je=o(function(e,t,n){return z(e=>e,(e,t)=>t)(Ae(1),e,t,n)},3),Ke=N("dir"),Xe=o(function(e,t,n){function a(e,...t){return e?n(e):void(!1===e||(r=t,l(...t,i)))}function i(e,t){return e?n(e):!1===e?void 0:t?void s(a):n(null,...r)}n=L(n);var r,s=c(e),l=c(t);return i(null,!0)},3),Ze=o(function(e,t,n){return Me(e,Y(c(t)),n)},3),$e=o(function(e,t,n,a){return 
Ae(t)(e,Y(c(n)),a)},4),et=o(function(e,t,n){return $e(e,1,t,n)},3),tt=o(function(e,t,n){return z(e=>!e,e=>!e)(Me,e,t,n)},3),nt=o(function(e,t,n,a){return z(e=>!e,e=>!e)(Ae(t),e,n,a)},4),at=o(function(e,t,n){return z(e=>!e,e=>!e)(Be,e,t,n)},3),it=o(function(e,t,n){return U(Me,e,t,n)},3),rt=o(function(e,t,n,a){return U(Ae(t),e,n,a)},4),st=o(function(e,t,n){return U(Be,e,t,n)},3),lt=o(function(e,t){function n(e){return e?a(e):void(!1===e||i(n))}var a=L(t),i=c(q(e));return n()},2),ut=o(function(e,t,n,a){var i=c(n);return De(e,t,(e,t)=>{i(e,(n,a)=>n?t(n):t(n,{key:a,val:e}))},(e,t)=>{for(var n={},{hasOwnProperty:r}=Object.prototype,s=0;s{r(e,t,(e,a)=>e?n(e):void(i[t]=a,n(e)))},e=>a(e,i))},4);Ye=Oe?process.nextTick:Ee?setImmediate:a;var ct=i(Ye),ot=o((e,t,n)=>{var a=y(t)?[]:{};e(t,(e,t,n)=>{c(e)((e,...i)=>{2>i.length&&([i]=i),a[t]=i,n(e)})},e=>n(e,a))},3);class ht{constructor(){this.heap=[],this.pushCount=Number.MIN_SAFE_INTEGER}get length(){return this.heap.length}empty(){return this.heap=[],this}percUp(e){for(let n;0e)(Me,e,t,n)},3),xt=o(function(e,t,n,a){return z(Boolean,e=>e)(Ae(t),e,n,a)},4),Lt=o(function(e,t,n){return z(Boolean,e=>e)(Be,e,t,n)},3),Et=o(function(e,t,n){function a(e,t){var n=e.criteria,a=t.criteria;return na?1:0}var i=c(t);return je(e,(e,t)=>{i(e,(n,a)=>n?t(n):void t(n,{value:e,criteria:a}))},(e,t)=>e?n(e):void n(null,t.sort(a).map(e=>e.value)))},3),Ot=o(function(e,t){var n,a=null;return et(e,(e,t)=>{c(e)((e,...i)=>!1===e?t(e):void(2>i.length?[n]=i:n=i,a=e,t(e?null:{})))},()=>t(a,n))}),_t=o(function(e,t,n){function a(e,...t){if(e)return n(e);l=t;!1===e||s(i)}function i(e,t){return e?n(e):!1===e?void 0:t?void r(a):n(null,...l)}n=L(n);var r=c(t),s=c(e),l=[];return s(i)},3),bt=o(function(e,t){function n(t){var n=c(e[i++]);n(...t,L(a))}function a(a,...r){return!1===a?void 0:a||i===e.length?t(a,...r):void n(r)}if(t=m(t),!Array.isArray(e))return t(new Error("First argument to waterfall must be an array of functions"));if(!e.length)return t();var 
i=0;n([])});e.default={apply:t,applyEach:we,applyEachSeries:Fe,asyncify:r,auto:A,autoInject:j,cargo:T,cargoQueue:F,compose:P,concat:Ue,concatLimit:Qe,concatSeries:Ge,constant:R,detect:We,detectLimit:He,detectSeries:Je,dir:Ke,doUntil:V,doWhilst:Xe,each:Ze,eachLimit:$e,eachOf:Me,eachOfLimit:Ie,eachOfSeries:Be,eachSeries:et,ensureAsync:q,every:tt,everyLimit:nt,everySeries:at,filter:it,filterLimit:rt,filterSeries:st,forever:lt,groupBy:G,groupByLimit:ut,groupBySeries:W,log:dt,map:je,mapLimit:De,mapSeries:Te,mapValues:H,mapValuesLimit:pt,mapValuesSeries:J,memoize:K,nextTick:ct,parallel:X,parallelLimit:Z,priorityQueue:ae,queue:$,race:ft,reduce:qe,reduceRight:ie,reflect:re,reflectAll:se,reject:yt,rejectLimit:mt,rejectSeries:gt,retry:de,retryable:ce,seq:C,series:oe,setImmediate:_e,some:St,someLimit:xt,someSeries:Lt,sortBy:Et,timeout:he,times:me,timesLimit:ye,timesSeries:ge,transform:ke,tryEach:Ot,unmemoize:ve,until:Se,waterfall:bt,whilst:_t,all:tt,allLimit:nt,allSeries:at,any:St,anyLimit:xt,anySeries:Lt,find:We,findLimit:He,findSeries:Je,flatMap:Ue,flatMapLimit:Qe,flatMapSeries:Ge,forEach:Ze,forEachSeries:et,forEachLimit:$e,forEachOf:Me,forEachOfSeries:Be,forEachOfLimit:Ie,inject:qe,foldl:qe,foldr:ie,select:it,selectLimit:rt,selectSeries:st,wrapSync:r,during:_t,doDuring:Xe},e.apply=t,e.applyEach=we,e.applyEachSeries=Fe,e.asyncify=r,e.auto=A,e.autoInject=j,e.cargo=T,e.cargoQueue=F,e.compose=P,e.concat=Ue,e.concatLimit=Qe,e.concatSeries=Ge,e.constant=R,e.detect=We,e.detectLimit=He,e.detectSeries=Je,e.dir=Ke,e.doUntil=V,e.doWhilst=Xe,e.each=Ze,e.eachLimit=$e,e.eachOf=Me,e.eachOfLimit=Ie,e.eachOfSeries=Be,e.eachSeries=et,e.ensureAsync=q,e.every=tt,e.everyLimit=nt,e.everySeries=at,e.filter=it,e.filterLimit=rt,e.filterSeries=st,e.forever=lt,e.groupBy=G,e.groupByLimit=ut,e.groupBySeries=W,e.log=dt,e.map=je,e.mapLimit=De,e.mapSeries=Te,e.mapValues=H,e.mapValuesLimit=pt,e.mapValuesSeries=J,e.memoize=K,e.nextTick=ct,e.parallel=X,e.parallelLimit=Z,e.priorityQueue=ae,e.queue=$,e.race=ft,e.reduce=qe,e.reduceRight=ie,e.reflect=re,e.reflectAll=se,e.reject=yt,e.rejectLimit=mt,e.rejectSeries=gt,e.retry=de,e.retryable=ce,e.seq=C,e.series=oe,e.setImmediate=_e,e.some=St,e.someLimit=xt,e.someSeries=Lt,e.sortBy=Et,e.timeout=he,e.times=me,e.timesLimit=ye,e.timesSeries=ge,e.transform=ke,e.tryEach=Ot,e.unmemoize=ve,e.until=Se,e.waterfall=bt,e.whilst=_t,e.all=tt,e.allLimit=nt,e.allSeries=at,e.any=St,e.anyLimit=xt,e.anySeries=Lt,e.find=We,e.findLimit=He,e.findSeries=Je,e.flatMap=Ue,e.flatMapLimit=Qe,e.flatMapSeries=Ge,e.forEach=Ze,e.forEachSeries=et,e.forEachLimit=$e,e.forEachOf=Me,e.forEachOfSeries=Be,e.forEachOfLimit=Ie,e.inject=qe,e.foldl=qe,e.foldr=ie,e.select=it,e.selectLimit=rt,e.selectSeries=st,e.wrapSync=r,e.during=_t,e.doDuring=Xe,Object.defineProperty(e,"__esModule",{value:!0})}); \ No newline at end of file diff --git a/node_modules/async/dist/async.mjs b/node_modules/async/dist/async.mjs new file mode 100644 index 00000000..d0cd59d9 --- /dev/null +++ b/node_modules/async/dist/async.mjs @@ -0,0 +1,5947 @@ +/** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... 
- Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ +function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); +} + +function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; +} + +/* istanbul ignore file */ + +var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; +var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; +var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + +function fallback(fn) { + setTimeout(fn, 0); +} + +function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); +} + +var _defer; + +if (hasQueueMicrotask) { + _defer = queueMicrotask; +} else if (hasSetImmediate) { + _defer = setImmediate; +} else if (hasNextTick) { + _defer = process.nextTick; +} else { + _defer = fallback; +} + +var setImmediate$1 = wrap(_defer); + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. 
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } + + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } +} + +function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; +} + +function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; +} + +function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; +} + +function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; +} + +// conditionally promisify a function. +// only return a promise if a callback is omitted +function awaitify (asyncFn, arity = asyncFn.length) { + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } + + return awaitable +} + +function applyEach (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; +} + +function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); +} + +function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; +} + +// A temporary value used to identify if the loop should be broken. +// See #1064, #1293 +const breakLoop = {}; + +function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper +} + +function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); +} + +function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? 
{value: coll[i], key: i} : null; + } +} + +function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } +} + +function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? {value: obj[key], key} : null; + }; +} + +function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } + + var iterator = getIterator(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); +} + +function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; +} + +// for async generators +function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) + + if (err === false) { + done = true; + canceled = true; + return + } + + if (result === breakLoop || (done && running <= 0)) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } + + replenish(); +} + +var eachOfLimit = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } + + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; +}; + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + 
* time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); +} + +var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$2(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); +} + +var eachOf$1 = awaitify(eachOf, 3); + +/** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. 
However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) +} +var map$1 = awaitify(map, 3); + +/** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. 
If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ +var applyEach$1 = applyEach(map$1); + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$2(coll, 1, iteratee, callback) +} +var eachOfSeries$1 = awaitify(eachOfSeries, 3); + +/** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) +} +var mapSeries$1 = awaitify(mapSeries, 3); + +/** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. 
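+ *
+ * // Editor's illustration (not from the upstream async source): a hedged usage
+ * // sketch mirroring the applyEach example above — the partially applied
+ * // function runs enableSearch and updateSchema one after another instead of
+ * // in parallel:
+ * //
+ * //   const appliedFn = async.applyEachSeries([enableSearch, updateSchema], 'bucket');
+ * //   appliedFn((err, results) => {
+ * //     // results[0] is from enableSearch, results[1] is from updateSchema
+ * //   });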
+ * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. + */ +var applyEachSeries = applyEach(mapSeries$1); + +const PROMISE_SYMBOL = Symbol('promiseCallback'); + +function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); + + return callback +} + +/** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). 
+ * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. + callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // 
http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } + + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[PROMISE_SYMBOL] +} + +var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; +var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; +var FN_ARG_SPLIT = /,/; +var FN_ARG = /(=.+)?(\s*)$/; + +function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index+1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = (endIndex === -1) ? string.length : endIndex; + } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; +} + +function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); +} + +/** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. 
+ * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ +function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); + + return auto(newTasks, callback); +} + +// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation +// used for queues. 
This implementation assumes that the node provided by the user can be modified +// to adjust the next and last properties. We implement only the minimal functionality +// for queue support. +class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty () { + while(this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } + + shift() { + return this.head && this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this] + } + + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } +} + +function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; +} + +function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on (event, handler) { + events[event].push(handler); + } + + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } + + var item = q._createTaskItem( + data, + rejectOnError ? 
promiseCallback : + (callback || promiseCallback) + ); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } + + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); + + }; + + var isProcessing = false; + var q = { + _tasks: new DLL(), + _createTaskItem (data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). 
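+            // `isProcessing` is a re-entrancy guard. The loop below drains the
+            // task list, handing each worker call a batch of up to `payload`
+            // items (or the whole backlog when no payload is set), never running
+            // more than `concurrency` workers, and firing the 'empty' and
+            // 'saturated' events as those thresholds are reached.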
+ if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; +} + +/** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. 
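+ *
+ * As a minimal sketch of those lifecycle properties (`myWorker` stands in for
+ * any cargo worker such as the one in the example below), the `drain` event
+ * fires once the queue has emptied out:
+ *
+ * var cargo = async.cargo(myWorker, 2);
+ * cargo.drain(() => console.log('all batches have been processed'));
+ *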
+ * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = once(callback); + var _iteratee = wrapAsync(iteratee); + return eachOfSeries$1(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +var reduce$1 = awaitify(reduce, 4); + +/** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ +function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } + + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); + + return cb[PROMISE_SYMBOL] + }; +} + +/** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. 
Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ +function compose(...args) { + return seq(...args.reverse()); +} + +/** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) +} +var mapLimit$1 = awaitify(mapLimit, 4); + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
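+ *
+ * As a minimal sketch (borrowing the `fs.readdir` iteratee from the `concat`
+ * example below), this reads no more than two directories at a time:
+ *
+ * async.concatLimit(['dir1', 'dir2', 'dir3'], 2, fs.readdir, (err, files) => {
+ *     // files is a single flattened array of directory entries
+ * });
+ *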
+ * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +var concatLimit$1 = awaitify(concatLimit, 4); + +/** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, 
callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) +} +var concat$1 = awaitify(concat, 3); + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) +} +var concatSeries$1 = awaitify(concatSeries, 3); + +/** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ +function constant(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; +} + +function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; +} + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. 
+ * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) +} +var detect$1 = awaitify(detect, 3); + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) +} +var detectLimit$1 = awaitify(detectLimit, 4); + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. 
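+ *
+ * As a minimal sketch (reusing the `fileExists` check from the `detect`
+ * example above), the files are tested one at a time, so the result is the
+ * first match in collection order:
+ *
+ * async.detectSeries(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, (err, result) => {
+ *     // result is the first file in the list that exists
+ * });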
+ * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) +} + +var detectSeries$1 = awaitify(detectSeries, 3); + +function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }) +} + +/** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ +var dir = consoleFunc('dir'); + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +var doWhilst$1 = awaitify(doWhilst, 3); + +/** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); +} + +function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); +} + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); +} + +var each = awaitify(eachLimit, 3); + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); +} +var eachLimit$2 = awaitify(eachLimit$1, 4); + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. 
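+ *
+ * For example (reusing the `deleteFile` helper and `fileList` from the
+ * `each` example above), the files are removed strictly one after another:
+ *
+ * async.eachSeries(fileList, deleteFile, err => {
+ *     // called after every file has been deleted, or with the first error
+ * });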
+ * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return eachLimit$2(coll, 1, iteratee, callback) +} +var eachSeries$1 = awaitify(eachSeries, 3); + +/** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ +function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; +} + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) +} +var every$1 = awaitify(every, 3); + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) +} +var everyLimit$1 = awaitify(everyLimit, 4); + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. 
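+ *
+ * As a minimal sketch (reusing `fileList` and the `fileExists` test from the
+ * `every` example above), the files are checked one after another:
+ *
+ * async.everySeries(fileList, fileExists, (err, allExist) => {
+ *     // allExist is true only if every file passed the test
+ * });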
+ * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) +} +var everySeries$1 = awaitify(everySeries, 3); + +function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); +} + +function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); +} + +function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); +} + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) +} +var filter$1 = awaitify(filter, 3); + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ +function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit(limit), coll, iteratee, callback) +} +var filterLimit$1 = awaitify(filterLimit, 4); + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries (coll, iteratee, callback) { + return _filter(eachOfSeries$1, coll, iteratee, callback) +} +var filterSeries$1 = awaitify(filterSeries, 3); + +/** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ +function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); +} +var forever$1 = awaitify(forever, 2); + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ +function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); +} + +var groupByLimit$1 = awaitify(groupByLimit, 4); + +/** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. 
That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) +} + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). 
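+ * For instance (an illustrative sketch rather than an official example), the
+ * `detectFile` helper from the `groupBy` example above could be applied to
+ * the files one at a time:
+ *
+ *     async.groupBySeries(files, detectFile, function(err, result) {
+ *         // result groups the files by type, with the same shape as groupBy
+ *     });
+ *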
+ * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ +function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) +} + +/** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ +var log = consoleFunc('log'); + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); +} + +var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + +/** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) +} + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. 
+ * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) +} + +/** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. + * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ +function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; +} + +/* istanbul ignore file */ + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... 
- any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +var _defer$1; + +if (hasNextTick) { + _defer$1 = process.nextTick; +} else if (hasSetImmediate) { + _defer$1 = setImmediate; +} else { + _defer$1 = fallback; +} + +var nextTick = wrap(_defer$1); + +var parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); +}, 3); + +/** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function parallel$1(tasks, callback) { + return parallel(eachOf$1, tasks, callback); +} + +/** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + */ +function parallelLimit(tasks, limit, callback) { + return parallel(eachOfLimit(limit), tasks, callback); +} + +/** + * A queue of tasks for the worker function to complete. 
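+ * (This is the object returned by `queue` and, with the differences noted
+ * there, by `priorityQueue` below.)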
+ * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. 
+ * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ + +/** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. + * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. + * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ +function queue$1 (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); +} + +// Binary min-heap implementation used for priority queue. 
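+// The heap is stored as a flat array: the children of the node at index i
+// live at indexes 2*i+1 and 2*i+2, and its parent at ((i+1)>>1)-1 (see the
+// leftChi and parent helpers below).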
+// Implementation is stable, i.e. push time is considered for equal priorities +class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty () { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } +} + +function leftChi(i) { + return (i<<1)+1; +} + +function parent(i) { + return ((i+1)>>1)-1; +} + +function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } +} + +/** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. + * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. 
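+ *
+ * @example
+ *
+ * // a minimal usage sketch (illustrative only; `fetchPage` is a hypothetical
+ * // async function, not part of this library)
+ * var q = async.priorityQueue(function(task, callback) {
+ *     fetchPage(task.url, callback);
+ * }, 2);
+ *
+ * // lower priority values are processed first
+ * q.push({url: 'https://example.com/a'}, 1);
+ * q.push({url: 'https://example.com/c'}, 3);
+ * // an array of tasks all gets the same priority
+ * q.push([{url: 'https://example.com/b1'}, {url: 'https://example.com/b2'}], 2);
+ *
+ * q.drain(() => {
+ *     console.log('all items have been processed');
+ * });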
+ */ +function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue$1(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new Heap(); + q._createTaskItem = ({data, priority}, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return {data: tasks, priority}; + } + return tasks.map(data => { return {data, priority}; }); + } + + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function(data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; +} + +/** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ +function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } +} + +var race$1 = awaitify(race, 2); + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
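+ * @example
+ *
+ * // an illustrative sketch (not an official example): concatenating strings
+ * // from right to left
+ * async.reduceRight(['a', 'b', 'c'], '', function(memo, item, callback) {
+ *     setTimeout(function() {
+ *         callback(null, memo + item);
+ *     }, 10);
+ * }, function(err, result) {
+ *     // result is 'cba', because the array is reduced in reverse order
+ * });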
+ * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); +} + +/** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ +function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); +} + +/** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ +function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; +} + +function reject(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); +} + +/** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
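+ * (Internally this simply negates each truth value and reuses the same
+ * machinery as `filter`, via the `reject` helper above, so `results`
+ * preserves the input order just like `filter` does.)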
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function reject$1 (coll, iteratee, callback) { + return reject(eachOf$1, coll, iteratee, callback) +} +var reject$2 = awaitify(reject$1, 3); + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function rejectLimit (coll, limit, iteratee, callback) { + return reject(eachOfLimit(limit), coll, iteratee, callback) +} +var rejectLimit$1 = awaitify(rejectLimit, 4); + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
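+ * @example
+ *
+ * // an illustrative sketch (not an official example), reusing the fileExists
+ * // check from the reject example above; the files are tested one at a time
+ * async.rejectSeries(['dir1/file1.txt', 'dir3/file6.txt'], fileExists,
+ *     function(err, results) {
+ *         // results now equals [ 'dir3/file6.txt' ], the non-existing file
+ *     }
+ * );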
+ * @returns {Promise} a promise, if no callback is passed + */ +function rejectSeries (coll, iteratee, callback) { + return reject(eachOfSeries$1, coll, iteratee, callback) +} +var rejectSeries$1 = awaitify(rejectSeries, 3); + +function constant$1(value) { + return function () { + return value; + } +} + +/** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... 
milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ +const DEFAULT_TIMES = 5; +const DEFAULT_INTERVAL = 0; + +function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant$1(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = wrapAsync(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[PROMISE_SYMBOL] +} + +function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant$1(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } +} + +/** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. 
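+ * (Note that the wrapper relies on `task.length`, or on `opts.arity` when
+ * given, to decide whether a final callback was supplied; if `task` is
+ * declared with rest or default parameters, `task.length` under-counts them,
+ * so `opts.arity` should be set explicitly in that case.)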
+ * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ +function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); + + return callback[PROMISE_SYMBOL] + }); +} + +/** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). 
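+ * (Under the hood `series` uses the same task runner as `parallel`, driven by
+ * `eachOfSeries$1` instead, so results are collected into an array or object
+ * in exactly the same way; see the function body below.)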
+ * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function series(tasks, callback) { + return parallel(eachOfSeries$1, tasks, callback); +} + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) +} +var some$1 = awaitify(some, 3); + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
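+ * @example
+ *
+ * // Editor's sketch, not part of the upstream async docs: reuses the
+ * // fileExists(file, callback) truth test defined in the some() example
+ * // above, but runs at most 2 existence checks at a time.
+ * async.someLimit(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], 2, fileExists,
+ *     function(err, result) {
+ *         console.log(result);
+ *         // true, because dir3/file5.txt exists
+ *     }
+ * );
+ *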
+ * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) +} +var someLimit$1 = awaitify(someLimit, 4); + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) +} +var someSeries$1 = awaitify(someSeries, 3); + +/** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + } +} +var sortBy$1 = awaitify(sortBy, 3); + +/** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... + * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ +function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); + + return initialParams((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); +} + +function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; +} + +/** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); +} + +/** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. 
+ * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ +function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) +} + +/** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) +} + +/** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. + * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? [] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); + + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] +} + +/** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. 
+ * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ +function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); +} + +var tryEach$1 = awaitify(tryEach); + +/** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. + * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ +function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; +} + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +var whilst$1 = awaitify(whilst, 3); + +/** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ +function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); +} + +/** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ +function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } + + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); +} + +var waterfall$1 = awaitify(waterfall); + +/** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. 
Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ + +var index = { + apply, + applyEach: applyEach$1, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo, + cargoQueue: cargo$1, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$2, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$2, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel: parallel$1, + parallelLimit, + priorityQueue, + queue: queue$1, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$2, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, + + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$2, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: eachOfLimit$2, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 +}; + +export default index; +export { apply, applyEach$1 as applyEach, applyEachSeries, asyncify, auto, autoInject, cargo, cargo$1 as cargoQueue, compose, concat$1 as concat, concatLimit$1 as concatLimit, concatSeries$1 as concatSeries, constant, detect$1 as detect, detectLimit$1 as detectLimit, detectSeries$1 as detectSeries, dir, doUntil, doWhilst$1 as doWhilst, each, eachLimit$2 as eachLimit, eachOf$1 as eachOf, eachOfLimit$2 as eachOfLimit, eachOfSeries$1 as eachOfSeries, eachSeries$1 as eachSeries, ensureAsync, every$1 as every, everyLimit$1 as everyLimit, everySeries$1 as everySeries, filter$1 as filter, filterLimit$1 as filterLimit, filterSeries$1 as filterSeries, forever$1 as forever, groupBy, groupByLimit$1 as groupByLimit, groupBySeries, log, map$1 as map, mapLimit$1 as mapLimit, mapSeries$1 as mapSeries, mapValues, mapValuesLimit$1 as mapValuesLimit, mapValuesSeries, memoize, nextTick, parallel$1 as parallel, parallelLimit, priorityQueue, queue$1 as queue, race$1 as race, reduce$1 as reduce, reduceRight, reflect, reflectAll, reject$2 as reject, rejectLimit$1 as rejectLimit, rejectSeries$1 as rejectSeries, 
retry, retryable, seq, series, setImmediate$1 as setImmediate, some$1 as some, someLimit$1 as someLimit, someSeries$1 as someSeries, sortBy$1 as sortBy, timeout, times, timesLimit, timesSeries, transform, tryEach$1 as tryEach, unmemoize, until, waterfall$1 as waterfall, whilst$1 as whilst, every$1 as all, everyLimit$1 as allLimit, everySeries$1 as allSeries, some$1 as any, someLimit$1 as anyLimit, someSeries$1 as anySeries, detect$1 as find, detectLimit$1 as findLimit, detectSeries$1 as findSeries, concat$1 as flatMap, concatLimit$1 as flatMapLimit, concatSeries$1 as flatMapSeries, each as forEach, eachSeries$1 as forEachSeries, eachLimit$2 as forEachLimit, eachOf$1 as forEachOf, eachOfSeries$1 as forEachOfSeries, eachOfLimit$2 as forEachOfLimit, reduce$1 as inject, reduce$1 as foldl, reduceRight as foldr, filter$1 as select, filterLimit$1 as selectLimit, filterSeries$1 as selectSeries, asyncify as wrapSync, whilst$1 as during, doWhilst$1 as doDuring }; diff --git a/node_modules/async/doDuring.js b/node_modules/async/doDuring.js new file mode 100644 index 00000000..4c98e9e1 --- /dev/null +++ b/node_modules/async/doDuring.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +exports.default = (0, _awaitify2.default)(doWhilst, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/doUntil.js b/node_modules/async/doUntil.js new file mode 100644 index 00000000..8aa09350 --- /dev/null +++ b/node_modules/async/doUntil.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = doUntil; + +var _doWhilst = require('./doWhilst.js'); + +var _doWhilst2 = _interopRequireDefault(_doWhilst); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doUntil(iteratee, test, callback) { + const _test = (0, _wrapAsync2.default)(test); + return (0, _doWhilst2.default)(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb(err, !truth)); + }, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/doWhilst.js b/node_modules/async/doWhilst.js new file mode 100644 index 00000000..4c98e9e1 --- /dev/null +++ b/node_modules/async/doWhilst.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. 
To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +exports.default = (0, _awaitify2.default)(doWhilst, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/during.js b/node_modules/async/during.js new file mode 100644 index 00000000..32a47762 --- /dev/null +++ b/node_modules/async/during.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +exports.default = (0, _awaitify2.default)(whilst, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/each.js b/node_modules/async/each.js new file mode 100644 index 00000000..405d495c --- /dev/null +++ b/node_modules/async/each.js @@ -0,0 +1,129 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} + +exports.default = (0, _awaitify2.default)(eachLimit, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachLimit.js b/node_modules/async/eachLimit.js new file mode 100644 index 00000000..5f3d0094 --- /dev/null +++ b/node_modules/async/eachLimit.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. 
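+ *
+ * @example
+ *
+ * // Editor's sketch, not part of the upstream async docs: delete the files
+ * // from the each() example, but never more than 2 at a time.
+ * // fileList and deleteFile(file, callback) are the helpers defined there.
+ * async.eachLimit(fileList, 2, deleteFile, function(err) {
+ *     if (err) {
+ *         console.log(err);
+ *     } else {
+ *         console.log('All files have been deleted successfully');
+ *     }
+ * });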
+ * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} +exports.default = (0, _awaitify2.default)(eachLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOf.js b/node_modules/async/eachOf.js new file mode 100644 index 00000000..c22614f3 --- /dev/null +++ b/node_modules/async/eachOf.js @@ -0,0 +1,185 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./internal/isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _breakLoop = require('./internal/breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = (0, _once2.default)(callback); + var index = 0, + completed = 0, + { length } = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return; + if (err) { + callback(err); + } else if (++completed === length || value === _breakLoop2.default) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? 
eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOf, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOfLimit.js b/node_modules/async/eachOfLimit.js new file mode 100644 index 00000000..e9fc4db8 --- /dev/null +++ b/node_modules/async/eachOfLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit2 = require('./internal/eachOfLimit.js'); + +var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOfLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachOfSeries.js b/node_modules/async/eachOfSeries.js new file mode 100644 index 00000000..cfb0f33c --- /dev/null +++ b/node_modules/async/eachOfSeries.js @@ -0,0 +1,39 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
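+ * @example
+ *
+ * // Editor's sketch, not part of the upstream async docs: like the
+ * // forEachOf example above, but the config files are parsed one at a
+ * // time instead of in parallel. configs, validConfigFileMap and
+ * // parseFile(file, key, callback) are the helpers defined in that example.
+ * async.eachOfSeries(validConfigFileMap, parseFile, function (err) {
+ *     if (err) {
+ *         console.error(err);
+ *     } else {
+ *         console.log(configs);
+ *     }
+ * });
+ *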
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachOfSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/eachSeries.js b/node_modules/async/eachSeries.js new file mode 100644 index 00000000..d674d0c3 --- /dev/null +++ b/node_modules/async/eachSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachLimit = require('./eachLimit.js'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return (0, _eachLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/ensureAsync.js b/node_modules/async/ensureAsync.js new file mode 100644 index 00000000..ad8beb52 --- /dev/null +++ b/node_modules/async/ensureAsync.js @@ -0,0 +1,67 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = ensureAsync; + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. 
+ * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ +function ensureAsync(fn) { + if ((0, _wrapAsync.isAsync)(fn)) return fn; + return function (...args /*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + (0, _setImmediate2.default)(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/every.js b/node_modules/async/every.js new file mode 100644 index 00000000..148db683 --- /dev/null +++ b/node_modules/async/every.js @@ -0,0 +1,119 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(every, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/everyLimit.js b/node_modules/async/everyLimit.js new file mode 100644 index 00000000..25b2c089 --- /dev/null +++ b/node_modules/async/everyLimit.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. 
+ * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everyLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/everySeries.js b/node_modules/async/everySeries.js new file mode 100644 index 00000000..147c3dc5 --- /dev/null +++ b/node_modules/async/everySeries.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everySeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filter.js b/node_modules/async/filter.js new file mode 100644 index 00000000..303dc1fb --- /dev/null +++ b/node_modules/async/filter.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. 
+ * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filter, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filterLimit.js b/node_modules/async/filterLimit.js new file mode 100644 index 00000000..89e55f53 --- /dev/null +++ b/node_modules/async/filterLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. 
+ * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ +function filterLimit(coll, limit, iteratee, callback) { + return (0, _filter3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/filterSeries.js b/node_modules/async/filterSeries.js new file mode 100644 index 00000000..a045e52c --- /dev/null +++ b/node_modules/async/filterSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/find.js b/node_modules/async/find.js new file mode 100644 index 00000000..05b2e5c6 --- /dev/null +++ b/node_modules/async/find.js @@ -0,0 +1,96 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. 
+ + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detect, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/findLimit.js b/node_modules/async/findLimit.js new file mode 100644 index 00000000..db6961ec --- /dev/null +++ b/node_modules/async/findLimit.js @@ -0,0 +1,48 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detectLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/findSeries.js b/node_modules/async/findSeries.js new file mode 100644 index 00000000..b9131b4a --- /dev/null +++ b/node_modules/async/findSeries.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(1), coll, iteratee, callback); +} + +exports.default = (0, _awaitify2.default)(detectSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/flatMap.js b/node_modules/async/flatMap.js new file mode 100644 index 00000000..8eed1ac8 --- /dev/null +++ b/node_modules/async/flatMap.js @@ -0,0 +1,115 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, Infinity, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concat, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/flatMapLimit.js b/node_modules/async/flatMapLimit.js new file mode 100644 index 00000000..3d170f17 --- /dev/null +++ b/node_modules/async/flatMapLimit.js @@ -0,0 +1,60 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _wrapAsync = 
require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +exports.default = (0, _awaitify2.default)(concatLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/flatMapSeries.js b/node_modules/async/flatMapSeries.js new file mode 100644 index 00000000..84add3b0 --- /dev/null +++ b/node_modules/async/flatMapSeries.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
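+ *
+ * A minimal usage sketch, assuming directories dir1, dir2 and dir3 exist
+ * (as in the concat example):
+ *
+ *     const fs = require('fs');
+ *     const async = require('async');
+ *
+ *     // Directories are read one at a time; the per-directory arrays are
+ *     // still concatenated into a single flat list of file names.
+ *     async.concatSeries(['dir1', 'dir2', 'dir3'], fs.readdir, (err, files) => {
+ *         if (err) return console.error(err);
+ *         console.log(files);
+ *     });
+ *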
+ * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concatSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/foldl.js b/node_modules/async/foldl.js new file mode 100644 index 00000000..56e2db81 --- /dev/null +++ b/node_modules/async/foldl.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/foldr.js b/node_modules/async/foldr.js new file mode 100644 index 00000000..bee5391d --- /dev/null +++ b/node_modules/async/foldr.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reduceRight; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. 
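+ *
+ * A minimal usage sketch with a toy iteratee that appends strings
+ * asynchronously (illustrative values only):
+ *
+ *     const async = require('async');
+ *
+ *     // The array is reversed first, so the result is 'cba'.
+ *     async.reduceRight(['a', 'b', 'c'], '', (memo, item, cb) => {
+ *         setImmediate(() => cb(null, memo + item));
+ *     }, (err, result) => {
+ *         console.log(result); // 'cba'
+ *     });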
+ * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight(array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return (0, _reduce2.default)(reversed, memo, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEach.js b/node_modules/async/forEach.js new file mode 100644 index 00000000..405d495c --- /dev/null +++ b/node_modules/async/forEach.js @@ -0,0 +1,129 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} + +exports.default = (0, _awaitify2.default)(eachLimit, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachLimit.js b/node_modules/async/forEachLimit.js new file mode 100644 index 00000000..5f3d0094 --- /dev/null +++ b/node_modules/async/forEachLimit.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. 
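+ *
+ * A minimal usage sketch, reusing the `fileList` array and the
+ * `deleteFile(file, callback)` helper from the each example above:
+ *
+ *     // At most 2 deletions are in flight at any time.
+ *     async.eachLimit(fileList, 2, deleteFile, (err) => {
+ *         if (err) console.error(err);
+ *     });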
+ * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} +exports.default = (0, _awaitify2.default)(eachLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOf.js b/node_modules/async/forEachOf.js new file mode 100644 index 00000000..c22614f3 --- /dev/null +++ b/node_modules/async/forEachOf.js @@ -0,0 +1,185 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./internal/isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _breakLoop = require('./internal/breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = (0, _once2.default)(callback); + var index = 0, + completed = 0, + { length } = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return; + if (err) { + callback(err); + } else if (++completed === length || value === _breakLoop2.default) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? 
eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOf, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOfLimit.js b/node_modules/async/forEachOfLimit.js new file mode 100644 index 00000000..e9fc4db8 --- /dev/null +++ b/node_modules/async/forEachOfLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit2 = require('./internal/eachOfLimit.js'); + +var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOfLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachOfSeries.js b/node_modules/async/forEachOfSeries.js new file mode 100644 index 00000000..cfb0f33c --- /dev/null +++ b/node_modules/async/forEachOfSeries.js @@ -0,0 +1,39 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
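+ *
+ * A minimal usage sketch, assuming an array `rows` and a hypothetical async
+ * helper `saveRow(row, index, callback)`:
+ *
+ *     const async = require('async');
+ *
+ *     // saveRow is a hypothetical helper invoked with (row, index, callback).
+ *     // Rows are written strictly one after another; `index` is the array index.
+ *     async.forEachOfSeries(rows, saveRow, (err) => {
+ *         if (err) console.error(err);
+ *     });
+ *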
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachOfSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forEachSeries.js b/node_modules/async/forEachSeries.js new file mode 100644 index 00000000..d674d0c3 --- /dev/null +++ b/node_modules/async/forEachSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachLimit = require('./eachLimit.js'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return (0, _eachLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/forever.js b/node_modules/async/forever.js new file mode 100644 index 00000000..2c8d5b8f --- /dev/null +++ b/node_modules/async/forever.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _ensureAsync = require('./ensureAsync.js'); + +var _ensureAsync2 = _interopRequireDefault(_ensureAsync); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). 
+ * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ +function forever(fn, errback) { + var done = (0, _onlyOnce2.default)(errback); + var task = (0, _wrapAsync2.default)((0, _ensureAsync2.default)(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); +} +exports.default = (0, _awaitify2.default)(forever, 2); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupBy.js b/node_modules/async/groupBy.js new file mode 100644 index 00000000..6bb52aaa --- /dev/null +++ b/node_modules/async/groupBy.js @@ -0,0 +1,108 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = groupBy; + +var _groupByLimit = require('./groupByLimit.js'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 
'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function groupBy(coll, iteratee, callback) { + return (0, _groupByLimit2.default)(coll, Infinity, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupByLimit.js b/node_modules/async/groupByLimit.js new file mode 100644 index 00000000..5766d6e0 --- /dev/null +++ b/node_modules/async/groupByLimit.js @@ -0,0 +1,71 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. 
+ * @returns {Promise} a promise, if no callback is passed + */ +function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, { key, val }); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var { hasOwnProperty } = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var { key } = mapResults[i]; + var { val } = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); +} + +exports.default = (0, _awaitify2.default)(groupByLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/groupBySeries.js b/node_modules/async/groupBySeries.js new file mode 100644 index 00000000..60567435 --- /dev/null +++ b/node_modules/async/groupBySeries.js @@ -0,0 +1,36 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = groupBySeries; + +var _groupByLimit = require('./groupByLimit.js'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. 
+ * @returns {Promise} a promise, if no callback is passed + */ +function groupBySeries(coll, iteratee, callback) { + return (0, _groupByLimit2.default)(coll, 1, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/index.js b/node_modules/async/index.js new file mode 100644 index 00000000..ce647d59 --- /dev/null +++ b/node_modules/async/index.js @@ -0,0 +1,588 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.doDuring = exports.during = exports.wrapSync = undefined; +exports.selectSeries = exports.selectLimit = exports.select = exports.foldr = exports.foldl = exports.inject = exports.forEachOfLimit = exports.forEachOfSeries = exports.forEachOf = exports.forEachLimit = exports.forEachSeries = exports.forEach = exports.flatMapSeries = exports.flatMapLimit = exports.flatMap = exports.findSeries = exports.findLimit = exports.find = exports.anySeries = exports.anyLimit = exports.any = exports.allSeries = exports.allLimit = exports.all = exports.whilst = exports.waterfall = exports.until = exports.unmemoize = exports.tryEach = exports.transform = exports.timesSeries = exports.timesLimit = exports.times = exports.timeout = exports.sortBy = exports.someSeries = exports.someLimit = exports.some = exports.setImmediate = exports.series = exports.seq = exports.retryable = exports.retry = exports.rejectSeries = exports.rejectLimit = exports.reject = exports.reflectAll = exports.reflect = exports.reduceRight = exports.reduce = exports.race = exports.queue = exports.priorityQueue = exports.parallelLimit = exports.parallel = exports.nextTick = exports.memoize = exports.mapValuesSeries = exports.mapValuesLimit = exports.mapValues = exports.mapSeries = exports.mapLimit = exports.map = exports.log = exports.groupBySeries = exports.groupByLimit = exports.groupBy = exports.forever = exports.filterSeries = exports.filterLimit = exports.filter = exports.everySeries = exports.everyLimit = exports.every = exports.ensureAsync = exports.eachSeries = exports.eachOfSeries = exports.eachOfLimit = exports.eachOf = exports.eachLimit = exports.each = exports.doWhilst = exports.doUntil = exports.dir = exports.detectSeries = exports.detectLimit = exports.detect = exports.constant = exports.concatSeries = exports.concatLimit = exports.concat = exports.compose = exports.cargoQueue = exports.cargo = exports.autoInject = exports.auto = exports.asyncify = exports.applyEachSeries = exports.applyEach = exports.apply = undefined; + +var _apply = require('./apply'); + +var _apply2 = _interopRequireDefault(_apply); + +var _applyEach = require('./applyEach'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _applyEachSeries = require('./applyEachSeries'); + +var _applyEachSeries2 = _interopRequireDefault(_applyEachSeries); + +var _asyncify = require('./asyncify'); + +var _asyncify2 = _interopRequireDefault(_asyncify); + +var _auto = require('./auto'); + +var _auto2 = _interopRequireDefault(_auto); + +var _autoInject = require('./autoInject'); + +var _autoInject2 = _interopRequireDefault(_autoInject); + +var _cargo = require('./cargo'); + +var _cargo2 = _interopRequireDefault(_cargo); + +var _cargoQueue = require('./cargoQueue'); + +var _cargoQueue2 = _interopRequireDefault(_cargoQueue); + +var _compose = require('./compose'); + +var _compose2 = _interopRequireDefault(_compose); + +var _concat = require('./concat'); + +var _concat2 = _interopRequireDefault(_concat); + +var _concatLimit = 
require('./concatLimit'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _concatSeries = require('./concatSeries'); + +var _concatSeries2 = _interopRequireDefault(_concatSeries); + +var _constant = require('./constant'); + +var _constant2 = _interopRequireDefault(_constant); + +var _detect = require('./detect'); + +var _detect2 = _interopRequireDefault(_detect); + +var _detectLimit = require('./detectLimit'); + +var _detectLimit2 = _interopRequireDefault(_detectLimit); + +var _detectSeries = require('./detectSeries'); + +var _detectSeries2 = _interopRequireDefault(_detectSeries); + +var _dir = require('./dir'); + +var _dir2 = _interopRequireDefault(_dir); + +var _doUntil = require('./doUntil'); + +var _doUntil2 = _interopRequireDefault(_doUntil); + +var _doWhilst = require('./doWhilst'); + +var _doWhilst2 = _interopRequireDefault(_doWhilst); + +var _each = require('./each'); + +var _each2 = _interopRequireDefault(_each); + +var _eachLimit = require('./eachLimit'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _eachOf = require('./eachOf'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _eachOfLimit = require('./eachOfLimit'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _eachOfSeries = require('./eachOfSeries'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _eachSeries = require('./eachSeries'); + +var _eachSeries2 = _interopRequireDefault(_eachSeries); + +var _ensureAsync = require('./ensureAsync'); + +var _ensureAsync2 = _interopRequireDefault(_ensureAsync); + +var _every = require('./every'); + +var _every2 = _interopRequireDefault(_every); + +var _everyLimit = require('./everyLimit'); + +var _everyLimit2 = _interopRequireDefault(_everyLimit); + +var _everySeries = require('./everySeries'); + +var _everySeries2 = _interopRequireDefault(_everySeries); + +var _filter = require('./filter'); + +var _filter2 = _interopRequireDefault(_filter); + +var _filterLimit = require('./filterLimit'); + +var _filterLimit2 = _interopRequireDefault(_filterLimit); + +var _filterSeries = require('./filterSeries'); + +var _filterSeries2 = _interopRequireDefault(_filterSeries); + +var _forever = require('./forever'); + +var _forever2 = _interopRequireDefault(_forever); + +var _groupBy = require('./groupBy'); + +var _groupBy2 = _interopRequireDefault(_groupBy); + +var _groupByLimit = require('./groupByLimit'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +var _groupBySeries = require('./groupBySeries'); + +var _groupBySeries2 = _interopRequireDefault(_groupBySeries); + +var _log = require('./log'); + +var _log2 = _interopRequireDefault(_log); + +var _map = require('./map'); + +var _map2 = _interopRequireDefault(_map); + +var _mapLimit = require('./mapLimit'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _mapSeries = require('./mapSeries'); + +var _mapSeries2 = _interopRequireDefault(_mapSeries); + +var _mapValues = require('./mapValues'); + +var _mapValues2 = _interopRequireDefault(_mapValues); + +var _mapValuesLimit = require('./mapValuesLimit'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +var _mapValuesSeries = require('./mapValuesSeries'); + +var _mapValuesSeries2 = _interopRequireDefault(_mapValuesSeries); + +var _memoize = require('./memoize'); + +var _memoize2 = _interopRequireDefault(_memoize); + +var _nextTick = require('./nextTick'); + +var _nextTick2 = _interopRequireDefault(_nextTick); + +var _parallel = require('./parallel'); + +var 
_parallel2 = _interopRequireDefault(_parallel); + +var _parallelLimit = require('./parallelLimit'); + +var _parallelLimit2 = _interopRequireDefault(_parallelLimit); + +var _priorityQueue = require('./priorityQueue'); + +var _priorityQueue2 = _interopRequireDefault(_priorityQueue); + +var _queue = require('./queue'); + +var _queue2 = _interopRequireDefault(_queue); + +var _race = require('./race'); + +var _race2 = _interopRequireDefault(_race); + +var _reduce = require('./reduce'); + +var _reduce2 = _interopRequireDefault(_reduce); + +var _reduceRight = require('./reduceRight'); + +var _reduceRight2 = _interopRequireDefault(_reduceRight); + +var _reflect = require('./reflect'); + +var _reflect2 = _interopRequireDefault(_reflect); + +var _reflectAll = require('./reflectAll'); + +var _reflectAll2 = _interopRequireDefault(_reflectAll); + +var _reject = require('./reject'); + +var _reject2 = _interopRequireDefault(_reject); + +var _rejectLimit = require('./rejectLimit'); + +var _rejectLimit2 = _interopRequireDefault(_rejectLimit); + +var _rejectSeries = require('./rejectSeries'); + +var _rejectSeries2 = _interopRequireDefault(_rejectSeries); + +var _retry = require('./retry'); + +var _retry2 = _interopRequireDefault(_retry); + +var _retryable = require('./retryable'); + +var _retryable2 = _interopRequireDefault(_retryable); + +var _seq = require('./seq'); + +var _seq2 = _interopRequireDefault(_seq); + +var _series = require('./series'); + +var _series2 = _interopRequireDefault(_series); + +var _setImmediate = require('./setImmediate'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _some = require('./some'); + +var _some2 = _interopRequireDefault(_some); + +var _someLimit = require('./someLimit'); + +var _someLimit2 = _interopRequireDefault(_someLimit); + +var _someSeries = require('./someSeries'); + +var _someSeries2 = _interopRequireDefault(_someSeries); + +var _sortBy = require('./sortBy'); + +var _sortBy2 = _interopRequireDefault(_sortBy); + +var _timeout = require('./timeout'); + +var _timeout2 = _interopRequireDefault(_timeout); + +var _times = require('./times'); + +var _times2 = _interopRequireDefault(_times); + +var _timesLimit = require('./timesLimit'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +var _timesSeries = require('./timesSeries'); + +var _timesSeries2 = _interopRequireDefault(_timesSeries); + +var _transform = require('./transform'); + +var _transform2 = _interopRequireDefault(_transform); + +var _tryEach = require('./tryEach'); + +var _tryEach2 = _interopRequireDefault(_tryEach); + +var _unmemoize = require('./unmemoize'); + +var _unmemoize2 = _interopRequireDefault(_unmemoize); + +var _until = require('./until'); + +var _until2 = _interopRequireDefault(_until); + +var _waterfall = require('./waterfall'); + +var _waterfall2 = _interopRequireDefault(_waterfall); + +var _whilst = require('./whilst'); + +var _whilst2 = _interopRequireDefault(_whilst); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. 
+ * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ + +/** + * Async is a utility module which provides straight-forward, powerful functions + * for working with asynchronous JavaScript. Although originally designed for + * use with [Node.js](http://nodejs.org) and installable via + * `npm install --save async`, it can also be used directly in the browser. + * @module async + * @see AsyncFunction + */ + +/** + * A collection of `async` functions for manipulating collections, such as + * arrays and objects. + * @module Collections + */ + +/** + * A collection of `async` functions for controlling the flow through a script. + * @module ControlFlow + */ + +/** + * A collection of `async` utility functions. 
+ * @module Utils + */ + +exports.default = { + apply: _apply2.default, + applyEach: _applyEach2.default, + applyEachSeries: _applyEachSeries2.default, + asyncify: _asyncify2.default, + auto: _auto2.default, + autoInject: _autoInject2.default, + cargo: _cargo2.default, + cargoQueue: _cargoQueue2.default, + compose: _compose2.default, + concat: _concat2.default, + concatLimit: _concatLimit2.default, + concatSeries: _concatSeries2.default, + constant: _constant2.default, + detect: _detect2.default, + detectLimit: _detectLimit2.default, + detectSeries: _detectSeries2.default, + dir: _dir2.default, + doUntil: _doUntil2.default, + doWhilst: _doWhilst2.default, + each: _each2.default, + eachLimit: _eachLimit2.default, + eachOf: _eachOf2.default, + eachOfLimit: _eachOfLimit2.default, + eachOfSeries: _eachOfSeries2.default, + eachSeries: _eachSeries2.default, + ensureAsync: _ensureAsync2.default, + every: _every2.default, + everyLimit: _everyLimit2.default, + everySeries: _everySeries2.default, + filter: _filter2.default, + filterLimit: _filterLimit2.default, + filterSeries: _filterSeries2.default, + forever: _forever2.default, + groupBy: _groupBy2.default, + groupByLimit: _groupByLimit2.default, + groupBySeries: _groupBySeries2.default, + log: _log2.default, + map: _map2.default, + mapLimit: _mapLimit2.default, + mapSeries: _mapSeries2.default, + mapValues: _mapValues2.default, + mapValuesLimit: _mapValuesLimit2.default, + mapValuesSeries: _mapValuesSeries2.default, + memoize: _memoize2.default, + nextTick: _nextTick2.default, + parallel: _parallel2.default, + parallelLimit: _parallelLimit2.default, + priorityQueue: _priorityQueue2.default, + queue: _queue2.default, + race: _race2.default, + reduce: _reduce2.default, + reduceRight: _reduceRight2.default, + reflect: _reflect2.default, + reflectAll: _reflectAll2.default, + reject: _reject2.default, + rejectLimit: _rejectLimit2.default, + rejectSeries: _rejectSeries2.default, + retry: _retry2.default, + retryable: _retryable2.default, + seq: _seq2.default, + series: _series2.default, + setImmediate: _setImmediate2.default, + some: _some2.default, + someLimit: _someLimit2.default, + someSeries: _someSeries2.default, + sortBy: _sortBy2.default, + timeout: _timeout2.default, + times: _times2.default, + timesLimit: _timesLimit2.default, + timesSeries: _timesSeries2.default, + transform: _transform2.default, + tryEach: _tryEach2.default, + unmemoize: _unmemoize2.default, + until: _until2.default, + waterfall: _waterfall2.default, + whilst: _whilst2.default, + + // aliases + all: _every2.default, + allLimit: _everyLimit2.default, + allSeries: _everySeries2.default, + any: _some2.default, + anyLimit: _someLimit2.default, + anySeries: _someSeries2.default, + find: _detect2.default, + findLimit: _detectLimit2.default, + findSeries: _detectSeries2.default, + flatMap: _concat2.default, + flatMapLimit: _concatLimit2.default, + flatMapSeries: _concatSeries2.default, + forEach: _each2.default, + forEachSeries: _eachSeries2.default, + forEachLimit: _eachLimit2.default, + forEachOf: _eachOf2.default, + forEachOfSeries: _eachOfSeries2.default, + forEachOfLimit: _eachOfLimit2.default, + inject: _reduce2.default, + foldl: _reduce2.default, + foldr: _reduceRight2.default, + select: _filter2.default, + selectLimit: _filterLimit2.default, + selectSeries: _filterSeries2.default, + wrapSync: _asyncify2.default, + during: _whilst2.default, + doDuring: _doWhilst2.default +}; +exports.apply = _apply2.default; +exports.applyEach = _applyEach2.default; +exports.applyEachSeries 
= _applyEachSeries2.default; +exports.asyncify = _asyncify2.default; +exports.auto = _auto2.default; +exports.autoInject = _autoInject2.default; +exports.cargo = _cargo2.default; +exports.cargoQueue = _cargoQueue2.default; +exports.compose = _compose2.default; +exports.concat = _concat2.default; +exports.concatLimit = _concatLimit2.default; +exports.concatSeries = _concatSeries2.default; +exports.constant = _constant2.default; +exports.detect = _detect2.default; +exports.detectLimit = _detectLimit2.default; +exports.detectSeries = _detectSeries2.default; +exports.dir = _dir2.default; +exports.doUntil = _doUntil2.default; +exports.doWhilst = _doWhilst2.default; +exports.each = _each2.default; +exports.eachLimit = _eachLimit2.default; +exports.eachOf = _eachOf2.default; +exports.eachOfLimit = _eachOfLimit2.default; +exports.eachOfSeries = _eachOfSeries2.default; +exports.eachSeries = _eachSeries2.default; +exports.ensureAsync = _ensureAsync2.default; +exports.every = _every2.default; +exports.everyLimit = _everyLimit2.default; +exports.everySeries = _everySeries2.default; +exports.filter = _filter2.default; +exports.filterLimit = _filterLimit2.default; +exports.filterSeries = _filterSeries2.default; +exports.forever = _forever2.default; +exports.groupBy = _groupBy2.default; +exports.groupByLimit = _groupByLimit2.default; +exports.groupBySeries = _groupBySeries2.default; +exports.log = _log2.default; +exports.map = _map2.default; +exports.mapLimit = _mapLimit2.default; +exports.mapSeries = _mapSeries2.default; +exports.mapValues = _mapValues2.default; +exports.mapValuesLimit = _mapValuesLimit2.default; +exports.mapValuesSeries = _mapValuesSeries2.default; +exports.memoize = _memoize2.default; +exports.nextTick = _nextTick2.default; +exports.parallel = _parallel2.default; +exports.parallelLimit = _parallelLimit2.default; +exports.priorityQueue = _priorityQueue2.default; +exports.queue = _queue2.default; +exports.race = _race2.default; +exports.reduce = _reduce2.default; +exports.reduceRight = _reduceRight2.default; +exports.reflect = _reflect2.default; +exports.reflectAll = _reflectAll2.default; +exports.reject = _reject2.default; +exports.rejectLimit = _rejectLimit2.default; +exports.rejectSeries = _rejectSeries2.default; +exports.retry = _retry2.default; +exports.retryable = _retryable2.default; +exports.seq = _seq2.default; +exports.series = _series2.default; +exports.setImmediate = _setImmediate2.default; +exports.some = _some2.default; +exports.someLimit = _someLimit2.default; +exports.someSeries = _someSeries2.default; +exports.sortBy = _sortBy2.default; +exports.timeout = _timeout2.default; +exports.times = _times2.default; +exports.timesLimit = _timesLimit2.default; +exports.timesSeries = _timesSeries2.default; +exports.transform = _transform2.default; +exports.tryEach = _tryEach2.default; +exports.unmemoize = _unmemoize2.default; +exports.until = _until2.default; +exports.waterfall = _waterfall2.default; +exports.whilst = _whilst2.default; +exports.all = _every2.default; +exports.allLimit = _everyLimit2.default; +exports.allSeries = _everySeries2.default; +exports.any = _some2.default; +exports.anyLimit = _someLimit2.default; +exports.anySeries = _someSeries2.default; +exports.find = _detect2.default; +exports.findLimit = _detectLimit2.default; +exports.findSeries = _detectSeries2.default; +exports.flatMap = _concat2.default; +exports.flatMapLimit = _concatLimit2.default; +exports.flatMapSeries = _concatSeries2.default; +exports.forEach = _each2.default; +exports.forEachSeries = 
_eachSeries2.default; +exports.forEachLimit = _eachLimit2.default; +exports.forEachOf = _eachOf2.default; +exports.forEachOfSeries = _eachOfSeries2.default; +exports.forEachOfLimit = _eachOfLimit2.default; +exports.inject = _reduce2.default; +exports.foldl = _reduce2.default; +exports.foldr = _reduceRight2.default; +exports.select = _filter2.default; +exports.selectLimit = _filterLimit2.default; +exports.selectSeries = _filterSeries2.default; +exports.wrapSync = _asyncify2.default; +exports.during = _whilst2.default; +exports.doDuring = _doWhilst2.default; \ No newline at end of file diff --git a/node_modules/async/inject.js b/node_modules/async/inject.js new file mode 100644 index 00000000..56e2db81 --- /dev/null +++ b/node_modules/async/inject.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/DoublyLinkedList.js b/node_modules/async/internal/DoublyLinkedList.js new file mode 100644 index 00000000..cd11c3b3 --- /dev/null +++ b/node_modules/async/internal/DoublyLinkedList.js @@ -0,0 +1,92 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation +// used for queues. This implementation assumes that the node provided by the user can be modified +// to adjust the next and last properties. We implement only the minimal functionality +// for queue support. 
+class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next;else this.head = node.next; + if (node.next) node.next.prev = node.prev;else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty() { + while (this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode;else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode;else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node);else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node);else setInitial(this, node); + } + + shift() { + return this.head && this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator]() { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove(testFn) { + var curr = this.head; + while (curr) { + var { next } = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } +} + +exports.default = DLL; +function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/Heap.js b/node_modules/async/internal/Heap.js new file mode 100644 index 00000000..80762fe0 --- /dev/null +++ b/node_modules/async/internal/Heap.js @@ -0,0 +1,120 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +// Binary min-heap implementation used for priority queue. +// Implementation is stable, i.e. 
push time is considered for equal priorities +class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty() { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p = parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l = leftChi(index)) < this.heap.length) { + if (l + 1 < this.heap.length && smaller(this.heap[l + 1], this.heap[l])) { + l = l + 1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length - 1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length - 1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator]() { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove(testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length - 1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } +} + +exports.default = Heap; +function leftChi(i) { + return (i << 1) + 1; +} + +function parent(i) { + return (i + 1 >> 1) - 1; +} + +function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } else { + return x.pushCount < y.pushCount; + } +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/applyEach.js b/node_modules/async/internal/applyEach.js new file mode 100644 index 00000000..a3f4ef1d --- /dev/null +++ b/node_modules/async/internal/applyEach.js @@ -0,0 +1,29 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = (0, _awaitify2.default)(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + (0, _wrapAsync2.default)(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; +}; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/asyncEachOfLimit.js b/node_modules/async/internal/asyncEachOfLimit.js new file mode 100644 index 00000000..bba74c7c --- /dev/null +++ b/node_modules/async/internal/asyncEachOfLimit.js @@ -0,0 +1,75 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncEachOfLimit; + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// for async generators +function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return; + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({ value, done: iterDone }) => { + //console.log('got value', value) + if (canceled || done) return; + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return; + if (err) return handleError(err); + + if (err === false) { + done = true; + canceled = true; + return; + } + + if (result === _breakLoop2.default || done && running <= 0) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return; + awaiting = false; + done = true; + callback(err); + } + + replenish(); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/awaitify.js b/node_modules/async/internal/awaitify.js new file mode 100644 index 00000000..7b36f1ac --- /dev/null +++ b/node_modules/async/internal/awaitify.js @@ -0,0 +1,27 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = awaitify; +// conditionally promisify a function. +// only return a promise if a callback is omitted +function awaitify(asyncFn, arity = asyncFn.length) { + if (!arity) throw new Error('arity is undefined'); + function awaitable(...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args); + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err); + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }); + } + + return awaitable; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/breakLoop.js b/node_modules/async/internal/breakLoop.js new file mode 100644 index 00000000..8245e553 --- /dev/null +++ b/node_modules/async/internal/breakLoop.js @@ -0,0 +1,10 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +// A temporary value used to identify if the loop should be broken. +// See #1064, #1293 +const breakLoop = {}; +exports.default = breakLoop; +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/consoleFunc.js b/node_modules/async/internal/consoleFunc.js new file mode 100644 index 00000000..70347a5d --- /dev/null +++ b/node_modules/async/internal/consoleFunc.js @@ -0,0 +1,31 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = consoleFunc; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function consoleFunc(name) { + return (fn, ...args) => (0, _wrapAsync2.default)(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { + /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/createTester.js b/node_modules/async/internal/createTester.js new file mode 100644 index 00000000..7b2d7346 --- /dev/null +++ b/node_modules/async/internal/createTester.js @@ -0,0 +1,40 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _createTester; + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = (0, _wrapAsync2.default)(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, _breakLoop2.default); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/eachOfLimit.js b/node_modules/async/internal/eachOfLimit.js new file mode 100644 index 00000000..fc26b20f --- /dev/null +++ b/node_modules/async/internal/eachOfLimit.js @@ -0,0 +1,90 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _iterator = require('./iterator.js'); + +var _iterator2 = _interopRequireDefault(_iterator); + +var _onlyOnce = require('./onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./wrapAsync.js'); + +var _asyncEachOfLimit = require('./asyncEachOfLimit.js'); + +var _asyncEachOfLimit2 = _interopRequireDefault(_asyncEachOfLimit); + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +exports.default = limit => { + return (obj, iteratee, callback) => { + callback = (0, _once2.default)(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1'); + } + if (!obj) { + return callback(null); + } + if ((0, _wrapAsync.isAsyncGenerator)(obj)) { + return (0, _asyncEachOfLimit2.default)(obj, limit, iteratee, callback); + } + if ((0, _wrapAsync.isAsyncIterable)(obj)) { + return (0, _asyncEachOfLimit2.default)(obj[Symbol.asyncIterator](), limit, iteratee, callback); + } + var nextElem = (0, _iterator2.default)(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return; + running -= 1; + if (err) { + done = true; + callback(err); + } else if (err === false) { + done = true; + canceled = true; + } else if (value === _breakLoop2.default || done && running <= 0) { + done = true; + return callback(null); + } else if (!looping) { + replenish(); + } + } + + function replenish() { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, (0, _onlyOnce2.default)(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; +}; + +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/filter.js b/node_modules/async/internal/filter.js new file mode 100644 index 00000000..aef2b9d2 --- /dev/null +++ b/node_modules/async/internal/filter.js @@ -0,0 +1,55 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _filter; + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); +} + +function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({ index, value: x }); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results.sort((a, b) => a.index - b.index).map(v => v.value)); + }); +} + +function _filter(eachfn, coll, iteratee, callback) { + var filter = (0, _isArrayLike2.default)(coll) ? 
filterArray : filterGeneric; + return filter(eachfn, coll, (0, _wrapAsync2.default)(iteratee), callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/getIterator.js b/node_modules/async/internal/getIterator.js new file mode 100644 index 00000000..830a5452 --- /dev/null +++ b/node_modules/async/internal/getIterator.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); +}; + +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/initialParams.js b/node_modules/async/internal/initialParams.js new file mode 100644 index 00000000..245378cf --- /dev/null +++ b/node_modules/async/internal/initialParams.js @@ -0,0 +1,14 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (fn) { + return function (...args /*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; +}; + +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/isArrayLike.js b/node_modules/async/internal/isArrayLike.js new file mode 100644 index 00000000..ce076704 --- /dev/null +++ b/node_modules/async/internal/isArrayLike.js @@ -0,0 +1,10 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isArrayLike; +function isArrayLike(value) { + return value && typeof value.length === 'number' && value.length >= 0 && value.length % 1 === 0; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/iterator.js b/node_modules/async/internal/iterator.js new file mode 100644 index 00000000..90b0223b --- /dev/null +++ b/node_modules/async/internal/iterator.js @@ -0,0 +1,57 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = createIterator; + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _getIterator = require('./getIterator.js'); + +var _getIterator2 = _interopRequireDefault(_getIterator); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? { value: coll[i], key: i } : null; + }; +} + +function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) return null; + i++; + return { value: item.value, key: i }; + }; +} + +function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? { value: obj[key], key } : null; + }; +} + +function createIterator(coll) { + if ((0, _isArrayLike2.default)(coll)) { + return createArrayIterator(coll); + } + + var iterator = (0, _getIterator2.default)(coll); + return iterator ? 
createES2015Iterator(iterator) : createObjectIterator(coll); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/map.js b/node_modules/async/internal/map.js new file mode 100644 index 00000000..af3fd098 --- /dev/null +++ b/node_modules/async/internal/map.js @@ -0,0 +1,30 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _asyncMap; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = (0, _wrapAsync2.default)(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/once.js b/node_modules/async/internal/once.js new file mode 100644 index 00000000..49f37270 --- /dev/null +++ b/node_modules/async/internal/once.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = once; +function once(fn) { + function wrapper(...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/onlyOnce.js b/node_modules/async/internal/onlyOnce.js new file mode 100644 index 00000000..6ad721bd --- /dev/null +++ b/node_modules/async/internal/onlyOnce.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = onlyOnce; +function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/parallel.js b/node_modules/async/internal/parallel.js new file mode 100644 index 00000000..75741bba --- /dev/null +++ b/node_modules/async/internal/parallel.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +exports.default = (0, _awaitify2.default)((eachfn, tasks, callback) => { + var results = (0, _isArrayLike2.default)(tasks) ? 
[] : {}; + + eachfn(tasks, (task, key, taskCb) => { + (0, _wrapAsync2.default)(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); +}, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/promiseCallback.js b/node_modules/async/internal/promiseCallback.js new file mode 100644 index 00000000..17a83016 --- /dev/null +++ b/node_modules/async/internal/promiseCallback.js @@ -0,0 +1,23 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +const PROMISE_SYMBOL = Symbol('promiseCallback'); + +function promiseCallback() { + let resolve, reject; + function callback(err, ...args) { + if (err) return reject(err); + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, reject = rej; + }); + + return callback; +} + +exports.promiseCallback = promiseCallback; +exports.PROMISE_SYMBOL = PROMISE_SYMBOL; \ No newline at end of file diff --git a/node_modules/async/internal/queue.js b/node_modules/async/internal/queue.js new file mode 100644 index 00000000..cbc590d0 --- /dev/null +++ b/node_modules/async/internal/queue.js @@ -0,0 +1,294 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = queue; + +var _onlyOnce = require('./onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _setImmediate = require('./setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _DoublyLinkedList = require('./DoublyLinkedList.js'); + +var _DoublyLinkedList2 = _interopRequireDefault(_DoublyLinkedList); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } else if (concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = (0, _wrapAsync2.default)(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on(event, handler) { + events[event].push(handler); + } + + function once(event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off(event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []); + if (!handler) return events[event] = []; + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger(event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback(err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res(); + if (args.length <= 1) return res(args[0]); + res(args); + } + + var item = q._createTaskItem(data, rejectOnError ? 
promiseCallback : callback || promiseCallback); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + (0, _setImmediate2.default)(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= q.concurrency - q.buffer) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + (0, _setImmediate2.default)(() => trigger('drain')); + return true; + } + return false; + } + + const eventMethod = name => handler => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err); + resolve(data); + }); + }); + } + off(name); + on(name, handler); + }; + + var isProcessing = false; + var q = { + _tasks: new _DoublyLinkedList2.default(), + _createTaskItem(data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator]() { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, false, false, callback)); + } + return _insert(data, false, false, callback); + }, + pushAsync(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, false, true, callback)); + } + return _insert(data, false, true, callback); + }, + kill() { + off(); + q._tasks.empty(); + }, + unshift(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, true, false, callback)); + } + return _insert(data, true, false, callback); + }, + unshiftAsync(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, true, true, callback)); + } + return _insert(data, true, true, callback); + }, + remove(testFn) { + q._tasks.remove(testFn); + }, + process() { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). 
+ if (isProcessing) { + return; + } + isProcessing = true; + while (!q.paused && numRunning < q.concurrency && q._tasks.length) { + var tasks = [], + data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = (0, _onlyOnce2.default)(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length() { + return q._tasks.length; + }, + running() { + return numRunning; + }, + workersList() { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause() { + q.paused = true; + }, + resume() { + if (q.paused === false) { + return; + } + q.paused = false; + (0, _setImmediate2.default)(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + } + }); + return q; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/range.js b/node_modules/async/internal/range.js new file mode 100644 index 00000000..6680e642 --- /dev/null +++ b/node_modules/async/internal/range.js @@ -0,0 +1,14 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = range; +function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/reject.js b/node_modules/async/internal/reject.js new file mode 100644 index 00000000..7388ef49 --- /dev/null +++ b/node_modules/async/internal/reject.js @@ -0,0 +1,26 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reject; + +var _filter = require('./filter.js'); + +var _filter2 = _interopRequireDefault(_filter); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function reject(eachfn, arr, _iteratee, callback) { + const iteratee = (0, _wrapAsync2.default)(_iteratee); + return (0, _filter2.default)(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/internal/setImmediate.js b/node_modules/async/internal/setImmediate.js new file mode 100644 index 00000000..513efd13 --- /dev/null +++ b/node_modules/async/internal/setImmediate.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.fallback = fallback; +exports.wrap = wrap; +/* istanbul ignore file */ + +var hasQueueMicrotask = exports.hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; +var hasSetImmediate = exports.hasSetImmediate = typeof setImmediate === 'function' && setImmediate; +var hasNextTick = exports.hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + +function fallback(fn) { + setTimeout(fn, 0); +} + +function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); +} + +var _defer; + +if (hasQueueMicrotask) { + _defer = queueMicrotask; +} else if (hasSetImmediate) { + _defer = setImmediate; +} else if (hasNextTick) { + _defer = process.nextTick; +} else { + _defer = fallback; +} + +exports.default = wrap(_defer); \ No newline at end of file diff --git a/node_modules/async/internal/withoutIndex.js b/node_modules/async/internal/withoutIndex.js new file mode 100644 index 00000000..ec45fa35 --- /dev/null +++ b/node_modules/async/internal/withoutIndex.js @@ -0,0 +1,10 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _withoutIndex; +function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/internal/wrapAsync.js b/node_modules/async/internal/wrapAsync.js new file mode 100644 index 00000000..ad4d6198 --- /dev/null +++ b/node_modules/async/internal/wrapAsync.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isAsyncIterable = exports.isAsyncGenerator = exports.isAsync = undefined; + +var _asyncify = require('../asyncify.js'); + +var _asyncify2 = _interopRequireDefault(_asyncify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; +} + +function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; +} + +function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; +} + +function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function'); + return isAsync(asyncFn) ? 
(0, _asyncify2.default)(asyncFn) : asyncFn; +} + +exports.default = wrapAsync; +exports.isAsync = isAsync; +exports.isAsyncGenerator = isAsyncGenerator; +exports.isAsyncIterable = isAsyncIterable; \ No newline at end of file diff --git a/node_modules/async/log.js b/node_modules/async/log.js new file mode 100644 index 00000000..8fc1ed51 --- /dev/null +++ b/node_modules/async/log.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _consoleFunc = require('./internal/consoleFunc.js'); + +var _consoleFunc2 = _interopRequireDefault(_consoleFunc); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ +exports.default = (0, _consoleFunc2.default)('log'); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/map.js b/node_modules/async/map.js new file mode 100644 index 00000000..ec4135d1 --- /dev/null +++ b/node_modules/async/map.js @@ -0,0 +1,142 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. 
+ * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function map(coll, iteratee, callback) { + return (0, _map3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(map, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapLimit.js b/node_modules/async/mapLimit.js new file mode 100644 index 00000000..b5e461c3 --- /dev/null +++ b/node_modules/async/mapLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapLimit(coll, limit, iteratee, callback) { + return (0, _map3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(mapLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapSeries.js b/node_modules/async/mapSeries.js new file mode 100644 index 00000000..91f36bf4 --- /dev/null +++ b/node_modules/async/mapSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapSeries(coll, iteratee, callback) { + return (0, _map3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(mapSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValues.js b/node_modules/async/mapValues.js new file mode 100644 index 00000000..00da9262 --- /dev/null +++ b/node_modules/async/mapValues.js @@ -0,0 +1,152 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = mapValues; + +var _mapValuesLimit = require('./mapValuesLimit.js'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. 
+ * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function mapValues(obj, iteratee, callback) { + return (0, _mapValuesLimit2.default)(obj, Infinity, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValuesLimit.js b/node_modules/async/mapValuesLimit.js new file mode 100644 index 00000000..93066ee8 --- /dev/null +++ b/node_modules/async/mapValuesLimit.js @@ -0,0 +1,61 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesLimit(obj, limit, iteratee, callback) { + callback = (0, _once2.default)(callback); + var newObj = {}; + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfLimit2.default)(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); +} + +exports.default = (0, _awaitify2.default)(mapValuesLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/mapValuesSeries.js b/node_modules/async/mapValuesSeries.js new file mode 100644 index 00000000..560058aa --- /dev/null +++ b/node_modules/async/mapValuesSeries.js @@ -0,0 +1,37 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = mapValuesSeries; + +var _mapValuesLimit = require('./mapValuesLimit.js'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesSeries(obj, iteratee, callback) { + return (0, _mapValuesLimit2.default)(obj, 1, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/memoize.js b/node_modules/async/memoize.js new file mode 100644 index 00000000..6003e412 --- /dev/null +++ b/node_modules/async/memoize.js @@ -0,0 +1,91 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = memoize; + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. 
+ * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ +function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = (0, _wrapAsync2.default)(fn); + var memoized = (0, _initialParams2.default)((args, callback) => { + var key = hasher(...args); + if (key in memo) { + (0, _setImmediate2.default)(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/nextTick.js b/node_modules/async/nextTick.js new file mode 100644 index 00000000..e6d321bc --- /dev/null +++ b/node_modules/async/nextTick.js @@ -0,0 +1,52 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _setImmediate = require('./internal/setImmediate.js'); + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. 
+ * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +var _defer; /* istanbul ignore file */ + + +if (_setImmediate.hasNextTick) { + _defer = process.nextTick; +} else if (_setImmediate.hasSetImmediate) { + _defer = setImmediate; +} else { + _defer = _setImmediate.fallback; +} + +exports.default = (0, _setImmediate.wrap)(_defer); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/package.json b/node_modules/async/package.json new file mode 100644 index 00000000..9c464bc7 --- /dev/null +++ b/node_modules/async/package.json @@ -0,0 +1,75 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "3.2.4", + "main": "dist/async.js", + "author": "Caolan McMahon", + "homepage": "https://caolan.github.io/async/", + "repository": { + "type": "git", + "url": "https://github.com/caolan/async.git" + }, + "bugs": { + "url": "https://github.com/caolan/async/issues" + }, + "keywords": [ + "async", + "callback", + "module", + "utility" + ], + "devDependencies": { + "@babel/eslint-parser": "^7.16.5", + "babel-core": "^6.26.3", + "babel-minify": "^0.5.0", + "babel-plugin-add-module-exports": "^1.0.4", + "babel-plugin-istanbul": "^6.1.1", + "babel-plugin-syntax-async-generators": "^6.13.0", + "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2", + "babel-preset-es2015": "^6.3.13", + "babel-preset-es2017": "^6.22.0", + "babel-register": "^6.26.0", + "babelify": "^10.0.0", + "benchmark": "^2.1.1", + "bluebird": "^3.4.6", + "browserify": "^17.0.0", + "chai": "^4.2.0", + "cheerio": "^0.22.0", + "es6-promise": "^4.2.8", + "eslint": "^8.6.0", + "eslint-plugin-prefer-arrow": "^1.2.3", + "fs-extra": "^10.0.0", + "jsdoc": "^3.6.2", + "karma": "^6.3.12", + "karma-browserify": "^8.1.0", + "karma-firefox-launcher": "^2.1.2", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.0", + "karma-safari-launcher": "^1.0.0", + "mocha": "^6.1.4", + "native-promise-only": "^0.8.0-a", + "nyc": "^15.1.0", + "rollup": "^2.66.1", + "rollup-plugin-node-resolve": "^5.2.0", + "rollup-plugin-npm": "^2.0.0", + "rsvp": "^4.8.5", + "semver": "^7.3.5", + "yargs": "^17.3.1" + }, + "scripts": { + "coverage": "nyc npm run mocha-node-test -- --grep @nycinvalid --invert", + "jsdoc": "jsdoc -c ./support/jsdoc/jsdoc.json && node support/jsdoc/jsdoc-fix-html.js", + "lint": "eslint --fix .", + "mocha-browser-test": "karma start", + "mocha-node-test": "mocha", + "mocha-test": "npm run mocha-node-test && npm run mocha-browser-test", + "test": "npm run lint && npm run mocha-node-test" + }, + "license": "MIT", + "nyc": { + "exclude": [ + "test" + ] + }, + "module": "dist/async.mjs" +} \ No newline at end of file diff --git a/node_modules/async/parallel.js b/node_modules/async/parallel.js new file mode 100644 index 00000000..76bc6243 --- /dev/null +++ b/node_modules/async/parallel.js @@ -0,0 +1,180 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = parallel; + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _parallel2 = require('./internal/parallel.js'); + +var _parallel3 = _interopRequireDefault(_parallel2); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function parallel(tasks, callback) { + return (0, _parallel3.default)(_eachOf2.default, tasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/parallelLimit.js b/node_modules/async/parallelLimit.js new file mode 100644 index 00000000..dbe0bb8c --- /dev/null +++ b/node_modules/async/parallelLimit.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = parallelLimit; + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _parallel = require('./internal/parallel.js'); + +var _parallel2 = _interopRequireDefault(_parallel); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). 
+ * @returns {Promise} a promise, if a callback is not passed + */ +function parallelLimit(tasks, limit, callback) { + return (0, _parallel2.default)((0, _eachOfLimit2.default)(limit), tasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/priorityQueue.js b/node_modules/async/priorityQueue.js new file mode 100644 index 00000000..6006f669 --- /dev/null +++ b/node_modules/async/priorityQueue.js @@ -0,0 +1,86 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (worker, concurrency) { + // Start with a normal queue + var q = (0, _queue2.default)(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new _Heap2.default(); + q._createTaskItem = ({ data, priority }, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return { data: tasks, priority }; + } + return tasks.map(data => { + return { data, priority }; + }); + } + + // Override push to accept second parameter representing priority + q.push = function (data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function (data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; +}; + +var _queue = require('./queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +var _Heap = require('./internal/Heap.js'); + +var _Heap2 = _interopRequireDefault(_Heap); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports['default']; + +/** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. + * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. 
+ */ \ No newline at end of file diff --git a/node_modules/async/queue.js b/node_modules/async/queue.js new file mode 100644 index 00000000..c69becb8 --- /dev/null +++ b/node_modules/async/queue.js @@ -0,0 +1,167 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (worker, concurrency) { + var _worker = (0, _wrapAsync2.default)(worker); + return (0, _queue2.default)((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); +}; + +var _queue = require('./internal/queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports['default']; + +/** + * A queue of tasks for the worker function to complete. + * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. 
+ * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. + * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ + +/** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. + * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. 
+ * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ \ No newline at end of file diff --git a/node_modules/async/race.js b/node_modules/async/race.js new file mode 100644 index 00000000..9595d884 --- /dev/null +++ b/node_modules/async/race.js @@ -0,0 +1,67 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ +function race(tasks, callback) { + callback = (0, _once2.default)(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + (0, _wrapAsync2.default)(tasks[i])(callback); + } +} + +exports.default = (0, _awaitify2.default)(race, 2); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reduce.js b/node_modules/async/reduce.js new file mode 100644 index 00000000..56e2db81 --- /dev/null +++ b/node_modules/async/reduce.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reduceRight.js b/node_modules/async/reduceRight.js new file mode 100644 index 00000000..bee5391d --- /dev/null +++ b/node_modules/async/reduceRight.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reduceRight; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. 
+ * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight(array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return (0, _reduce2.default)(reversed, memo, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reflect.js b/node_modules/async/reflect.js new file mode 100644 index 00000000..297ed797 --- /dev/null +++ b/node_modules/async/reflect.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reflect; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... 
+ * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ +function reflect(fn) { + var _fn = (0, _wrapAsync2.default)(fn); + return (0, _initialParams2.default)(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0) { + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reflectAll.js b/node_modules/async/reflectAll.js new file mode 100644 index 00000000..a862ff05 --- /dev/null +++ b/node_modules/async/reflectAll.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reflectAll; + +var _reflect = require('./reflect.js'); + +var _reflect2 = _interopRequireDefault(_reflect); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ +function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(_reflect2.default); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = _reflect2.default.call(this, tasks[key]); + }); + } + return results; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/reject.js b/node_modules/async/reject.js new file mode 100644 index 00000000..cabd96ea --- /dev/null +++ b/node_modules/async/reject.js @@ -0,0 +1,87 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function reject(coll, iteratee, callback) { + return (0, _reject3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(reject, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/rejectLimit.js b/node_modules/async/rejectLimit.js new file mode 100644 index 00000000..1a899252 --- /dev/null +++ b/node_modules/async/rejectLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
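+ * For example (an illustrative sketch reusing the `fileList` array and the
+ * `fileExists` helper from the `reject` example above):
+ *
+ * // run at most two existence checks at a time
+ * async.rejectLimit(fileList, 2, fileExists, function(err, results) {
+ *     // results is the list of files that do not exist
+ * });
+ *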
+ * @returns {Promise} a promise, if no callback is passed + */ +function rejectLimit(coll, limit, iteratee, callback) { + return (0, _reject3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(rejectLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/rejectSeries.js b/node_modules/async/rejectSeries.js new file mode 100644 index 00000000..6e1a1c5e --- /dev/null +++ b/node_modules/async/rejectSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function rejectSeries(coll, iteratee, callback) { + return (0, _reject3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(rejectSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/retry.js b/node_modules/async/retry.js new file mode 100644 index 00000000..dba30301 --- /dev/null +++ b/node_modules/async/retry.js @@ -0,0 +1,159 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = retry; + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function constant(value) { + return function () { + return value; + }; +} + +/** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. 
The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ +const DEFAULT_TIMES = 5; +const DEFAULT_INTERVAL = 0; + +function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || (0, _promiseCallback.promiseCallback)(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || (0, _promiseCallback.promiseCallback)(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = (0, _wrapAsync2.default)(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return; + if (err && attempt++ < options.times && (typeof options.errorFilter != 'function' || options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + 
} + }); + } + + retryAttempt(); + return callback[_promiseCallback.PROMISE_SYMBOL]; +} + +function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? t.interval : constant(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/retryable.js b/node_modules/async/retryable.js new file mode 100644 index 00000000..1b1147cd --- /dev/null +++ b/node_modules/async/retryable.js @@ -0,0 +1,77 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = retryable; + +var _retry = require('./retry.js'); + +var _retry2 = _interopRequireDefault(_retry); + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. 
+ * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ +function retryable(opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = opts && opts.arity || task.length; + if ((0, _wrapAsync.isAsync)(task)) { + arity += 1; + } + var _task = (0, _wrapAsync2.default)(task); + return (0, _initialParams2.default)((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = (0, _promiseCallback.promiseCallback)(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) (0, _retry2.default)(opts, taskFn, callback);else (0, _retry2.default)(taskFn, callback); + + return callback[_promiseCallback.PROMISE_SYMBOL]; + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/select.js b/node_modules/async/select.js new file mode 100644 index 00000000..303dc1fb --- /dev/null +++ b/node_modules/async/select.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filter, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/selectLimit.js b/node_modules/async/selectLimit.js new file mode 100644 index 00000000..89e55f53 --- /dev/null +++ b/node_modules/async/selectLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
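+ * For example (an illustrative sketch reusing the `files` array and the
+ * `fileExists` helper from the `filter` example above):
+ *
+ * // run at most two existence checks at a time
+ * async.filterLimit(files, 2, fileExists, function(err, results) {
+ *     // results is the list of files that do exist
+ * });
+ *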
+ * @returns {Promise} a promise, if no callback provided + */ +function filterLimit(coll, limit, iteratee, callback) { + return (0, _filter3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/selectSeries.js b/node_modules/async/selectSeries.js new file mode 100644 index 00000000..a045e52c --- /dev/null +++ b/node_modules/async/selectSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/seq.js b/node_modules/async/seq.js new file mode 100644 index 00000000..28c825f2 --- /dev/null +++ b/node_modules/async/seq.js @@ -0,0 +1,79 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = seq; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. 
+ * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ +function seq(...functions) { + var _functions = functions.map(_wrapAsync2.default); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = (0, _promiseCallback.promiseCallback)(); + } + + (0, _reduce2.default)(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, (err, results) => cb(err, ...results)); + + return cb[_promiseCallback.PROMISE_SYMBOL]; + }; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/series.js b/node_modules/async/series.js new file mode 100644 index 00000000..56e78f9f --- /dev/null +++ b/node_modules/async/series.js @@ -0,0 +1,186 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = series; + +var _parallel2 = require('./internal/parallel.js'); + +var _parallel3 = _interopRequireDefault(_parallel2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. 
This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). + * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function series(tasks, callback) { + return (0, _parallel3.default)(_eachOfSeries2.default, tasks, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/setImmediate.js b/node_modules/async/setImmediate.js new file mode 100644 index 00000000..c712ec3b --- /dev/null +++ b/node_modules/async/setImmediate.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = 
_interopRequireDefault(_setImmediate); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `setImmediate`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name setImmediate + * @static + * @memberOf module:Utils + * @method + * @see [async.nextTick]{@link module:Utils.nextTick} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +exports.default = _setImmediate2.default; +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/some.js b/node_modules/async/some.js new file mode 100644 index 00000000..2046cf64 --- /dev/null +++ b/node_modules/async/some.js @@ -0,0 +1,122 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(some, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/someLimit.js b/node_modules/async/someLimit.js new file mode 100644 index 00000000..c8a295a8 --- /dev/null +++ b/node_modules/async/someLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someLimit, 4); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/someSeries.js b/node_modules/async/someSeries.js new file mode 100644 index 00000000..ee0654ba --- /dev/null +++ b/node_modules/async/someSeries.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someSeries, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/sortBy.js b/node_modules/async/sortBy.js new file mode 100644 index 00000000..d17fb6a2 --- /dev/null +++ b/node_modules/async/sortBy.js @@ -0,0 +1,190 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map = require('./map.js'); + +var _map2 = _interopRequireDefault(_map); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function sortBy(coll, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _map2.default)(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, { value: x, criteria }); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, + b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; + } +} +exports.default = (0, _awaitify2.default)(sortBy, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timeout.js b/node_modules/async/timeout.js new file mode 100644 index 00000000..dd58eb38 --- /dev/null +++ b/node_modules/async/timeout.js @@ -0,0 +1,89 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timeout; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... 
+ * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ +function timeout(asyncFn, milliseconds, info) { + var fn = (0, _wrapAsync2.default)(asyncFn); + + return (0, _initialParams2.default)((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/times.js b/node_modules/async/times.js new file mode 100644 index 00000000..4484c73e --- /dev/null +++ b/node_modules/async/times.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = times; + +var _timesLimit = require('./timesLimit.js'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. 
+ * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ +function times(n, iteratee, callback) { + return (0, _timesLimit2.default)(n, Infinity, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timesLimit.js b/node_modules/async/timesLimit.js new file mode 100644 index 00000000..9fb0ba35 --- /dev/null +++ b/node_modules/async/timesLimit.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timesLimit; + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _range = require('./internal/range.js'); + +var _range2 = _interopRequireDefault(_range); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesLimit(count, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)((0, _range2.default)(count), limit, _iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/timesSeries.js b/node_modules/async/timesSeries.js new file mode 100644 index 00000000..a10f0cbe --- /dev/null +++ b/node_modules/async/timesSeries.js @@ -0,0 +1,32 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timesSeries; + +var _timesLimit = require('./timesLimit.js'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. 
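+ * For example (an illustrative sketch reusing the `createUser` helper from the
+ * `times` example above):
+ *
+ * // create 5 users one after another instead of in parallel
+ * async.timesSeries(5, function(n, next) {
+ *     createUser(n, next);
+ * }, function(err, users) {
+ *     // users are created strictly in order 0..4
+ * });
+ *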
+ * @returns {Promise} a promise, if no callback is provided + */ +function timesSeries(n, iteratee, callback) { + return (0, _timesLimit2.default)(n, 1, iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/transform.js b/node_modules/async/transform.js new file mode 100644 index 00000000..75b754e9 --- /dev/null +++ b/node_modules/async/transform.js @@ -0,0 +1,173 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = transform; + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. + * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function transform(coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? 
[] : {}; + } + callback = (0, _once2.default)(callback || (0, _promiseCallback.promiseCallback)()); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + + (0, _eachOf2.default)(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[_promiseCallback.PROMISE_SYMBOL]; +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/tryEach.js b/node_modules/async/tryEach.js new file mode 100644 index 00000000..82fe8ec1 --- /dev/null +++ b/node_modules/async/tryEach.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachSeries = require('./eachSeries.js'); + +var _eachSeries2 = _interopRequireDefault(_eachSeries); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ +function tryEach(tasks, callback) { + var error = null; + var result; + return (0, _eachSeries2.default)(tasks, (task, taskCb) => { + (0, _wrapAsync2.default)(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); +} + +exports.default = (0, _awaitify2.default)(tryEach); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/unmemoize.js b/node_modules/async/unmemoize.js new file mode 100644 index 00000000..47a92b42 --- /dev/null +++ b/node_modules/async/unmemoize.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = unmemoize; +/** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. 
+ * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ +function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; +} +module.exports = exports["default"]; \ No newline at end of file diff --git a/node_modules/async/until.js b/node_modules/async/until.js new file mode 100644 index 00000000..3c71e514 --- /dev/null +++ b/node_modules/async/until.js @@ -0,0 +1,61 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = until; + +var _whilst = require('./whilst.js'); + +var _whilst2 = _interopRequireDefault(_whilst); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ +function until(test, iteratee, callback) { + const _test = (0, _wrapAsync2.default)(test); + return (0, _whilst2.default)(cb => _test((err, truth) => cb(err, !truth)), iteratee, callback); +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/waterfall.js b/node_modules/async/waterfall.js new file mode 100644 index 00000000..fcd0dc1c --- /dev/null +++ b/node_modules/async/waterfall.js @@ -0,0 +1,105 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ +function waterfall(tasks, callback) { + callback = (0, _once2.default)(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = (0, _wrapAsync2.default)(tasks[taskIndex++]); + task(...args, (0, _onlyOnce2.default)(next)); + } + + function next(err, ...args) { + if (err === false) return; + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); +} + +exports.default = (0, _awaitify2.default)(waterfall); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/whilst.js b/node_modules/async/whilst.js new file mode 100644 index 00000000..32a47762 --- /dev/null +++ b/node_modules/async/whilst.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. 
+ * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +exports.default = (0, _awaitify2.default)(whilst, 3); +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/async/wrapSync.js b/node_modules/async/wrapSync.js new file mode 100644 index 00000000..3c3bf886 --- /dev/null +++ b/node_modules/async/wrapSync.js @@ -0,0 +1,118 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncify; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. 
+ * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. + * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if ((0, _wrapAsync.isAsync)(func)) { + return function (...args /*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback); + }; + } + + return (0, _initialParams2.default)(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback); + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + (0, _setImmediate2.default)(e => { + throw e; + }, err); + } +} +module.exports = exports['default']; \ No newline at end of file diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 00000000..cea8b16e --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 00000000..2cdc8e41 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 00000000..d2a48b6b --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. 
+ +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 00000000..c67a6460 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 00000000..ce6073e0 --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/base64-js/LICENSE b/node_modules/base64-js/LICENSE new file mode 100644 index 00000000..6d52b8ac --- /dev/null +++ b/node_modules/base64-js/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Jameson Little + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/base64-js/README.md b/node_modules/base64-js/README.md new file mode 100644 index 00000000..b42a48f4 --- /dev/null +++ b/node_modules/base64-js/README.md @@ -0,0 +1,34 @@ +base64-js +========= + +`base64-js` does basic base64 encoding/decoding in pure JS. + +[![build status](https://secure.travis-ci.org/beatgammit/base64-js.png)](http://travis-ci.org/beatgammit/base64-js) + +Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data. + +Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does. 
+ +## install + +With [npm](https://npmjs.org) do: + +`npm install base64-js` and `var base64js = require('base64-js')` + +For use in web browsers do: + +`` + +[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme) + +## methods + +`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which both take a single argument. + +* `byteLength` - Takes a base64 string and returns length of byte array +* `toByteArray` - Takes a base64 string and returns a byte array +* `fromByteArray` - Takes a byte array and returns a base64 string + +## license + +MIT diff --git a/node_modules/base64-js/base64js.min.js b/node_modules/base64-js/base64js.min.js new file mode 100644 index 00000000..908ac83f --- /dev/null +++ b/node_modules/base64-js/base64js.min.js @@ -0,0 +1 @@ +(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? 
validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} diff --git a/node_modules/base64-js/package.json b/node_modules/base64-js/package.json new file mode 100644 index 00000000..c3972e39 --- /dev/null +++ b/node_modules/base64-js/package.json @@ -0,0 +1,47 @@ +{ + "name": "base64-js", + "description": "Base64 encoding/decoding in pure JS", + "version": "1.5.1", + "author": "T. 
Jameson Little ", + "typings": "index.d.ts", + "bugs": { + "url": "https://github.com/beatgammit/base64-js/issues" + }, + "devDependencies": { + "babel-minify": "^0.5.1", + "benchmark": "^2.1.4", + "browserify": "^16.3.0", + "standard": "*", + "tape": "4.x" + }, + "homepage": "https://github.com/beatgammit/base64-js", + "keywords": [ + "base64" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/beatgammit/base64-js.git" + }, + "scripts": { + "build": "browserify -s base64js -r ./ | minify > base64js.min.js", + "lint": "standard", + "test": "npm run lint && npm run unit", + "unit": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/bin-build/index.js b/node_modules/bin-build/index.js new file mode 100644 index 00000000..525968ac --- /dev/null +++ b/node_modules/bin-build/index.js @@ -0,0 +1,43 @@ +'use strict'; +const decompress = require('decompress'); +const download = require('download'); +const execa = require('execa'); +const pMapSeries = require('p-map-series'); +const tempfile = require('tempfile'); + +const exec = (cmd, cwd) => pMapSeries(cmd, x => execa.shell(x, {cwd})); + +exports.directory = (dir, cmd) => { + if (typeof dir !== 'string') { + return Promise.reject(new TypeError(`Expected a \`string\`, got \`${typeof dir}\``)); + } + + return exec(cmd, dir); +}; + +exports.file = (file, cmd, opts) => { + opts = Object.assign({strip: 1}, opts); + + if (typeof file !== 'string') { + return Promise.reject(new TypeError(`Expected a \`string\`, got \`${typeof file}\``)); + } + + const tmp = tempfile(); + + return decompress(file, tmp, opts).then(() => exec(cmd, tmp)); +}; + +exports.url = (url, cmd, opts) => { + opts = Object.assign({ + extract: true, + strip: 1 + }, opts); + + if (typeof url !== 'string') { + return Promise.reject(new TypeError(`Expected a \`string\`, got \`${typeof url}\``)); + } + + const tmp = tempfile(); + + return download(url, tmp, opts).then(() => exec(cmd, tmp)); +}; diff --git a/node_modules/bin-build/license b/node_modules/bin-build/license new file mode 100644 index 00000000..0f8cf79c --- /dev/null +++ b/node_modules/bin-build/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Kevin Martensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/bin-build/package.json b/node_modules/bin-build/package.json new file mode 100644 index 00000000..f685c169 --- /dev/null +++ b/node_modules/bin-build/package.json @@ -0,0 +1,43 @@ +{ + "name": "bin-build", + "version": "3.0.0", + "description": "Easily build binaries", + "license": "MIT", + "repository": "kevva/bin-build", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "https://github.com/kevva" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "binary", + "build", + "make" + ], + "dependencies": { + "decompress": "^4.0.0", + "download": "^6.2.2", + "execa": "^0.7.0", + "p-map-series": "^1.0.0", + "tempfile": "^2.0.0" + }, + "devDependencies": { + "ava": "*", + "del": "^3.0.0", + "nock": "^9.0.0", + "path-exists": "^3.0.0", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/bin-build/readme.md b/node_modules/bin-build/readme.md new file mode 100644 index 00000000..31b559b3 --- /dev/null +++ b/node_modules/bin-build/readme.md @@ -0,0 +1,103 @@ +# bin-build [![Build Status](https://travis-ci.org/kevva/bin-build.svg?branch=master)](https://travis-ci.org/kevva/bin-build) + +> Easily build binaries + + +## Install + +``` +$ npm install --save bin-build +``` + + +## Usage + +```js +const binBuild = require('bin-build'); + +binBuild.url('http://www.lcdf.org/gifsicle/gifsicle-1.80.tar.gz', [ + './configure --disable-gifview --disable-gifdiff', + 'make install' +]).then(() => { + console.log('gifsicle built successfully'); +}); + +binBuild.file('gifsicle-1.80.tar.gz', [ + './configure --disable-gifview --disable-gifdiff', + 'make install' +]).then(() => { + console.log('gifsicle built successfully'); +}); +``` + + +## API + +### binBuild.directory(directory, commands) + +#### directory + +Type: `string` + +Path to a directory containing the source code. + +#### commands + +Type: `Array` + +Commands to run when building. + +### binBuild.file(file, commands, [options]) + +#### file + +Type: `string` + +Path to a archive file containing the source code. + +#### commands + +Type: `Array` + +Commands to run when building. + +#### options + +Type: `Object` + +##### strip + +Type: `number`
+Default: `1` + +Strip a number of leading paths from file names on extraction. + +### binBuild.url(url, commands, [options]) + +#### url + +Type: `string` + +URL to a archive file containing the source code. + +#### commands + +Type: `Array` + +Commands to run when building. + +#### options + +Type: `Object` + +##### strip + +Type: `number`
+Default: `1` + +Strip a number of leading paths from file names on extraction. + + +## License + +MIT © [Kevin Mårtensson](https://github.com/kevva) diff --git a/node_modules/bin-check/index.js b/node_modules/bin-check/index.js new file mode 100644 index 00000000..49100bd0 --- /dev/null +++ b/node_modules/bin-check/index.js @@ -0,0 +1,31 @@ +'use strict'; +const execa = require('execa'); +const executable = require('executable'); + +module.exports = (bin, args) => { + if (!Array.isArray(args)) { + args = ['--help']; + } + + return executable(bin) + .then(works => { + if (!works) { + throw new Error(`Couldn't execute the \`${bin}\` binary. Make sure it has the right permissions.`); + } + + return execa(bin, args); + }) + .then(res => res.code === 0); +}; + +module.exports.sync = (bin, args) => { + if (!Array.isArray(args)) { + args = ['--help']; + } + + if (!executable.sync(bin)) { + throw new Error(`Couldn't execute the \`${bin}\` binary. Make sure it has the right permissions.`); + } + + return execa.sync(bin, args).status === 0; +}; diff --git a/node_modules/bin-check/license b/node_modules/bin-check/license new file mode 100644 index 00000000..e0e91582 --- /dev/null +++ b/node_modules/bin-check/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Kevin Mårtensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/bin-check/package.json b/node_modules/bin-check/package.json new file mode 100644 index 00000000..279b342b --- /dev/null +++ b/node_modules/bin-check/package.json @@ -0,0 +1,35 @@ +{ + "name": "bin-check", + "version": "4.1.0", + "description": "Check if a binary is working", + "license": "MIT", + "repository": "kevva/bin-check", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "https://github.com/kevva" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "binary", + "check", + "executable", + "test" + ], + "dependencies": { + "execa": "^0.7.0", + "executable": "^4.1.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/bin-check/readme.md b/node_modules/bin-check/readme.md new file mode 100644 index 00000000..111f1f2e --- /dev/null +++ b/node_modules/bin-check/readme.md @@ -0,0 +1,51 @@ +# bin-check [![Build Status](https://travis-ci.org/kevva/bin-check.svg?branch=master)](https://travis-ci.org/kevva/bin-check) + +> Check if a binary is working by checking its exit code + + +## Install + +``` +$ npm install bin-check +``` + + +## Usage + +```js +const binCheck = require('bin-check'); + +binCheck('/bin/sh', ['--version']).then(works => { + console.log(works); + //=> true +}); +``` + + +## API + +### binCheck(binary, [arguments]) + +Returns a `Promise` for a `boolean`. + +### binCheck.sync(binary, [arguments]) + +Returns a `boolean`. + +#### binary + +Type: `string` + +Path to the binary. + +#### arguments + +Type: `Array`
+Default: `['--help']` + +Arguments to run the binary with. + + +## License + +MIT © [Kevin Mårtensson](https://github.com/kevva) diff --git a/node_modules/bin-version-check/index.js b/node_modules/bin-version-check/index.js new file mode 100644 index 00000000..5d3ae825 --- /dev/null +++ b/node_modules/bin-version-check/index.js @@ -0,0 +1,22 @@ +'use strict'; +const semver = require('semver'); +const binVersion = require('bin-version'); +const semverTruncate = require('semver-truncate'); + +module.exports = (binary, semverRange, options) => { + if (typeof binary !== 'string' || typeof semverRange !== 'string') { + return Promise.reject(new Error('`binary` and `semverRange` arguments required')); + } + + if (!semver.validRange(semverRange)) { + return Promise.reject(new Error('Invalid version range')); + } + + return binVersion(binary, options).then(binaryVersion => { + if (!semver.satisfies(semverTruncate(binaryVersion, 'patch'), semverRange)) { + const error = new Error(`${binary} ${binaryVersion} doesn't satisfy the version requirement of ${semverRange}`); + error.name = 'InvalidBinaryVersion'; + throw error; + } + }); +}; diff --git a/node_modules/bin-version-check/license b/node_modules/bin-version-check/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-version-check/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bin-version-check/package.json b/node_modules/bin-version-check/package.json new file mode 100644 index 00000000..18367986 --- /dev/null +++ b/node_modules/bin-version-check/package.json @@ -0,0 +1,43 @@ +{ + "name": "bin-version-check", + "version": "4.0.0", + "description": "Check whether a binary version satisfies a semver range", + "license": "MIT", + "repository": "sindresorhus/bin-version-check", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "cli", + "bin", + "binary", + "executable", + "version", + "semver", + "semantic", + "range", + "satisfy", + "check", + "validate" + ], + "dependencies": { + "bin-version": "^3.0.0", + "semver": "^5.6.0", + "semver-truncate": "^1.1.2" + }, + "devDependencies": { + "ava": "^1.0.0-rc.1", + "xo": "^0.23.0" + } +} diff --git a/node_modules/bin-version-check/readme.md b/node_modules/bin-version-check/readme.md new file mode 100644 index 00000000..6c48c928 --- /dev/null +++ b/node_modules/bin-version-check/readme.md @@ -0,0 +1,71 @@ +# bin-version-check [![Build Status](https://travis-ci.org/sindresorhus/bin-version-check.svg?branch=master)](https://travis-ci.org/sindresorhus/bin-version-check) + +> Check whether a binary version satisfies a [semver range](https://github.com/npm/node-semver#ranges) + +Useful when you have a thing that only works with specific versions of a binary. + + +## Install + +``` +$ npm install bin-version-check +``` + + +## Usage + +``` +$ curl --version +curl 7.30.0 (x86_64-apple-darwin13.0) +``` + +```js +const binVersionCheck = require('bin-version-check'); + +(async () => { + try { + await binVersionCheck('curl', '>=8'); + } catch (error) { + console.log(error); + //=> 'InvalidBinVersion: curl 7.30.0 doesn't satisfy the version requirement of >=8' + } +})(); +``` + + +## API + +### binVersionCheck(binary, semverRange, [options]) + +#### binary + +Type: `string` + +Name or path of the binary to check. + +#### semverRange + +Type: `string` + +[Semver range](https://github.com/npm/node-semver#ranges) to check against. + +#### options + +Type: `Object` + +##### args + +Type: `string[]` +Default: `['--version']` + +CLI arguments used to get the binary version. + + +## Related + +- [bin-version-check-cli](https://github.com/sindresorhus/bin-version-check-cli) - CLI for this module + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-version/index.d.ts b/node_modules/bin-version/index.d.ts new file mode 100644 index 00000000..00287aec --- /dev/null +++ b/node_modules/bin-version/index.d.ts @@ -0,0 +1,43 @@ +declare namespace binVersion { + interface Options { + /** + The arguments to pass to `binary` so that it will print its version. + + @default ['--version'] + */ + args?: string[]; + } +} + +/** +Get the version of a binary in [semver](https://github.com/npm/node-semver) format. + +@param binary - The name of or path to the binary to get the version from. +@returns The version of the `binary`. 
+ +@example +``` +import binVersion = require('bin-version'); + +(async () => { + // $ curl --version + // curl 7.30.0 (x86_64-apple-darwin13.0) + + console.log(await binVersion('curl')); + //=> '7.30.0' + + + // $ openssl version + // OpenSSL 1.0.2d 9 Jul 2015 + + console.log(await binVersion('openssl', {args: ['version']})); + //=> '1.0.2' +})(); +``` +*/ +declare function binVersion( + binary: string, + options?: binVersion.Options +): Promise; + +export = binVersion; diff --git a/node_modules/bin-version/index.js b/node_modules/bin-version/index.js new file mode 100644 index 00000000..303e996f --- /dev/null +++ b/node_modules/bin-version/index.js @@ -0,0 +1,15 @@ +'use strict'; +const execa = require('execa'); +const findVersions = require('find-versions'); + +module.exports = (binary, options = {}) => { + return execa(binary, options.args || ['--version']) + .then(result => findVersions(result.stdout || result.stderr, {loose: true})[0]) + .catch(error => { + if (error.code === 'ENOENT') { + error.message = `Couldn't find the \`${binary}\` binary. Make sure it's installed and in your $PATH.`; + } + + throw error; + }); +}; diff --git a/node_modules/bin-version/license b/node_modules/bin-version/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-version/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bin-version/node_modules/cross-spawn/CHANGELOG.md b/node_modules/bin-version/node_modules/cross-spawn/CHANGELOG.md new file mode 100644 index 00000000..ded9620b --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/CHANGELOG.md @@ -0,0 +1,100 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +
+## [6.0.5](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.4...v6.0.5) (2018-03-02) + + +### Bug Fixes + +* avoid using deprecated Buffer constructor ([#94](https://github.com/moxystudio/node-cross-spawn/issues/94)) ([d5770df](https://github.com/moxystudio/node-cross-spawn/commit/d5770df)), closes [/nodejs.org/api/deprecations.html#deprecations_dep0005](https://github.com//nodejs.org/api/deprecations.html/issues/deprecations_dep0005) + + + + +## [6.0.4](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.3...v6.0.4) (2018-01-31) + + +### Bug Fixes + +* fix paths being incorrectly normalized on unix ([06ee3c6](https://github.com/moxystudio/node-cross-spawn/commit/06ee3c6)), closes [#90](https://github.com/moxystudio/node-cross-spawn/issues/90) + + + + +## [6.0.3](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.2...v6.0.3) (2018-01-23) + + + + +## [6.0.2](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.1...v6.0.2) (2018-01-23) + + + + +## [6.0.1](https://github.com/moxystudio/node-cross-spawn/compare/v6.0.0...v6.0.1) (2018-01-23) + + + + +# [6.0.0](https://github.com/moxystudio/node-cross-spawn/compare/5.1.0...6.0.0) (2018-01-23) + + +### Bug Fixes + +* fix certain arguments not being correctly escaped or causing batch syntax error ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10)), closes [#82](https://github.com/moxystudio/node-cross-spawn/issues/82) [#51](https://github.com/moxystudio/node-cross-spawn/issues/51) +* fix commands as posix relatixe paths not working correctly, e.g.: `./my-command` ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10)) +* fix `options` argument being mutated ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10)) +* fix commands resolution when PATH was actually Path ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10)) + + +### Features + +* improve compliance with node's ENOENT errors ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10)) +* improve detection of node's shell option support ([900cf10](https://github.com/moxystudio/node-cross-spawn/commit/900cf10)) + + +### Chores + +* upgrade tooling +* upgrate project to es6 (node v4) + + +### BREAKING CHANGES + +* remove support for older nodejs versions, only `node >= 4` is supported + + + +## [5.1.0](https://github.com/moxystudio/node-cross-spawn/compare/5.0.1...5.1.0) (2017-02-26) + + +### Bug Fixes + +* fix `options.shell` support for NodeJS [v4.8](https://github.com/nodejs/node/blob/master/doc/changelogs/CHANGELOG_V4.md#4.8.0) + + + +## [5.0.1](https://github.com/moxystudio/node-cross-spawn/compare/5.0.0...5.0.1) (2016-11-04) + + +### Bug Fixes + +* fix `options.shell` support for NodeJS v7 + + + +# [5.0.0](https://github.com/moxystudio/node-cross-spawn/compare/4.0.2...5.0.0) (2016-10-30) + + +## Features + +* add support for `options.shell` +* improve parsing of shebangs by using [`shebang-command`](https://github.com/kevva/shebang-command) module + + +## Chores + +* refactor some code to make it more clear +* update README caveats diff --git a/node_modules/bin-version/node_modules/cross-spawn/LICENSE b/node_modules/bin-version/node_modules/cross-spawn/LICENSE new file mode 100644 index 00000000..8407b9a3 --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Made With MOXY Lda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this 
software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/bin-version/node_modules/cross-spawn/README.md b/node_modules/bin-version/node_modules/cross-spawn/README.md new file mode 100644 index 00000000..e895cd7a --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/README.md @@ -0,0 +1,94 @@ +# cross-spawn + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Build status][appveyor-image]][appveyor-url] [![Coverage Status][codecov-image]][codecov-url] [![Dependency status][david-dm-image]][david-dm-url] [![Dev Dependency status][david-dm-dev-image]][david-dm-dev-url] [![Greenkeeper badge][greenkeeper-image]][greenkeeper-url] + +[npm-url]:https://npmjs.org/package/cross-spawn +[downloads-image]:http://img.shields.io/npm/dm/cross-spawn.svg +[npm-image]:http://img.shields.io/npm/v/cross-spawn.svg +[travis-url]:https://travis-ci.org/moxystudio/node-cross-spawn +[travis-image]:http://img.shields.io/travis/moxystudio/node-cross-spawn/master.svg +[appveyor-url]:https://ci.appveyor.com/project/satazor/node-cross-spawn +[appveyor-image]:https://img.shields.io/appveyor/ci/satazor/node-cross-spawn/master.svg +[codecov-url]:https://codecov.io/gh/moxystudio/node-cross-spawn +[codecov-image]:https://img.shields.io/codecov/c/github/moxystudio/node-cross-spawn/master.svg +[david-dm-url]:https://david-dm.org/moxystudio/node-cross-spawn +[david-dm-image]:https://img.shields.io/david/moxystudio/node-cross-spawn.svg +[david-dm-dev-url]:https://david-dm.org/moxystudio/node-cross-spawn?type=dev +[david-dm-dev-image]:https://img.shields.io/david/dev/moxystudio/node-cross-spawn.svg +[greenkeeper-image]:https://badges.greenkeeper.io/moxystudio/node-cross-spawn.svg +[greenkeeper-url]:https://greenkeeper.io/ + +A cross platform solution to node's spawn and spawnSync. 
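As a rough sketch of the drop-in usage the tagline above describes (the `npm ls` invocation is only an illustrative command, not part of the package's docs):

```js
// cross-spawn mirrors child_process.spawn's signature, so it works as a
// drop-in replacement. On Windows it resolves PATHEXT and shebangs for you;
// on POSIX it defers to the built-in spawn behaviour.
const spawn = require('cross-spawn');

const child = spawn('npm', ['ls', '--depth', '0'], {stdio: 'inherit'});

child.on('exit', code => {
	console.log(`npm exited with code ${code}`);
});
```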
+ + +## Installation + +`$ npm install cross-spawn` + + +## Why + +Node has issues when using spawn on Windows: + +- It ignores [PATHEXT](https://github.com/joyent/node/issues/2318) +- It does not support [shebangs](https://en.wikipedia.org/wiki/Shebang_(Unix)) +- Has problems running commands with [spaces](https://github.com/nodejs/node/issues/7367) +- Has problems running commands with posix relative paths (e.g.: `./my-folder/my-executable`) +- Has an [issue](https://github.com/moxystudio/node-cross-spawn/issues/82) with command shims (files in `node_modules/.bin/`), where arguments with quotes and parenthesis would result in [invalid syntax error](https://github.com/moxystudio/node-cross-spawn/blob/e77b8f22a416db46b6196767bcd35601d7e11d54/test/index.test.js#L149) +- No `options.shell` support on node `` where `` must not contain any arguments. +If you would like to have the shebang support improved, feel free to contribute via a pull-request. + +Remember to always test your code on Windows! + + +## Tests + +`$ npm test` +`$ npm test -- --watch` during development + +## License + +Released under the [MIT License](http://www.opensource.org/licenses/mit-license.php). diff --git a/node_modules/bin-version/node_modules/cross-spawn/index.js b/node_modules/bin-version/node_modules/cross-spawn/index.js new file mode 100644 index 00000000..5509742c --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/index.js @@ -0,0 +1,39 @@ +'use strict'; + +const cp = require('child_process'); +const parse = require('./lib/parse'); +const enoent = require('./lib/enoent'); + +function spawn(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + + // Hook into child process "exit" event to emit an error if the command + // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + enoent.hookChildProcess(spawned, parsed); + + return spawned; +} + +function spawnSync(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + + // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); + + return result; +} + +module.exports = spawn; +module.exports.spawn = spawn; +module.exports.sync = spawnSync; + +module.exports._parse = parse; +module.exports._enoent = enoent; diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/enoent.js b/node_modules/bin-version/node_modules/cross-spawn/lib/enoent.js new file mode 100644 index 00000000..14df9b62 --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/lib/enoent.js @@ -0,0 +1,59 @@ +'use strict'; + +const isWin = process.platform === 'win32'; + +function notFoundError(original, syscall) { + return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { + code: 'ENOENT', + errno: 'ENOENT', + syscall: `${syscall} ${original.command}`, + path: original.command, + spawnargs: original.args, + }); +} + +function hookChildProcess(cp, parsed) { + if (!isWin) { + return; + } + + const originalEmit = cp.emit; + + cp.emit = function (name, arg1) { + // If emitting "exit" event and exit code is 1, we need to check if + // the command exists and emit an "error" instead + // See 
https://github.com/IndigoUnited/node-cross-spawn/issues/16 + if (name === 'exit') { + const err = verifyENOENT(arg1, parsed, 'spawn'); + + if (err) { + return originalEmit.call(cp, 'error', err); + } + } + + return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params + }; +} + +function verifyENOENT(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawn'); + } + + return null; +} + +function verifyENOENTSync(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawnSync'); + } + + return null; +} + +module.exports = { + hookChildProcess, + verifyENOENT, + verifyENOENTSync, + notFoundError, +}; diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/parse.js b/node_modules/bin-version/node_modules/cross-spawn/lib/parse.js new file mode 100644 index 00000000..962827a9 --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/lib/parse.js @@ -0,0 +1,125 @@ +'use strict'; + +const path = require('path'); +const niceTry = require('nice-try'); +const resolveCommand = require('./util/resolveCommand'); +const escape = require('./util/escape'); +const readShebang = require('./util/readShebang'); +const semver = require('semver'); + +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + +// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 +const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; + +function detectShebang(parsed) { + parsed.file = resolveCommand(parsed); + + const shebang = parsed.file && readShebang(parsed.file); + + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; + + return resolveCommand(parsed); + } + + return parsed.file; +} + +function parseNonShell(parsed) { + if (!isWin) { + return parsed; + } + + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); + + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); + + // If a shell is required, use cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is an hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); + + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); + + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } + + return parsed; +} + +function 
parseShell(parsed) { + // If node supports the shell option, there's no need to mimic its behavior + if (supportsShellOption) { + return parsed; + } + + // Mimic node shell option + // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + if (isWin) { + parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } else { + if (typeof parsed.options.shell === 'string') { + parsed.command = parsed.options.shell; + } else if (process.platform === 'android') { + parsed.command = '/system/bin/sh'; + } else { + parsed.command = '/bin/sh'; + } + + parsed.args = ['-c', shellCommand]; + } + + return parsed; +} + +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } + + args = args ? args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original + + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; + + // Delegate further parsing to shell or non-shell + return options.shell ? parseShell(parsed) : parseNonShell(parsed); +} + +module.exports = parse; diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/util/escape.js b/node_modules/bin-version/node_modules/cross-spawn/lib/util/escape.js new file mode 100644 index 00000000..b0bb84c3 --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/lib/util/escape.js @@ -0,0 +1,45 @@ +'use strict'; + +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; + +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + return arg; +} + +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; + + // Algorithm below is based on https://qntm.org/cmd + + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(\\*)"/g, '$1$1\\"'); + + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(\\*)$/, '$1$1'); + + // All other backslashes occur literally + + // Quote the whole thing: + arg = `"${arg}"`; + + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) { + arg = arg.replace(metaCharsRegExp, '^$1'); + } + + return arg; +} + +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/util/readShebang.js b/node_modules/bin-version/node_modules/cross-spawn/lib/util/readShebang.js new file mode 100644 index 00000000..bd4f1280 --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/lib/util/readShebang.js @@ -0,0 +1,32 @@ +'use strict'; + +const fs = require('fs'); +const shebangCommand = require('shebang-command'); + +function readShebang(command) { + // Read the first 150 bytes from the file + const size = 
150; + let buffer; + + if (Buffer.alloc) { + // Node.js v4.5+ / v5.10+ + buffer = Buffer.alloc(size); + } else { + // Old Node.js API + buffer = new Buffer(size); + buffer.fill(0); // zero-fill + } + + let fd; + + try { + fd = fs.openSync(command, 'r'); + fs.readSync(fd, buffer, 0, size, 0); + fs.closeSync(fd); + } catch (e) { /* Empty */ } + + // Attempt to extract shebang (null is returned if not a shebang) + return shebangCommand(buffer.toString()); +} + +module.exports = readShebang; diff --git a/node_modules/bin-version/node_modules/cross-spawn/lib/util/resolveCommand.js b/node_modules/bin-version/node_modules/cross-spawn/lib/util/resolveCommand.js new file mode 100644 index 00000000..2fd5ad27 --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/lib/util/resolveCommand.js @@ -0,0 +1,47 @@ +'use strict'; + +const path = require('path'); +const which = require('which'); +const pathKey = require('path-key')(); + +function resolveCommandAttempt(parsed, withoutPathExt) { + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; + + // If a custom `cwd` was specified, we need to change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (hasCustomCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } + } + + let resolved; + + try { + resolved = which.sync(parsed.command, { + path: (parsed.options.env || process.env)[pathKey], + pathExt: withoutPathExt ? path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + process.chdir(cwd); + } + + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? 
parsed.options.cwd : '', resolved); + } + + return resolved; +} + +function resolveCommand(parsed) { + return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +} + +module.exports = resolveCommand; diff --git a/node_modules/bin-version/node_modules/cross-spawn/package.json b/node_modules/bin-version/node_modules/cross-spawn/package.json new file mode 100644 index 00000000..1a69be8c --- /dev/null +++ b/node_modules/bin-version/node_modules/cross-spawn/package.json @@ -0,0 +1,76 @@ +{ + "name": "cross-spawn", + "version": "6.0.5", + "description": "Cross platform child_process#spawn and child_process#spawnSync", + "keywords": [ + "spawn", + "spawnSync", + "windows", + "cross-platform", + "path-ext", + "shebang", + "cmd", + "execute" + ], + "author": "André Cruz ", + "homepage": "https://github.com/moxystudio/node-cross-spawn", + "repository": { + "type": "git", + "url": "git@github.com:moxystudio/node-cross-spawn.git" + }, + "license": "MIT", + "main": "index.js", + "files": [ + "lib" + ], + "scripts": { + "lint": "eslint .", + "test": "jest --env node --coverage", + "prerelease": "npm t && npm run lint", + "release": "standard-version", + "precommit": "lint-staged", + "commitmsg": "commitlint -e $GIT_PARAMS" + }, + "standard-version": { + "scripts": { + "posttag": "git push --follow-tags origin master && npm publish" + } + }, + "lint-staged": { + "*.js": [ + "eslint --fix", + "git add" + ] + }, + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "devDependencies": { + "@commitlint/cli": "^6.0.0", + "@commitlint/config-conventional": "^6.0.2", + "babel-core": "^6.26.0", + "babel-jest": "^22.1.0", + "babel-preset-moxy": "^2.2.1", + "eslint": "^4.3.0", + "eslint-config-moxy": "^5.0.0", + "husky": "^0.14.3", + "jest": "^22.0.0", + "lint-staged": "^7.0.0", + "mkdirp": "^0.5.1", + "regenerator-runtime": "^0.11.1", + "rimraf": "^2.6.2", + "standard-version": "^4.2.0" + }, + "engines": { + "node": ">=4.8" + } +} diff --git a/node_modules/bin-version/node_modules/execa/index.js b/node_modules/bin-version/node_modules/execa/index.js new file mode 100644 index 00000000..aad9ac88 --- /dev/null +++ b/node_modules/bin-version/node_modules/execa/index.js @@ -0,0 +1,361 @@ +'use strict'; +const path = require('path'); +const childProcess = require('child_process'); +const crossSpawn = require('cross-spawn'); +const stripEof = require('strip-eof'); +const npmRunPath = require('npm-run-path'); +const isStream = require('is-stream'); +const _getStream = require('get-stream'); +const pFinally = require('p-finally'); +const onExit = require('signal-exit'); +const errname = require('./lib/errname'); +const stdio = require('./lib/stdio'); + +const TEN_MEGABYTES = 1000 * 1000 * 10; + +function handleArgs(cmd, args, opts) { + let parsed; + + opts = Object.assign({ + extendEnv: true, + env: {} + }, opts); + + if (opts.extendEnv) { + opts.env = Object.assign({}, process.env, opts.env); + } + + if (opts.__winShell === true) { + delete opts.__winShell; + parsed = { + command: cmd, + args, + options: opts, + file: cmd, + original: { + cmd, + args + } + }; + } else { + parsed = crossSpawn._parse(cmd, args, opts); + } + + opts = Object.assign({ + maxBuffer: TEN_MEGABYTES, + buffer: true, + stripEof: true, + preferLocal: true, + localDir: parsed.options.cwd || process.cwd(), + encoding: 'utf8', + reject: true, + cleanup: true + }, 
parsed.options); + + opts.stdio = stdio(opts); + + if (opts.preferLocal) { + opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); + } + + if (opts.detached) { + // #115 + opts.cleanup = false; + } + + if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { + // #116 + parsed.args.unshift('/q'); + } + + return { + cmd: parsed.command, + args: parsed.args, + opts, + parsed + }; +} + +function handleInput(spawned, input) { + if (input === null || input === undefined) { + return; + } + + if (isStream(input)) { + input.pipe(spawned.stdin); + } else { + spawned.stdin.end(input); + } +} + +function handleOutput(opts, val) { + if (val && opts.stripEof) { + val = stripEof(val); + } + + return val; +} + +function handleShell(fn, cmd, opts) { + let file = '/bin/sh'; + let args = ['-c', cmd]; + + opts = Object.assign({}, opts); + + if (process.platform === 'win32') { + opts.__winShell = true; + file = process.env.comspec || 'cmd.exe'; + args = ['/s', '/c', `"${cmd}"`]; + opts.windowsVerbatimArguments = true; + } + + if (opts.shell) { + file = opts.shell; + delete opts.shell; + } + + return fn(file, args, opts); +} + +function getStream(process, stream, {encoding, buffer, maxBuffer}) { + if (!process[stream]) { + return null; + } + + let ret; + + if (!buffer) { + // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 + ret = new Promise((resolve, reject) => { + process[stream] + .once('end', resolve) + .once('error', reject); + }); + } else if (encoding) { + ret = _getStream(process[stream], { + encoding, + maxBuffer + }); + } else { + ret = _getStream.buffer(process[stream], {maxBuffer}); + } + + return ret.catch(err => { + err.stream = stream; + err.message = `${stream} ${err.message}`; + throw err; + }); +} + +function makeError(result, options) { + const {stdout, stderr} = result; + + let err = result.error; + const {code, signal} = result; + + const {parsed, joinedCmd} = options; + const timedOut = options.timedOut || false; + + if (!err) { + let output = ''; + + if (Array.isArray(parsed.opts.stdio)) { + if (parsed.opts.stdio[2] !== 'inherit') { + output += output.length > 0 ? stderr : `\n${stderr}`; + } + + if (parsed.opts.stdio[1] !== 'inherit') { + output += `\n${stdout}`; + } + } else if (parsed.opts.stdio !== 'inherit') { + output = `\n${stderr}${stdout}`; + } + + err = new Error(`Command failed: ${joinedCmd}${output}`); + err.code = code < 0 ? 
errname(code) : code; + } + + err.stdout = stdout; + err.stderr = stderr; + err.failed = true; + err.signal = signal || null; + err.cmd = joinedCmd; + err.timedOut = timedOut; + + return err; +} + +function joinCmd(cmd, args) { + let joinedCmd = cmd; + + if (Array.isArray(args) && args.length > 0) { + joinedCmd += ' ' + args.join(' '); + } + + return joinedCmd; +} + +module.exports = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const {encoding, buffer, maxBuffer} = parsed.opts; + const joinedCmd = joinCmd(cmd, args); + + let spawned; + try { + spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); + } catch (err) { + return Promise.reject(err); + } + + let removeExitHandler; + if (parsed.opts.cleanup) { + removeExitHandler = onExit(() => { + spawned.kill(); + }); + } + + let timeoutId = null; + let timedOut = false; + + const cleanup = () => { + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (removeExitHandler) { + removeExitHandler(); + } + }; + + if (parsed.opts.timeout > 0) { + timeoutId = setTimeout(() => { + timeoutId = null; + timedOut = true; + spawned.kill(parsed.opts.killSignal); + }, parsed.opts.timeout); + } + + const processDone = new Promise(resolve => { + spawned.on('exit', (code, signal) => { + cleanup(); + resolve({code, signal}); + }); + + spawned.on('error', err => { + cleanup(); + resolve({error: err}); + }); + + if (spawned.stdin) { + spawned.stdin.on('error', err => { + cleanup(); + resolve({error: err}); + }); + } + }); + + function destroy() { + if (spawned.stdout) { + spawned.stdout.destroy(); + } + + if (spawned.stderr) { + spawned.stderr.destroy(); + } + } + + const handlePromise = () => pFinally(Promise.all([ + processDone, + getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), + getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) + ]).then(arr => { + const result = arr[0]; + result.stdout = arr[1]; + result.stderr = arr[2]; + + if (result.error || result.code !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed, + timedOut + }); + + // TODO: missing some timeout logic for killed + // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 + // err.killed = spawned.killed || killed; + err.killed = err.killed || spawned.killed; + + if (!parsed.opts.reject) { + return err; + } + + throw err; + } + + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: false, + killed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; + }), destroy); + + crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); + + handleInput(spawned, parsed.opts.input); + + spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); + spawned.catch = onrejected => handlePromise().catch(onrejected); + + return spawned; +}; + +// TODO: set `stderr: 'ignore'` when that option is implemented +module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); + +// TODO: set `stdout: 'ignore'` when that option is implemented +module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); + +module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); + +module.exports.sync = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const joinedCmd = joinCmd(cmd, args); + + if (isStream(parsed.opts.input)) { + throw new TypeError('The `input` option cannot be a stream in sync 
mode'); + } + + const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); + result.code = result.status; + + if (result.error || result.status !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed + }); + + if (!parsed.opts.reject) { + return err; + } + + throw err; + } + + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; +}; + +module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); diff --git a/node_modules/bin-version/node_modules/execa/lib/errname.js b/node_modules/bin-version/node_modules/execa/lib/errname.js new file mode 100644 index 00000000..e367837b --- /dev/null +++ b/node_modules/bin-version/node_modules/execa/lib/errname.js @@ -0,0 +1,39 @@ +'use strict'; +// Older verions of Node.js might not have `util.getSystemErrorName()`. +// In that case, fall back to a deprecated internal. +const util = require('util'); + +let uv; + +if (typeof util.getSystemErrorName === 'function') { + module.exports = util.getSystemErrorName; +} else { + try { + uv = process.binding('uv'); + + if (typeof uv.errname !== 'function') { + throw new TypeError('uv.errname is not a function'); + } + } catch (err) { + console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); + uv = null; + } + + module.exports = code => errname(uv, code); +} + +// Used for testing the fallback behavior +module.exports.__test__ = errname; + +function errname(uv, code) { + if (uv) { + return uv.errname(code); + } + + if (!(code < 0)) { + throw new Error('err >= 0'); + } + + return `Unknown system error ${code}`; +} + diff --git a/node_modules/bin-version/node_modules/execa/lib/stdio.js b/node_modules/bin-version/node_modules/execa/lib/stdio.js new file mode 100644 index 00000000..a82d4683 --- /dev/null +++ b/node_modules/bin-version/node_modules/execa/lib/stdio.js @@ -0,0 +1,41 @@ +'use strict'; +const alias = ['stdin', 'stdout', 'stderr']; + +const hasAlias = opts => alias.some(x => Boolean(opts[x])); + +module.exports = opts => { + if (!opts) { + return null; + } + + if (opts.stdio && hasAlias(opts)) { + throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); + } + + if (typeof opts.stdio === 'string') { + return opts.stdio; + } + + const stdio = opts.stdio || []; + + if (!Array.isArray(stdio)) { + throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + } + + const result = []; + const len = Math.max(stdio.length, alias.length); + + for (let i = 0; i < len; i++) { + let value = null; + + if (stdio[i] !== undefined) { + value = stdio[i]; + } else if (opts[alias[i]] !== undefined) { + value = opts[alias[i]]; + } + + result[i] = value; + } + + return result; +}; diff --git a/node_modules/bin-version/node_modules/execa/license b/node_modules/bin-version/node_modules/execa/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-version/node_modules/execa/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bin-version/node_modules/execa/package.json b/node_modules/bin-version/node_modules/execa/package.json new file mode 100644 index 00000000..ad98225c --- /dev/null +++ b/node_modules/bin-version/node_modules/execa/package.json @@ -0,0 +1,69 @@ +{ + "name": "execa", + "version": "1.0.0", + "description": "A better `child_process`", + "license": "MIT", + "repository": "sindresorhus/execa", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && nyc ava" + }, + "files": [ + "index.js", + "lib" + ], + "keywords": [ + "exec", + "child", + "process", + "execute", + "fork", + "execfile", + "spawn", + "file", + "shell", + "bin", + "binary", + "binaries", + "npm", + "path", + "local" + ], + "dependencies": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + }, + "devDependencies": { + "ava": "*", + "cat-names": "^1.0.2", + "coveralls": "^3.0.1", + "delay": "^3.0.0", + "is-running": "^2.0.0", + "nyc": "^13.0.1", + "tempfile": "^2.0.0", + "xo": "*" + }, + "nyc": { + "reporter": [ + "text", + "lcov" + ], + "exclude": [ + "**/fixtures/**", + "**/test.js", + "**/test/**" + ] + } +} diff --git a/node_modules/bin-version/node_modules/execa/readme.md b/node_modules/bin-version/node_modules/execa/readme.md new file mode 100644 index 00000000..f3f533d9 --- /dev/null +++ b/node_modules/bin-version/node_modules/execa/readme.md @@ -0,0 +1,327 @@ +# execa [![Build Status: Linux](https://travis-ci.org/sindresorhus/execa.svg?branch=master)](https://travis-ci.org/sindresorhus/execa) [![Build status: Windows](https://ci.appveyor.com/api/projects/status/x5ajamxtjtt93cqv/branch/master?svg=true)](https://ci.appveyor.com/project/sindresorhus/execa/branch/master) [![Coverage Status](https://coveralls.io/repos/github/sindresorhus/execa/badge.svg?branch=master)](https://coveralls.io/github/sindresorhus/execa?branch=master) + +> A better [`child_process`](https://nodejs.org/api/child_process.html) + + +## Why + +- Promise interface. +- [Strips EOF](https://github.com/sindresorhus/strip-eof) from the output so you don't have to `stdout.trim()`. +- Supports [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries cross-platform. +- [Improved Windows support.](https://github.com/IndigoUnited/node-cross-spawn#why) +- Higher max buffer. 10 MB instead of 200 KB. 
+- [Executes locally installed binaries by name.](#preferlocal) +- [Cleans up spawned processes when the parent process dies.](#cleanup) + + +## Install + +``` +$ npm install execa +``` + + + + + + +## Usage + +```js +const execa = require('execa'); + +(async () => { + const {stdout} = await execa('echo', ['unicorns']); + console.log(stdout); + //=> 'unicorns' +})(); +``` + +Additional examples: + +```js +const execa = require('execa'); + +(async () => { + // Pipe the child process stdout to the current stdout + execa('echo', ['unicorns']).stdout.pipe(process.stdout); + + + // Run a shell command + const {stdout} = await execa.shell('echo unicorns'); + //=> 'unicorns' + + + // Catching an error + try { + await execa.shell('exit 3'); + } catch (error) { + console.log(error); + /* + { + message: 'Command failed: /bin/sh -c exit 3' + killed: false, + code: 3, + signal: null, + cmd: '/bin/sh -c exit 3', + stdout: '', + stderr: '', + timedOut: false + } + */ + } +})(); + +// Catching an error with a sync method +try { + execa.shellSync('exit 3'); +} catch (error) { + console.log(error); + /* + { + message: 'Command failed: /bin/sh -c exit 3' + code: 3, + signal: null, + cmd: '/bin/sh -c exit 3', + stdout: '', + stderr: '', + timedOut: false + } + */ +} +``` + + +## API + +### execa(file, [arguments], [options]) + +Execute a file. + +Think of this as a mix of `child_process.execFile` and `child_process.spawn`. + +Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. + +### execa.stdout(file, [arguments], [options]) + +Same as `execa()`, but returns only `stdout`. + +### execa.stderr(file, [arguments], [options]) + +Same as `execa()`, but returns only `stderr`. + +### execa.shell(command, [options]) + +Execute a command through the system shell. Prefer `execa()` whenever possible, as it's both faster and safer. + +Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess). + +The `child_process` instance is enhanced to also be promise for a result object with `stdout` and `stderr` properties. + +### execa.sync(file, [arguments], [options]) + +Execute a file synchronously. + +Returns the same result object as [`child_process.spawnSync`](https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options). + +This method throws an `Error` if the command fails. + +### execa.shellSync(file, [options]) + +Execute a command synchronously through the system shell. + +Returns the same result object as [`child_process.spawnSync`](https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options). + +### options + +Type: `Object` + +#### cwd + +Type: `string`
+Default: `process.cwd()` + +Current working directory of the child process. + +#### env + +Type: `Object`
+Default: `process.env` + +Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this. + +#### extendEnv + +Type: `boolean`
+Default: `true` + +Set to `false` if you don't want to extend the environment variables when providing the `env` property. + +#### argv0 + +Type: `string` + +Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified. + +#### stdio + +Type: `string[]` `string`
+Default: `pipe` + +Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. + +#### detached + +Type: `boolean` + +Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). + +#### uid + +Type: `number` + +Sets the user identity of the process. + +#### gid + +Type: `number` + +Sets the group identity of the process. + +#### shell + +Type: `boolean` `string`
+Default: `false` + +If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. + +#### stripEof + +Type: `boolean`
+Default: `true` + +[Strip EOF](https://github.com/sindresorhus/strip-eof) (last newline) from the output. + +#### preferLocal + +Type: `boolean`
+Default: `true` + +Prefer locally installed binaries when looking for a binary to execute.
+If you `$ npm install foo`, you can then `execa('foo')`. + +#### localDir + +Type: `string`
+Default: `process.cwd()` + +Preferred path to find locally installed binaries in (use with `preferLocal`). + +#### input + +Type: `string` `Buffer` `stream.Readable` + +Write some input to the `stdin` of your binary.
+Streams are not allowed when using the synchronous methods. + +#### reject + +Type: `boolean`
+Default: `true` + +Setting this to `false` resolves the promise with the error instead of rejecting it. + +#### cleanup + +Type: `boolean`
+Default: `true` + +Keep track of the spawned process and `kill` it when the parent process exits. + +#### encoding + +Type: `string`
+Default: `utf8` + +Specify the character encoding used to decode the `stdout` and `stderr` output. + +#### timeout + +Type: `number`
+Default: `0` + +If timeout is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than timeout milliseconds. + +#### buffer + +Type: `boolean`
+Default: `true` + +Buffer the output from the spawned process. When buffering is disabled you must consume the output of the `stdout` and `stderr` streams because the promise will not be resolved/rejected until they have completed. + +#### maxBuffer + +Type: `number`
+Default: `10000000` (10MB) + +Largest amount of data in bytes allowed on `stdout` or `stderr`. + +#### killSignal + +Type: `string` `number`
+Default: `SIGTERM` + +Signal value to be used when the spawned process will be killed. + +#### stdin + +Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### stdout + +Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### stderr + +Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### windowsVerbatimArguments + +Type: `boolean`
+Default: `false` + +If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. + + +## Tips + +### Save and pipe output from a child process + +Let's say you want to show the output of a child process in real-time while also saving it to a variable. + +```js +const execa = require('execa'); +const getStream = require('get-stream'); + +const stream = execa('echo', ['foo']).stdout; + +stream.pipe(process.stdout); + +getStream(stream).then(value => { + console.log('child output:', value); +}); +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-version/node_modules/get-stream/buffer-stream.js b/node_modules/bin-version/node_modules/get-stream/buffer-stream.js new file mode 100644 index 00000000..4121c8e5 --- /dev/null +++ b/node_modules/bin-version/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,51 @@ +'use strict'; +const {PassThrough} = require('stream'); + +module.exports = options => { + options = Object.assign({}, options); + + const {array} = options; + let {encoding} = options; + const buffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } + + if (buffer) { + encoding = null; + } + + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + stream.on('data', chunk => { + ret.push(chunk); + + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return ret; + } + + return buffer ? Buffer.concat(ret, len) : ret.join(''); + }; + + stream.getBufferedLength = () => len; + + return stream; +}; diff --git a/node_modules/bin-version/node_modules/get-stream/index.js b/node_modules/bin-version/node_modules/get-stream/index.js new file mode 100644 index 00000000..7e5584a6 --- /dev/null +++ b/node_modules/bin-version/node_modules/get-stream/index.js @@ -0,0 +1,50 @@ +'use strict'; +const pump = require('pump'); +const bufferStream = require('./buffer-stream'); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + options = Object.assign({maxBuffer: Infinity}, options); + + const {maxBuffer} = options; + + let stream; + return new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); + } + reject(error); + }; + + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } + + resolve(); + }); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }).then(() => stream.getBufferedValue()); +} + +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); +module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); +module.exports.MaxBufferError = MaxBufferError; diff --git a/node_modules/bin-version/node_modules/get-stream/license b/node_modules/bin-version/node_modules/get-stream/license new file mode 100644 index 
00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-version/node_modules/get-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bin-version/node_modules/get-stream/package.json b/node_modules/bin-version/node_modules/get-stream/package.json new file mode 100644 index 00000000..619651cd --- /dev/null +++ b/node_modules/bin-version/node_modules/get-stream/package.json @@ -0,0 +1,46 @@ +{ + "name": "get-stream", + "version": "4.1.0", + "description": "Get a stream as a string, buffer, or array", + "license": "MIT", + "repository": "sindresorhus/get-stream", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "buffer-stream.js" + ], + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object" + ], + "dependencies": { + "pump": "^3.0.0" + }, + "devDependencies": { + "ava": "*", + "into-stream": "^3.0.0", + "xo": "*" + } +} diff --git a/node_modules/bin-version/node_modules/get-stream/readme.md b/node_modules/bin-version/node_modules/get-stream/readme.md new file mode 100644 index 00000000..b87a4d37 --- /dev/null +++ b/node_modules/bin-version/node_modules/get-stream/readme.md @@ -0,0 +1,123 @@ +# get-stream [![Build Status](https://travis-ci.org/sindresorhus/get-stream.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stream) + +> Get a stream as a string, buffer, or array + + +## Install + +``` +$ npm install get-stream +``` + + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); + +(async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); + /* + ,,))))))));, + __)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. 
%,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +})(); +``` + + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, [options]) + +Get the `stream` as a string. + +#### options + +Type: `Object` + +##### encoding + +Type: `string`
+Default: `utf8` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number`
+Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error. + +### getStream.buffer(stream, [options]) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, [options]) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +(async () => { + try { + await getStream(streamThatErrorsAtTheEnd('unicorn')); + } catch (error) { + console.log(error.bufferedData); + //=> 'unicorn' + } +})() +``` + + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. 
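As a rough sketch of the `buffer` variant and the `maxBuffer`/`MaxBufferError` behaviour described above (`unicorn.txt` is just a placeholder input file):

```js
const fs = require('fs');
const getStream = require('get-stream');

(async () => {
	// Collect the whole stream as a Buffer instead of a string
	const data = await getStream.buffer(fs.createReadStream('unicorn.txt'));
	console.log(data.length);

	// Reject once more than 1024 characters have been buffered
	try {
		await getStream(fs.createReadStream('unicorn.txt'), {maxBuffer: 1024});
	} catch (error) {
		if (error instanceof getStream.MaxBufferError) {
			console.log('unicorn.txt exceeds the configured maxBuffer');
			console.log(error.bufferedData.length); // data read before the limit was hit
		}
	}
})();
```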
+ + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-version/package.json b/node_modules/bin-version/package.json new file mode 100644 index 00000000..b3b6d09c --- /dev/null +++ b/node_modules/bin-version/package.json @@ -0,0 +1,40 @@ +{ + "name": "bin-version", + "version": "3.1.0", + "description": "Get the version of a binary in semver format", + "license": "MIT", + "repository": "sindresorhus/bin-version", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "bin", + "binary", + "executable", + "version", + "semver", + "semantic", + "cli" + ], + "dependencies": { + "execa": "^1.0.0", + "find-versions": "^3.0.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/bin-version/readme.md b/node_modules/bin-version/readme.md new file mode 100644 index 00000000..4152faab --- /dev/null +++ b/node_modules/bin-version/readme.md @@ -0,0 +1,72 @@ +# bin-version [![Build Status](https://travis-ci.com/sindresorhus/bin-version.svg?branch=master)](https://travis-ci.com/sindresorhus/bin-version) + +> Get the version of a binary in [semver](https://github.com/npm/node-semver) format + + +## Install + +``` +$ npm install bin-version +``` + + +## Usage + +``` +$ curl --version +curl 7.30.0 (x86_64-apple-darwin13.0) +``` + +```js +const binVersion = require('bin-version'); + +(async () => { + console.log(await binVersion('curl')); + //=> '7.30.0' +})(); +``` + +``` +$ openssl version +OpenSSL 1.0.2d 9 Jul 2015 +``` + +```js +(async () => { + console.log(await binVersion('openssl', {args: ['version']})); + //=> '1.0.2' +})(); +``` + +## API + +### binVersion(binary, [options]) + +Returns a `Promise` with the version of the `binary`. + +#### binary + +Type: `string` + +The name of or path to the binary to get the version from. + +#### options + +Type: `object` + +##### args + +Type: `string[]` +Default: `['--version']` + +The arguments to pass to `binary` so that it will print its version. 
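As a rough sketch of this option together with the error path in `bin-version/index.js` above (`ffmpeg` and its `-version` flag are placeholders for any binary that prints its version with a non-default flag):

```js
const binVersion = require('bin-version');

(async () => {
	try {
		// Some tools print their version with a flag other than `--version`
		console.log(await binVersion('ffmpeg', {args: ['-version']}));
		//=> e.g. '4.2.2'
	} catch (error) {
		// If the binary is not installed, the message explains it:
		// "Couldn't find the `ffmpeg` binary. Make sure it's installed and in your $PATH."
		console.error(error.message);
	}
})();
```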
+ +## Related + +- [bin-version-cli](https://github.com/sindresorhus/bin-version-cli) - CLI for this module +- [find-versions](https://github.com/sindresorhus/find-versions) - Find semver versions in a string + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/index.js b/node_modules/bin-wrapper/index.js new file mode 100644 index 00000000..1aba001f --- /dev/null +++ b/node_modules/bin-wrapper/index.js @@ -0,0 +1,208 @@ +'use strict'; +const fs = require('fs'); +const path = require('path'); +const url = require('url'); +const pify = require('pify'); +const importLazy = require('import-lazy')(require); + +const binCheck = importLazy('bin-check'); +const binVersionCheck = importLazy('bin-version-check'); +const download = importLazy('download'); +const osFilterObj = importLazy('os-filter-obj'); + +const statAsync = pify(fs.stat); +const chmodAsync = pify(fs.chmod); + +/** + * Initialize a new `BinWrapper` + * + * @param {Object} options + * @api public + */ +module.exports = class BinWrapper { + constructor(options = {}) { + this.options = options; + + if (this.options.strip <= 0) { + this.options.strip = 0; + } else if (!this.options.strip) { + this.options.strip = 1; + } + } + + /** + * Get or set files to download + * + * @param {String} src + * @param {String} os + * @param {String} arch + * @api public + */ + src(src, os, arch) { + if (arguments.length === 0) { + return this._src; + } + + this._src = this._src || []; + this._src.push({ + url: src, + os, + arch + }); + + return this; + } + + /** + * Get or set the destination + * + * @param {String} dest + * @api public + */ + dest(dest) { + if (arguments.length === 0) { + return this._dest; + } + + this._dest = dest; + return this; + } + + /** + * Get or set the binary + * + * @param {String} bin + * @api public + */ + use(bin) { + if (arguments.length === 0) { + return this._use; + } + + this._use = bin; + return this; + } + + /** + * Get or set a semver range to test the binary against + * + * @param {String} range + * @api public + */ + version(range) { + if (arguments.length === 0) { + return this._version; + } + + this._version = range; + return this; + } + + /** + * Get path to the binary + * + * @api public + */ + path() { + return path.join(this.dest(), this.use()); + } + + /** + * Run + * + * @param {Array} cmd + * @api public + */ + run(cmd = ['--version']) { + return this.findExisting().then(() => { + if (this.options.skipCheck) { + return; + } + + return this.runCheck(cmd); + }); + } + + /** + * Run binary check + * + * @param {Array} cmd + * @api private + */ + runCheck(cmd) { + return binCheck(this.path(), cmd).then(works => { + if (!works) { + throw new Error(`The \`${this.path()}\` binary doesn't seem to work correctly`); + } + + if (this.version()) { + return binVersionCheck(this.path(), this.version()); + } + + return Promise.resolve(); + }); + } + + /** + * Find existing files + * + * @api private + */ + findExisting() { + return statAsync(this.path()).catch(error => { + if (error && error.code === 'ENOENT') { + return this.download(); + } + + return Promise.reject(error); + }); + } + + /** + * Download files + * + * @api private + */ + download() { + const files = osFilterObj(this.src() || []); + const urls = []; + + if (files.length === 0) { + return Promise.reject(new Error('No binary found matching your system. 
It\'s probably not supported.')); + } + + files.forEach(file => urls.push(file.url)); + + return Promise.all(urls.map(url => download(url, this.dest(), { + extract: true, + strip: this.options.strip + }))).then(result => { + const resultingFiles = flatten(result.map((item, index) => { + if (Array.isArray(item)) { + return item.map(file => file.path); + } + + const parsedUrl = url.parse(files[index].url); + const parsedPath = path.parse(parsedUrl.pathname); + + return parsedPath.base; + })); + + return Promise.all(resultingFiles.map(fileName => { + return chmodAsync(path.join(this.dest(), fileName), 0o755); + })); + }); + } +}; + +function flatten(arr) { + return arr.reduce((acc, elem) => { + if (Array.isArray(elem)) { + acc.push(...elem); + } else { + acc.push(elem); + } + + return acc; + }, []); +} diff --git a/node_modules/bin-wrapper/license b/node_modules/bin-wrapper/license new file mode 100644 index 00000000..1c7c9750 --- /dev/null +++ b/node_modules/bin-wrapper/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Kevin Mårtensson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
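As a rough sketch of the `src`/`dest`/`use`/`version`/`run` chain implemented in `bin-wrapper/index.js` above (every URL, archive name and version range here is a placeholder, not a real release):

```js
const path = require('path');
const BinWrapper = require('bin-wrapper');

const base = 'https://example.com/some-tool/v1.2.3';

const bin = new BinWrapper()
	.src(`${base}/some-tool-macos.tar.gz`, 'darwin')
	.src(`${base}/some-tool-linux-x64.tar.gz`, 'linux', 'x64')
	.src(`${base}/some-tool-win64.zip`, 'win32', 'x64')
	.dest(path.join(__dirname, 'vendor'))
	// Name of the binary expected inside the downloaded archive
	.use(process.platform === 'win32' ? 'some-tool.exe' : 'some-tool')
	.version('>=1.2.0');

// Downloads the source matching the current OS/arch if the binary is not
// already in `dest`, then checks that it runs and satisfies the semver range.
bin.run(['--version'])
	.then(() => console.log('binary available at', bin.path()))
	.catch(console.error);
```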
diff --git a/node_modules/bin-wrapper/node_modules/download/index.js b/node_modules/bin-wrapper/node_modules/download/index.js new file mode 100644 index 00000000..342ff476 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/index.js @@ -0,0 +1,119 @@ +'use strict'; +const fs = require('fs'); +const path = require('path'); +const url = require('url'); +const caw = require('caw'); +const contentDisposition = require('content-disposition'); +const archiveType = require('archive-type'); +const decompress = require('decompress'); +const filenamify = require('filenamify'); +const getStream = require('get-stream'); +const got = require('got'); +const makeDir = require('make-dir'); +const pify = require('pify'); +const pEvent = require('p-event'); +const fileType = require('file-type'); +const extName = require('ext-name'); + +const fsP = pify(fs); +const filenameFromPath = res => path.basename(url.parse(res.requestUrl).pathname); + +const getExtFromMime = res => { + const header = res.headers['content-type']; + + if (!header) { + return null; + } + + const exts = extName.mime(header); + + if (exts.length !== 1) { + return null; + } + + return exts[0].ext; +}; + +const getFilename = (res, data) => { + const header = res.headers['content-disposition']; + + if (header) { + const parsed = contentDisposition.parse(header); + + if (parsed.parameters && parsed.parameters.filename) { + return parsed.parameters.filename; + } + } + + let filename = filenameFromPath(res); + + if (!path.extname(filename)) { + const ext = (fileType(data) || {}).ext || getExtFromMime(res); + + if (ext) { + filename = `${filename}.${ext}`; + } + } + + return filename; +}; + +const getProtocolFromUri = uri => { + let {protocol} = url.parse(uri); + + if (protocol) { + protocol = protocol.slice(0, -1); + } + + return protocol; +}; + +module.exports = (uri, output, opts) => { + if (typeof output === 'object') { + opts = output; + output = null; + } + + const protocol = getProtocolFromUri(uri); + + opts = Object.assign({ + encoding: null, + rejectUnauthorized: process.env.npm_config_strict_ssl !== 'false' + }, opts); + + const agent = caw(opts.proxy, {protocol}); + const stream = got.stream(uri, Object.assign({agent}, opts)) + .on('redirect', (response, nextOptions) => { + const redirectProtocol = getProtocolFromUri(nextOptions.href); + if (redirectProtocol && redirectProtocol !== protocol) { + nextOptions.agent = caw(opts.proxy, {protocol: redirectProtocol}); + } + }); + + const promise = pEvent(stream, 'response').then(res => { + const encoding = opts.encoding === null ? 'buffer' : opts.encoding; + return Promise.all([getStream(stream, {encoding}), res]); + }).then(result => { + const [data, res] = result; + + if (!output) { + return opts.extract && archiveType(data) ? 
decompress(data, opts) : data; + } + + const filename = opts.filename || filenamify(getFilename(res, data)); + const outputFilepath = path.join(output, filename); + + if (opts.extract && archiveType(data)) { + return decompress(data, path.dirname(outputFilepath), opts); + } + + return makeDir(path.dirname(outputFilepath)) + .then(() => fsP.writeFile(outputFilepath, data)) + .then(() => data); + }); + + stream.then = promise.then.bind(promise); + stream.catch = promise.catch.bind(promise); + + return stream; +}; diff --git a/node_modules/bin-wrapper/node_modules/download/license b/node_modules/bin-wrapper/node_modules/download/license new file mode 100644 index 00000000..db6bc32c --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Kevin Mårtensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/index.js b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/index.js new file mode 100644 index 00000000..1dee43ad --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/index.js @@ -0,0 +1,84 @@ +'use strict'; + +const processFn = (fn, opts) => function () { + const P = opts.promiseModule; + const args = new Array(arguments.length); + + for (let i = 0; i < arguments.length; i++) { + args[i] = arguments[i]; + } + + return new P((resolve, reject) => { + if (opts.errorFirst) { + args.push(function (err, result) { + if (opts.multiArgs) { + const results = new Array(arguments.length - 1); + + for (let i = 1; i < arguments.length; i++) { + results[i - 1] = arguments[i]; + } + + if (err) { + results.unshift(err); + reject(results); + } else { + resolve(results); + } + } else if (err) { + reject(err); + } else { + resolve(result); + } + }); + } else { + args.push(function (result) { + if (opts.multiArgs) { + const results = new Array(arguments.length - 1); + + for (let i = 0; i < arguments.length; i++) { + results[i] = arguments[i]; + } + + resolve(results); + } else { + resolve(result); + } + }); + } + + fn.apply(this, args); + }); +}; + +module.exports = (obj, opts) => { + opts = Object.assign({ + exclude: [/.+(Sync|Stream)$/], + errorFirst: true, + promiseModule: Promise + }, opts); + + const filter = key => { + const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key); + return opts.include ? 
opts.include.some(match) : !opts.exclude.some(match); + }; + + let ret; + if (typeof obj === 'function') { + ret = function () { + if (opts.excludeMain) { + return obj.apply(this, arguments); + } + + return processFn(obj, opts).apply(this, arguments); + }; + } else { + ret = Object.create(Object.getPrototypeOf(obj)); + } + + for (const key in obj) { // eslint-disable-line guard-for-in + const x = obj[key]; + ret[key] = typeof x === 'function' && filter(key) ? processFn(x, opts) : x; + } + + return ret; +}; diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/license b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
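The pify `index.js` above implements the `errorFirst`, `multiArgs`, and `include`/`exclude` options that the readme further down documents. As a quick illustration of the `errorFirst: false` path (the readme names `fs.exists()` as the typical case for it), a minimal sketch:

```js
// Sketch: promisifying a callback API that has no error argument (errorFirst: false).
const fs = require('fs');
const pify = require('pify');

// fs.exists() calls back with a single boolean, so the default error-first handling
// would misread it; errorFirst: false makes the promise resolve with that boolean.
const exists = pify(fs.exists, {errorFirst: false});

exists('package.json').then(found => {
	console.log(found ? 'package.json is present' : 'package.json is missing');
});
```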
diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/package.json b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/package.json new file mode 100644 index 00000000..468d8576 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/package.json @@ -0,0 +1,51 @@ +{ + "name": "pify", + "version": "3.0.0", + "description": "Promisify a callback-style function", + "license": "MIT", + "repository": "sindresorhus/pify", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava && npm run optimization-test", + "optimization-test": "node --allow-natives-syntax optimization-test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "promises", + "promisify", + "all", + "denodify", + "denodeify", + "callback", + "cb", + "node", + "then", + "thenify", + "convert", + "transform", + "wrap", + "wrapper", + "bind", + "to", + "async", + "await", + "es2015", + "bluebird" + ], + "devDependencies": { + "ava": "*", + "pinkie-promise": "^2.0.0", + "v8-natives": "^1.0.0", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/download/node_modules/pify/readme.md b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/readme.md new file mode 100644 index 00000000..376ca4e5 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/node_modules/pify/readme.md @@ -0,0 +1,131 @@ +# pify [![Build Status](https://travis-ci.org/sindresorhus/pify.svg?branch=master)](https://travis-ci.org/sindresorhus/pify) + +> Promisify a callback-style function + + +## Install + +``` +$ npm install --save pify +``` + + +## Usage + +```js +const fs = require('fs'); +const pify = require('pify'); + +// Promisify a single function +pify(fs.readFile)('package.json', 'utf8').then(data => { + console.log(JSON.parse(data).name); + //=> 'pify' +}); + +// Promisify all methods in a module +pify(fs).readFile('package.json', 'utf8').then(data => { + console.log(JSON.parse(data).name); + //=> 'pify' +}); +``` + + +## API + +### pify(input, [options]) + +Returns a `Promise` wrapped version of the supplied function or module. + +#### input + +Type: `Function` `Object` + +Callback-style function or module whose methods you want to promisify. + +#### options + +##### multiArgs + +Type: `boolean`
+Default: `false` + +By default, the promisified function will only return the second argument from the callback, which works fine for most APIs. This option can be useful for modules like `request` that return multiple arguments. Turning this on will make it return an array of all arguments from the callback, excluding the error argument, instead of just the second argument. This also applies to rejections, where it returns an array of all the callback arguments, including the error. + +```js +const request = require('request'); +const pify = require('pify'); + +pify(request, {multiArgs: true})('https://sindresorhus.com').then(result => { + const [httpResponse, body] = result; +}); +``` + +##### include + +Type: `string[]` `RegExp[]` + +Methods in a module to promisify. Remaining methods will be left untouched. + +##### exclude + +Type: `string[]` `RegExp[]`
+Default: `[/.+(Sync|Stream)$/]` + +Methods in a module **not** to promisify. Methods with names ending with `'Sync'` are excluded by default. + +##### excludeMain + +Type: `boolean`
+Default: `false` + +If given module is a function itself, it will be promisified. Turn this option on if you want to promisify only methods of the module. + +```js +const pify = require('pify'); + +function fn() { + return true; +} + +fn.method = (data, callback) => { + setImmediate(() => { + callback(null, data); + }); +}; + +// Promisify methods but not `fn()` +const promiseFn = pify(fn, {excludeMain: true}); + +if (promiseFn()) { + promiseFn.method('hi').then(data => { + console.log(data); + }); +} +``` + +##### errorFirst + +Type: `boolean`
+Default: `true` + +Whether the callback has an error as the first argument. You'll want to set this to `false` if you're dealing with an API that doesn't have an error as the first argument, like `fs.exists()`, some browser APIs, Chrome Extension APIs, etc. + +##### promiseModule + +Type: `Function` + +Custom promise module to use instead of the native one. + +Check out [`pinkie-promise`](https://github.com/floatdrop/pinkie-promise) if you need a tiny promise polyfill. + + +## Related + +- [p-event](https://github.com/sindresorhus/p-event) - Promisify an event by waiting for it to be emitted +- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/node_modules/download/package.json b/node_modules/bin-wrapper/node_modules/download/package.json new file mode 100644 index 00000000..0b45cf6e --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/package.json @@ -0,0 +1,51 @@ +{ + "name": "download", + "version": "7.1.0", + "description": "Download and extract files", + "license": "MIT", + "repository": "kevva/download", + "author": { + "email": "kevinmartensson@gmail.com", + "name": "Kevin Mårtensson", + "url": "github.com/kevva" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "download", + "extract", + "http", + "request", + "url" + ], + "dependencies": { + "archive-type": "^4.0.0", + "caw": "^2.0.1", + "content-disposition": "^0.5.2", + "decompress": "^4.2.0", + "ext-name": "^5.0.0", + "file-type": "^8.1.0", + "filenamify": "^2.0.0", + "get-stream": "^3.0.0", + "got": "^8.3.1", + "make-dir": "^1.2.0", + "p-event": "^2.1.0", + "pify": "^3.0.0" + }, + "devDependencies": { + "ava": "*", + "is-zip": "^1.0.0", + "nock": "^9.2.5", + "path-exists": "^3.0.0", + "random-buffer": "^0.1.0", + "rimraf": "^2.6.2", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/download/readme.md b/node_modules/bin-wrapper/node_modules/download/readme.md new file mode 100644 index 00000000..dfb3fcf2 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/download/readme.md @@ -0,0 +1,86 @@ +# download [![Build Status](https://travis-ci.org/kevva/download.svg?branch=master)](https://travis-ci.org/kevva/download) + +> Download and extract files + +*See [download-cli](https://github.com/kevva/download-cli) for the command-line version.* + + +## Install + +``` +$ npm install download +``` + + +## Usage + +```js +const fs = require('fs'); +const download = require('download'); + +download('http://unicorn.com/foo.jpg', 'dist').then(() => { + console.log('done!'); +}); + +download('http://unicorn.com/foo.jpg').then(data => { + fs.writeFileSync('dist/foo.jpg', data); +}); + +download('unicorn.com/foo.jpg').pipe(fs.createWriteStream('dist/foo.jpg')); + +Promise.all([ + 'unicorn.com/foo.jpg', + 'cats.com/dancing.gif' +].map(x => download(x, 'dist'))).then(() => { + console.log('files downloaded!'); +}); +``` + + +## API + +### download(url, [destination], [options]) + +Returns both a `Promise` and a [Duplex stream](https://nodejs.org/api/stream.html#stream_class_stream_duplex) with [additional events](https://github.com/sindresorhus/got#streams-1). + +#### url + +Type: `string` + +URL to download. + +#### destination + +Type: `string` + +Path to where your file will be written. 
+ +#### options + +Type: `Object` + +Same options as [`got`](https://github.com/sindresorhus/got#options) and [`decompress`](https://github.com/kevva/decompress#options) in addition to the ones below. + +##### extract + +Type: `boolean`
+Default: `false` + +If set to `true`, try extracting the file using [`decompress`](https://github.com/kevva/decompress). + +##### filename + +Type: `string` + +Name of the saved file. + +##### proxy + +Type: `string` + +Proxy endpoint. + + +## License + +MIT © [Kevin Mårtensson](https://github.com/kevva) diff --git a/node_modules/bin-wrapper/node_modules/file-type/index.js b/node_modules/bin-wrapper/node_modules/file-type/index.js new file mode 100644 index 00000000..669fb558 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/file-type/index.js @@ -0,0 +1,809 @@ +'use strict'; +const toBytes = s => [...s].map(c => c.charCodeAt(0)); +const xpiZipFilename = toBytes('META-INF/mozilla.rsa'); +const oxmlContentTypes = toBytes('[Content_Types].xml'); +const oxmlRels = toBytes('_rels/.rels'); + +module.exports = input => { + const buf = input instanceof Uint8Array ? input : new Uint8Array(input); + + if (!(buf && buf.length > 1)) { + return null; + } + + const check = (header, options) => { + options = Object.assign({ + offset: 0 + }, options); + + for (let i = 0; i < header.length; i++) { + // If a bitmask is set + if (options.mask) { + // If header doesn't equal `buf` with bits masked off + if (header[i] !== (options.mask[i] & buf[i + options.offset])) { + return false; + } + } else if (header[i] !== buf[i + options.offset]) { + return false; + } + } + + return true; + }; + + const checkString = (header, options) => check(toBytes(header), options); + + if (check([0xFF, 0xD8, 0xFF])) { + return { + ext: 'jpg', + mime: 'image/jpeg' + }; + } + + if (check([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A])) { + return { + ext: 'png', + mime: 'image/png' + }; + } + + if (check([0x47, 0x49, 0x46])) { + return { + ext: 'gif', + mime: 'image/gif' + }; + } + + if (check([0x57, 0x45, 0x42, 0x50], {offset: 8})) { + return { + ext: 'webp', + mime: 'image/webp' + }; + } + + if (check([0x46, 0x4C, 0x49, 0x46])) { + return { + ext: 'flif', + mime: 'image/flif' + }; + } + + // Needs to be before `tif` check + if ( + (check([0x49, 0x49, 0x2A, 0x0]) || check([0x4D, 0x4D, 0x0, 0x2A])) && + check([0x43, 0x52], {offset: 8}) + ) { + return { + ext: 'cr2', + mime: 'image/x-canon-cr2' + }; + } + + if ( + check([0x49, 0x49, 0x2A, 0x0]) || + check([0x4D, 0x4D, 0x0, 0x2A]) + ) { + return { + ext: 'tif', + mime: 'image/tiff' + }; + } + + if (check([0x42, 0x4D])) { + return { + ext: 'bmp', + mime: 'image/bmp' + }; + } + + if (check([0x49, 0x49, 0xBC])) { + return { + ext: 'jxr', + mime: 'image/vnd.ms-photo' + }; + } + + if (check([0x38, 0x42, 0x50, 0x53])) { + return { + ext: 'psd', + mime: 'image/vnd.adobe.photoshop' + }; + } + + // Zip-based file formats + // Need to be before the `zip` check + if (check([0x50, 0x4B, 0x3, 0x4])) { + if ( + check([0x6D, 0x69, 0x6D, 0x65, 0x74, 0x79, 0x70, 0x65, 0x61, 0x70, 0x70, 0x6C, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6F, 0x6E, 0x2F, 0x65, 0x70, 0x75, 0x62, 0x2B, 0x7A, 0x69, 0x70], {offset: 30}) + ) { + return { + ext: 'epub', + mime: 'application/epub+zip' + }; + } + + // Assumes signed `.xpi` from addons.mozilla.org + if (check(xpiZipFilename, {offset: 30})) { + return { + ext: 'xpi', + mime: 'application/x-xpinstall' + }; + } + + if (checkString('mimetypeapplication/vnd.oasis.opendocument.text', {offset: 30})) { + return { + ext: 'odt', + mime: 'application/vnd.oasis.opendocument.text' + }; + } + + if (checkString('mimetypeapplication/vnd.oasis.opendocument.spreadsheet', {offset: 30})) { + return { + ext: 'ods', + mime: 'application/vnd.oasis.opendocument.spreadsheet' + }; + } + + 
if (checkString('mimetypeapplication/vnd.oasis.opendocument.presentation', {offset: 30})) { + return { + ext: 'odp', + mime: 'application/vnd.oasis.opendocument.presentation' + }; + } + + // https://github.com/file/file/blob/master/magic/Magdir/msooxml + if (check(oxmlContentTypes, {offset: 30}) || check(oxmlRels, {offset: 30})) { + const sliced = buf.subarray(4, 4 + 2000); + const nextZipHeaderIndex = arr => arr.findIndex((el, i, arr) => arr[i] === 0x50 && arr[i + 1] === 0x4B && arr[i + 2] === 0x3 && arr[i + 3] === 0x4); + const header2Pos = nextZipHeaderIndex(sliced); + + if (header2Pos !== -1) { + const slicedAgain = buf.subarray(header2Pos + 8, header2Pos + 8 + 1000); + const header3Pos = nextZipHeaderIndex(slicedAgain); + + if (header3Pos !== -1) { + const offset = 8 + header2Pos + header3Pos + 30; + + if (checkString('word/', {offset})) { + return { + ext: 'docx', + mime: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' + }; + } + + if (checkString('ppt/', {offset})) { + return { + ext: 'pptx', + mime: 'application/vnd.openxmlformats-officedocument.presentationml.presentation' + }; + } + + if (checkString('xl/', {offset})) { + return { + ext: 'xlsx', + mime: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + }; + } + } + } + } + } + + if ( + check([0x50, 0x4B]) && + (buf[2] === 0x3 || buf[2] === 0x5 || buf[2] === 0x7) && + (buf[3] === 0x4 || buf[3] === 0x6 || buf[3] === 0x8) + ) { + return { + ext: 'zip', + mime: 'application/zip' + }; + } + + if (check([0x75, 0x73, 0x74, 0x61, 0x72], {offset: 257})) { + return { + ext: 'tar', + mime: 'application/x-tar' + }; + } + + if ( + check([0x52, 0x61, 0x72, 0x21, 0x1A, 0x7]) && + (buf[6] === 0x0 || buf[6] === 0x1) + ) { + return { + ext: 'rar', + mime: 'application/x-rar-compressed' + }; + } + + if (check([0x1F, 0x8B, 0x8])) { + return { + ext: 'gz', + mime: 'application/gzip' + }; + } + + if (check([0x42, 0x5A, 0x68])) { + return { + ext: 'bz2', + mime: 'application/x-bzip2' + }; + } + + if (check([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])) { + return { + ext: '7z', + mime: 'application/x-7z-compressed' + }; + } + + if (check([0x78, 0x01])) { + return { + ext: 'dmg', + mime: 'application/x-apple-diskimage' + }; + } + + if (check([0x33, 0x67, 0x70, 0x35]) || // 3gp5 + ( + check([0x0, 0x0, 0x0]) && check([0x66, 0x74, 0x79, 0x70], {offset: 4}) && + ( + check([0x6D, 0x70, 0x34, 0x31], {offset: 8}) || // MP41 + check([0x6D, 0x70, 0x34, 0x32], {offset: 8}) || // MP42 + check([0x69, 0x73, 0x6F, 0x6D], {offset: 8}) || // ISOM + check([0x69, 0x73, 0x6F, 0x32], {offset: 8}) || // ISO2 + check([0x6D, 0x6D, 0x70, 0x34], {offset: 8}) || // MMP4 + check([0x4D, 0x34, 0x56], {offset: 8}) || // M4V + check([0x64, 0x61, 0x73, 0x68], {offset: 8}) // DASH + ) + )) { + return { + ext: 'mp4', + mime: 'video/mp4' + }; + } + + if (check([0x4D, 0x54, 0x68, 0x64])) { + return { + ext: 'mid', + mime: 'audio/midi' + }; + } + + // https://github.com/threatstack/libmagic/blob/master/magic/Magdir/matroska + if (check([0x1A, 0x45, 0xDF, 0xA3])) { + const sliced = buf.subarray(4, 4 + 4096); + const idPos = sliced.findIndex((el, i, arr) => arr[i] === 0x42 && arr[i + 1] === 0x82); + + if (idPos !== -1) { + const docTypePos = idPos + 3; + const findDocType = type => [...type].every((c, i) => sliced[docTypePos + i] === c.charCodeAt(0)); + + if (findDocType('matroska')) { + return { + ext: 'mkv', + mime: 'video/x-matroska' + }; + } + + if (findDocType('webm')) { + return { + ext: 'webm', + mime: 'video/webm' + }; + } + } + } + + if 
(check([0x0, 0x0, 0x0, 0x14, 0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20]) || + check([0x66, 0x72, 0x65, 0x65], {offset: 4}) || + check([0x66, 0x74, 0x79, 0x70, 0x71, 0x74, 0x20, 0x20], {offset: 4}) || + check([0x6D, 0x64, 0x61, 0x74], {offset: 4}) || // MJPEG + check([0x77, 0x69, 0x64, 0x65], {offset: 4})) { + return { + ext: 'mov', + mime: 'video/quicktime' + }; + } + + // RIFF file format which might be AVI, WAV, QCP, etc + if (check([0x52, 0x49, 0x46, 0x46])) { + if (check([0x41, 0x56, 0x49], {offset: 8})) { + return { + ext: 'avi', + mime: 'video/x-msvideo' + }; + } + if (check([0x57, 0x41, 0x56, 0x45], {offset: 8})) { + return { + ext: 'wav', + mime: 'audio/x-wav' + }; + } + // QLCM, QCP file + if (check([0x51, 0x4C, 0x43, 0x4D], {offset: 8})) { + return { + ext: 'qcp', + mime: 'audio/qcelp' + }; + } + } + + if (check([0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9])) { + return { + ext: 'wmv', + mime: 'video/x-ms-wmv' + }; + } + + if ( + check([0x0, 0x0, 0x1, 0xBA]) || + check([0x0, 0x0, 0x1, 0xB3]) + ) { + return { + ext: 'mpg', + mime: 'video/mpeg' + }; + } + + if (check([0x66, 0x74, 0x79, 0x70, 0x33, 0x67], {offset: 4})) { + return { + ext: '3gp', + mime: 'video/3gpp' + }; + } + + // Check for MPEG header at different starting offsets + for (let start = 0; start < 2 && start < (buf.length - 16); start++) { + if ( + check([0x49, 0x44, 0x33], {offset: start}) || // ID3 header + check([0xFF, 0xE2], {offset: start, mask: [0xFF, 0xE2]}) // MPEG 1 or 2 Layer 3 header + ) { + return { + ext: 'mp3', + mime: 'audio/mpeg' + }; + } + + if ( + check([0xFF, 0xE4], {offset: start, mask: [0xFF, 0xE4]}) // MPEG 1 or 2 Layer 2 header + ) { + return { + ext: 'mp2', + mime: 'audio/mpeg' + }; + } + + if ( + check([0xFF, 0xF8], {offset: start, mask: [0xFF, 0xFC]}) // MPEG 2 layer 0 using ADTS + ) { + return { + ext: 'mp2', + mime: 'audio/mpeg' + }; + } + + if ( + check([0xFF, 0xF0], {offset: start, mask: [0xFF, 0xFC]}) // MPEG 4 layer 0 using ADTS + ) { + return { + ext: 'mp4', + mime: 'audio/mpeg' + }; + } + } + + if ( + check([0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x41], {offset: 4}) || + check([0x4D, 0x34, 0x41, 0x20]) + ) { + return { + ext: 'm4a', + mime: 'audio/m4a' + }; + } + + // Needs to be before `ogg` check + if (check([0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64], {offset: 28})) { + return { + ext: 'opus', + mime: 'audio/opus' + }; + } + + // If 'OggS' in first bytes, then OGG container + if (check([0x4F, 0x67, 0x67, 0x53])) { + // This is a OGG container + + // If ' theora' in header. + if (check([0x80, 0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {offset: 28})) { + return { + ext: 'ogv', + mime: 'video/ogg' + }; + } + // If '\x01video' in header. 
+ if (check([0x01, 0x76, 0x69, 0x64, 0x65, 0x6F, 0x00], {offset: 28})) { + return { + ext: 'ogm', + mime: 'video/ogg' + }; + } + // If ' FLAC' in header https://xiph.org/flac/faq.html + if (check([0x7F, 0x46, 0x4C, 0x41, 0x43], {offset: 28})) { + return { + ext: 'oga', + mime: 'audio/ogg' + }; + } + + // 'Speex ' in header https://en.wikipedia.org/wiki/Speex + if (check([0x53, 0x70, 0x65, 0x65, 0x78, 0x20, 0x20], {offset: 28})) { + return { + ext: 'spx', + mime: 'audio/ogg' + }; + } + + // If '\x01vorbis' in header + if (check([0x01, 0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {offset: 28})) { + return { + ext: 'ogg', + mime: 'audio/ogg' + }; + } + + // Default OGG container https://www.iana.org/assignments/media-types/application/ogg + return { + ext: 'ogx', + mime: 'application/ogg' + }; + } + + if (check([0x66, 0x4C, 0x61, 0x43])) { + return { + ext: 'flac', + mime: 'audio/x-flac' + }; + } + + if (check([0x4D, 0x41, 0x43, 0x20])) { + return { + ext: 'ape', + mime: 'audio/ape' + }; + } + + if (check([0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A])) { + return { + ext: 'amr', + mime: 'audio/amr' + }; + } + + if (check([0x25, 0x50, 0x44, 0x46])) { + return { + ext: 'pdf', + mime: 'application/pdf' + }; + } + + if (check([0x4D, 0x5A])) { + return { + ext: 'exe', + mime: 'application/x-msdownload' + }; + } + + if ( + (buf[0] === 0x43 || buf[0] === 0x46) && + check([0x57, 0x53], {offset: 1}) + ) { + return { + ext: 'swf', + mime: 'application/x-shockwave-flash' + }; + } + + if (check([0x7B, 0x5C, 0x72, 0x74, 0x66])) { + return { + ext: 'rtf', + mime: 'application/rtf' + }; + } + + if (check([0x00, 0x61, 0x73, 0x6D])) { + return { + ext: 'wasm', + mime: 'application/wasm' + }; + } + + if ( + check([0x77, 0x4F, 0x46, 0x46]) && + ( + check([0x00, 0x01, 0x00, 0x00], {offset: 4}) || + check([0x4F, 0x54, 0x54, 0x4F], {offset: 4}) + ) + ) { + return { + ext: 'woff', + mime: 'font/woff' + }; + } + + if ( + check([0x77, 0x4F, 0x46, 0x32]) && + ( + check([0x00, 0x01, 0x00, 0x00], {offset: 4}) || + check([0x4F, 0x54, 0x54, 0x4F], {offset: 4}) + ) + ) { + return { + ext: 'woff2', + mime: 'font/woff2' + }; + } + + if ( + check([0x4C, 0x50], {offset: 34}) && + ( + check([0x00, 0x00, 0x01], {offset: 8}) || + check([0x01, 0x00, 0x02], {offset: 8}) || + check([0x02, 0x00, 0x02], {offset: 8}) + ) + ) { + return { + ext: 'eot', + mime: 'application/octet-stream' + }; + } + + if (check([0x00, 0x01, 0x00, 0x00, 0x00])) { + return { + ext: 'ttf', + mime: 'font/ttf' + }; + } + + if (check([0x4F, 0x54, 0x54, 0x4F, 0x00])) { + return { + ext: 'otf', + mime: 'font/otf' + }; + } + + if (check([0x00, 0x00, 0x01, 0x00])) { + return { + ext: 'ico', + mime: 'image/x-icon' + }; + } + + if (check([0x00, 0x00, 0x02, 0x00])) { + return { + ext: 'cur', + mime: 'image/x-icon' + }; + } + + if (check([0x46, 0x4C, 0x56, 0x01])) { + return { + ext: 'flv', + mime: 'video/x-flv' + }; + } + + if (check([0x25, 0x21])) { + return { + ext: 'ps', + mime: 'application/postscript' + }; + } + + if (check([0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])) { + return { + ext: 'xz', + mime: 'application/x-xz' + }; + } + + if (check([0x53, 0x51, 0x4C, 0x69])) { + return { + ext: 'sqlite', + mime: 'application/x-sqlite3' + }; + } + + if (check([0x4E, 0x45, 0x53, 0x1A])) { + return { + ext: 'nes', + mime: 'application/x-nintendo-nes-rom' + }; + } + + if (check([0x43, 0x72, 0x32, 0x34])) { + return { + ext: 'crx', + mime: 'application/x-google-chrome-extension' + }; + } + + if ( + check([0x4D, 0x53, 0x43, 0x46]) || + check([0x49, 0x53, 0x63, 0x28]) + ) { + return { + ext: 'cab', 
+ mime: 'application/vnd.ms-cab-compressed' + }; + } + + // Needs to be before `ar` check + if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A, 0x64, 0x65, 0x62, 0x69, 0x61, 0x6E, 0x2D, 0x62, 0x69, 0x6E, 0x61, 0x72, 0x79])) { + return { + ext: 'deb', + mime: 'application/x-deb' + }; + } + + if (check([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E])) { + return { + ext: 'ar', + mime: 'application/x-unix-archive' + }; + } + + if (check([0xED, 0xAB, 0xEE, 0xDB])) { + return { + ext: 'rpm', + mime: 'application/x-rpm' + }; + } + + if ( + check([0x1F, 0xA0]) || + check([0x1F, 0x9D]) + ) { + return { + ext: 'Z', + mime: 'application/x-compress' + }; + } + + if (check([0x4C, 0x5A, 0x49, 0x50])) { + return { + ext: 'lz', + mime: 'application/x-lzip' + }; + } + + if (check([0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1])) { + return { + ext: 'msi', + mime: 'application/x-msi' + }; + } + + if (check([0x06, 0x0E, 0x2B, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0D, 0x01, 0x02, 0x01, 0x01, 0x02])) { + return { + ext: 'mxf', + mime: 'application/mxf' + }; + } + + if (check([0x47], {offset: 4}) && (check([0x47], {offset: 192}) || check([0x47], {offset: 196}))) { + return { + ext: 'mts', + mime: 'video/mp2t' + }; + } + + if (check([0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52])) { + return { + ext: 'blend', + mime: 'application/x-blender' + }; + } + + if (check([0x42, 0x50, 0x47, 0xFB])) { + return { + ext: 'bpg', + mime: 'image/bpg' + }; + } + + if (check([0x00, 0x00, 0x00, 0x0C, 0x6A, 0x50, 0x20, 0x20, 0x0D, 0x0A, 0x87, 0x0A])) { + // JPEG-2000 family + + if (check([0x6A, 0x70, 0x32, 0x20], {offset: 20})) { + return { + ext: 'jp2', + mime: 'image/jp2' + }; + } + + if (check([0x6A, 0x70, 0x78, 0x20], {offset: 20})) { + return { + ext: 'jpx', + mime: 'image/jpx' + }; + } + + if (check([0x6A, 0x70, 0x6D, 0x20], {offset: 20})) { + return { + ext: 'jpm', + mime: 'image/jpm' + }; + } + + if (check([0x6D, 0x6A, 0x70, 0x32], {offset: 20})) { + return { + ext: 'mj2', + mime: 'image/mj2' + }; + } + } + + if (check([0x46, 0x4F, 0x52, 0x4D, 0x00])) { + return { + ext: 'aif', + mime: 'audio/aiff' + }; + } + + if (checkString(' (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
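The `file-type` source above is essentially one long chain of magic-number comparisons performed by its internal `check(header, {offset, mask})` helper. A stripped-down sketch of that idea, hard-coding only the PNG signature that appears earlier in the file (this is an illustration of the technique, not part of the package's API, and the file path is a placeholder):

```js
// Minimal illustration of the magic-number check file-type performs internally.
const fs = require('fs');

const PNG_SIGNATURE = [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A];

function looksLikePng(buf) {
	// Compare the first bytes of the buffer against the known PNG signature.
	return PNG_SIGNATURE.every((byte, i) => buf[i] === byte);
}

const buf = fs.readFileSync('logo.png'); // any local file; the name is a placeholder
console.log(looksLikePng(buf) ? 'image/png' : 'not a PNG');
```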
diff --git a/node_modules/bin-wrapper/node_modules/file-type/package.json b/node_modules/bin-wrapper/node_modules/file-type/package.json new file mode 100644 index 00000000..1aec8d42 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/file-type/package.json @@ -0,0 +1,123 @@ +{ + "name": "file-type", + "version": "8.1.0", + "description": "Detect the file type of a Buffer/Uint8Array", + "license": "MIT", + "repository": "sindresorhus/file-type", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "mime", + "file", + "type", + "archive", + "image", + "img", + "pic", + "picture", + "flash", + "photo", + "video", + "detect", + "check", + "is", + "exif", + "exe", + "binary", + "buffer", + "uint8array", + "jpg", + "png", + "gif", + "webp", + "flif", + "cr2", + "tif", + "bmp", + "jxr", + "psd", + "zip", + "tar", + "rar", + "gz", + "bz2", + "7z", + "dmg", + "mp4", + "m4v", + "mid", + "mkv", + "webm", + "mov", + "avi", + "mpg", + "mp2", + "mp3", + "m4a", + "ogg", + "opus", + "flac", + "wav", + "amr", + "pdf", + "epub", + "mobi", + "swf", + "rtf", + "woff", + "woff2", + "eot", + "ttf", + "otf", + "ico", + "flv", + "ps", + "xz", + "sqlite", + "xpi", + "cab", + "deb", + "ar", + "rpm", + "Z", + "lz", + "msi", + "mxf", + "mts", + "wasm", + "webassembly", + "blend", + "bpg", + "docx", + "pptx", + "xlsx", + "3gp", + "jp2", + "jpm", + "jpx", + "mj2", + "aif", + "odt", + "ods", + "odp", + "xml", + "heic" + ], + "devDependencies": { + "ava": "*", + "read-chunk": "^2.0.0", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/file-type/readme.md b/node_modules/bin-wrapper/node_modules/file-type/readme.md new file mode 100644 index 00000000..bd04b8dc --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/file-type/readme.md @@ -0,0 +1,186 @@ +# file-type [![Build Status](https://travis-ci.org/sindresorhus/file-type.svg?branch=master)](https://travis-ci.org/sindresorhus/file-type) + +> Detect the file type of a Buffer/Uint8Array + +The file type is detected by checking the [magic number](http://en.wikipedia.org/wiki/Magic_number_(programming)#Magic_numbers_in_files) of the buffer. + + +## Install + +``` +$ npm install file-type +``` + + + + + + +## Usage + +##### Node.js + +```js +const readChunk = require('read-chunk'); +const fileType = require('file-type'); +const buffer = readChunk.sync('unicorn.png', 0, 4100); + +fileType(buffer); +//=> {ext: 'png', mime: 'image/png'} +``` + +Or from a remote location: + +```js +const http = require('http'); +const fileType = require('file-type'); +const url = 'http://assets-cdn.github.com/images/spinners/octocat-spinner-32.gif'; + +http.get(url, res => { + res.once('data', chunk => { + res.destroy(); + console.log(fileType(chunk)); + //=> {ext: 'gif', mime: 'image/gif'} + }); +}); +``` + +##### Browser + +```js +const xhr = new XMLHttpRequest(); +xhr.open('GET', 'unicorn.png'); +xhr.responseType = 'arraybuffer'; + +xhr.onload = () => { + fileType(new Uint8Array(this.response)); + //=> {ext: 'png', mime: 'image/png'} +}; + +xhr.send(); +``` + + +## API + +### fileType(input) + +Returns an `Object` with: + +- `ext` - One of the [supported file types](#supported-file-types) +- `mime` - The [MIME type](http://en.wikipedia.org/wiki/Internet_media_type) + +Or `null` when no match. + +#### input + +Type: `Buffer` `Uint8Array` + +It only needs the first 4100 bytes. 
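Because only the first 4100 bytes matter, the `read-chunk` call in the readme example above can also be replaced with Node's core `fs` API; a small sketch assuming a local file (the file name is a placeholder):

```js
// Equivalent of the read-chunk example using only Node core APIs.
const fs = require('fs');
const fileType = require('file-type');

const fd = fs.openSync('unicorn.png', 'r'); // placeholder file name
const buffer = Buffer.alloc(4100);          // file-type needs at most 4100 bytes
fs.readSync(fd, buffer, 0, 4100, 0);
fs.closeSync(fd);

console.log(fileType(buffer));
//=> {ext: 'png', mime: 'image/png'} (or null when nothing matches)
```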
+ + +## Supported file types + +- [`jpg`](https://en.wikipedia.org/wiki/JPEG) +- [`png`](https://en.wikipedia.org/wiki/Portable_Network_Graphics) +- [`gif`](https://en.wikipedia.org/wiki/GIF) +- [`webp`](https://en.wikipedia.org/wiki/WebP) +- [`flif`](https://en.wikipedia.org/wiki/Free_Lossless_Image_Format) +- [`cr2`](http://fileinfo.com/extension/cr2) +- [`tif`](https://en.wikipedia.org/wiki/Tagged_Image_File_Format) +- [`bmp`](https://en.wikipedia.org/wiki/BMP_file_format) +- [`jxr`](https://en.wikipedia.org/wiki/JPEG_XR) +- [`psd`](https://en.wikipedia.org/wiki/Adobe_Photoshop#File_format) +- [`zip`](https://en.wikipedia.org/wiki/Zip_(file_format)) +- [`tar`](https://en.wikipedia.org/wiki/Tar_(computing)#File_format) +- [`rar`](https://en.wikipedia.org/wiki/RAR_(file_format)) +- [`gz`](https://en.wikipedia.org/wiki/Gzip) +- [`bz2`](https://en.wikipedia.org/wiki/Bzip2) +- [`7z`](https://en.wikipedia.org/wiki/7z) +- [`dmg`](https://en.wikipedia.org/wiki/Apple_Disk_Image) +- [`mp4`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#Filename_extensions) +- [`m4v`](https://en.wikipedia.org/wiki/M4V) +- [`mid`](https://en.wikipedia.org/wiki/MIDI) +- [`mkv`](https://en.wikipedia.org/wiki/Matroska) +- [`webm`](https://en.wikipedia.org/wiki/WebM) +- [`mov`](https://en.wikipedia.org/wiki/QuickTime_File_Format) +- [`avi`](https://en.wikipedia.org/wiki/Audio_Video_Interleave) +- [`wmv`](https://en.wikipedia.org/wiki/Windows_Media_Video) +- [`mpg`](https://en.wikipedia.org/wiki/MPEG-1) +- [`mp2`](https://en.wikipedia.org/wiki/MPEG-1_Audio_Layer_II) +- [`mp3`](https://en.wikipedia.org/wiki/MP3) +- [`m4a`](https://en.wikipedia.org/wiki/MPEG-4_Part_14#.MP4_versus_.M4A) +- [`ogg`](https://en.wikipedia.org/wiki/Ogg) +- [`opus`](https://en.wikipedia.org/wiki/Opus_(audio_format)) +- [`flac`](https://en.wikipedia.org/wiki/FLAC) +- [`wav`](https://en.wikipedia.org/wiki/WAV) +- [`qcp`](https://en.wikipedia.org/wiki/QCP) +- [`amr`](https://en.wikipedia.org/wiki/Adaptive_Multi-Rate_audio_codec) +- [`pdf`](https://en.wikipedia.org/wiki/Portable_Document_Format) +- [`epub`](https://en.wikipedia.org/wiki/EPUB) +- [`mobi`](https://en.wikipedia.org/wiki/Mobipocket) - Mobipocket +- [`exe`](https://en.wikipedia.org/wiki/.exe) +- [`swf`](https://en.wikipedia.org/wiki/SWF) +- [`rtf`](https://en.wikipedia.org/wiki/Rich_Text_Format) +- [`woff`](https://en.wikipedia.org/wiki/Web_Open_Font_Format) +- [`woff2`](https://en.wikipedia.org/wiki/Web_Open_Font_Format) +- [`eot`](https://en.wikipedia.org/wiki/Embedded_OpenType) +- [`ttf`](https://en.wikipedia.org/wiki/TrueType) +- [`otf`](https://en.wikipedia.org/wiki/OpenType) +- [`ico`](https://en.wikipedia.org/wiki/ICO_(file_format)) +- [`flv`](https://en.wikipedia.org/wiki/Flash_Video) +- [`ps`](https://en.wikipedia.org/wiki/Postscript) +- [`xz`](https://en.wikipedia.org/wiki/Xz) +- [`sqlite`](https://www.sqlite.org/fileformat2.html) +- [`nes`](http://fileinfo.com/extension/nes) +- [`crx`](https://developer.chrome.com/extensions/crx) +- [`xpi`](https://en.wikipedia.org/wiki/XPInstall) +- [`cab`](https://en.wikipedia.org/wiki/Cabinet_(file_format)) +- [`deb`](https://en.wikipedia.org/wiki/Deb_(file_format)) +- [`ar`](https://en.wikipedia.org/wiki/Ar_(Unix)) +- [`rpm`](http://fileinfo.com/extension/rpm) +- [`Z`](http://fileinfo.com/extension/z) +- [`lz`](https://en.wikipedia.org/wiki/Lzip) +- [`msi`](https://en.wikipedia.org/wiki/Windows_Installer) +- [`mxf`](https://en.wikipedia.org/wiki/Material_Exchange_Format) +- [`mts`](https://en.wikipedia.org/wiki/.m2ts) +- 
[`wasm`](https://en.wikipedia.org/wiki/WebAssembly) +- [`blend`](https://wiki.blender.org/index.php/Dev:Source/Architecture/File_Format) +- [`bpg`](https://bellard.org/bpg/) +- [`docx`](https://en.wikipedia.org/wiki/Office_Open_XML) +- [`pptx`](https://en.wikipedia.org/wiki/Office_Open_XML) +- [`xlsx`](https://en.wikipedia.org/wiki/Office_Open_XML) +- [`3gp`](https://en.wikipedia.org/wiki/3GP_and_3G2) +- [`jp2`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000 +- [`jpm`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000 +- [`jpx`](https://en.wikipedia.org/wiki/JPEG_2000) - JPEG 2000 +- [`mj2`](https://en.wikipedia.org/wiki/Motion_JPEG_2000) - Motion JPEG 2000 +- [`aif`](https://en.wikipedia.org/wiki/Audio_Interchange_File_Format) +- [`odt`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for word processing +- [`ods`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for spreadsheets +- [`odp`](https://en.wikipedia.org/wiki/OpenDocument) - OpenDocument for presentations +- [`xml`](https://en.wikipedia.org/wiki/XML) +- [`heic`](http://nokiatech.github.io/heif/technical.html) +- [`cur`](https://en.wikipedia.org/wiki/ICO_(file_format)) +- [`ktx`](https://www.khronos.org/opengles/sdk/tools/KTX/file_format_spec/) +- [`ape`](https://en.wikipedia.org/wiki/Monkey%27s_Audio) - Monkey's Audio + +*SVG isn't included as it requires the whole file to be read, but you can get it [here](https://github.com/sindresorhus/is-svg).* + +*Pull request welcome for additional commonly used file types.* + + +## Related + +- [file-type-cli](https://github.com/sindresorhus/file-type-cli) - CLI for this module + + +## Created by + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Mikael Finstad](https://github.com/mifi) + + +## License + +MIT diff --git a/node_modules/bin-wrapper/node_modules/get-stream/buffer-stream.js b/node_modules/bin-wrapper/node_modules/get-stream/buffer-stream.js new file mode 100644 index 00000000..ae45d3d9 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,51 @@ +'use strict'; +const PassThrough = require('stream').PassThrough; + +module.exports = opts => { + opts = Object.assign({}, opts); + + const array = opts.array; + let encoding = opts.encoding; + const buffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } + + if (buffer) { + encoding = null; + } + + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + stream.on('data', chunk => { + ret.push(chunk); + + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return ret; + } + + return buffer ? 
Buffer.concat(ret, len) : ret.join(''); + }; + + stream.getBufferedLength = () => len; + + return stream; +}; diff --git a/node_modules/bin-wrapper/node_modules/get-stream/index.js b/node_modules/bin-wrapper/node_modules/get-stream/index.js new file mode 100644 index 00000000..2dc5ee96 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/get-stream/index.js @@ -0,0 +1,51 @@ +'use strict'; +const bufferStream = require('./buffer-stream'); + +function getStream(inputStream, opts) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + opts = Object.assign({maxBuffer: Infinity}, opts); + + const maxBuffer = opts.maxBuffer; + let stream; + let clean; + + const p = new Promise((resolve, reject) => { + const error = err => { + if (err) { // null check + err.bufferedData = stream.getBufferedValue(); + } + + reject(err); + }; + + stream = bufferStream(opts); + inputStream.once('error', error); + inputStream.pipe(stream); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + reject(new Error('maxBuffer exceeded')); + } + }); + stream.once('error', error); + stream.on('end', resolve); + + clean = () => { + // some streams doesn't implement the `stream.Readable` interface correctly + if (inputStream.unpipe) { + inputStream.unpipe(stream); + } + }; + }); + + p.then(clean, clean); + + return p.then(() => stream.getBufferedValue()); +} + +module.exports = getStream; +module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'})); +module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true})); diff --git a/node_modules/bin-wrapper/node_modules/get-stream/license b/node_modules/bin-wrapper/node_modules/get-stream/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/get-stream/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
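The get-stream `index.js` above rejects with a plain `Error('maxBuffer exceeded')` as soon as the buffered length passes `opts.maxBuffer`. A short sketch of how a caller would typically guard against oversized input (the size limit and file path are arbitrary):

```js
// Sketch: capping how much of a stream get-stream is allowed to buffer.
const fs = require('fs');
const getStream = require('get-stream');

const stream = fs.createReadStream('some-large-file.log'); // placeholder path

getStream(stream, {maxBuffer: 1024 * 1024}) // reject once roughly 1 MiB of text is buffered
	.then(contents => {
		console.log(`read ${contents.length} characters`);
	})
	.catch(err => {
		// This rejection comes from the maxBuffer check in the index.js above.
		console.error('stream too large or unreadable:', err.message);
	});
```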
diff --git a/node_modules/bin-wrapper/node_modules/get-stream/package.json b/node_modules/bin-wrapper/node_modules/get-stream/package.json new file mode 100644 index 00000000..2f2adf0d --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/get-stream/package.json @@ -0,0 +1,48 @@ +{ + "name": "get-stream", + "version": "3.0.0", + "description": "Get a stream as a string, buffer, or array", + "license": "MIT", + "repository": "sindresorhus/get-stream", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "buffer-stream.js" + ], + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "str", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object", + "obj" + ], + "devDependencies": { + "ava": "*", + "into-stream": "^3.0.0", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/bin-wrapper/node_modules/get-stream/readme.md b/node_modules/bin-wrapper/node_modules/get-stream/readme.md new file mode 100644 index 00000000..73b188fb --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/get-stream/readme.md @@ -0,0 +1,117 @@ +# get-stream [![Build Status](https://travis-ci.org/sindresorhus/get-stream.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stream) + +> Get a stream as a string, buffer, or array + + +## Install + +``` +$ npm install --save get-stream +``` + + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); +const stream = fs.createReadStream('unicorn.txt'); + +getStream(stream).then(str => { + console.log(str); + /* + ,,))))))));, + __)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +}); +``` + + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, [options]) + +Get the `stream` as a string. + +#### options + +##### encoding + +Type: `string`
+Default: `utf8` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number`
+Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected. + +### getStream.buffer(stream, [options]) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, [options]) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +getStream(streamThatErrorsAtTheEnd('unicorn')) + .catch(err => { + console.log(err.bufferedData); + //=> 'unicorn' + }); +``` + + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. 
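To make the `getStream.array()` behaviour described above concrete, a small sketch with an object-mode stream built from Node core (the emitted values are arbitrary):

```js
const {Readable} = require('stream');
const getStream = require('get-stream');

// An object-mode stream emitting two plain objects, then ending.
const objects = new Readable({objectMode: true, read() {}});
objects.push({id: 1});
objects.push({id: 2});
objects.push(null); // end of stream

// With no encoding set, getStream.array() collects the emitted values unmodified.
getStream.array(objects).then(items => {
	console.log(items);
	//=> [{id: 1}, {id: 2}]
});
```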
+ + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/node_modules/got/errors.js b/node_modules/bin-wrapper/node_modules/got/errors.js new file mode 100644 index 00000000..ad833881 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/errors.js @@ -0,0 +1,92 @@ +'use strict'; +const urlLib = require('url'); +const http = require('http'); +const PCancelable = require('p-cancelable'); +const is = require('@sindresorhus/is'); + +class GotError extends Error { + constructor(message, error, opts) { + super(message); + Error.captureStackTrace(this, this.constructor); + this.name = 'GotError'; + + if (!is.undefined(error.code)) { + this.code = error.code; + } + + Object.assign(this, { + host: opts.host, + hostname: opts.hostname, + method: opts.method, + path: opts.path, + protocol: opts.protocol, + url: opts.href + }); + } +} + +module.exports.GotError = GotError; + +module.exports.CacheError = class extends GotError { + constructor(error, opts) { + super(error.message, error, opts); + this.name = 'CacheError'; + } +}; + +module.exports.RequestError = class extends GotError { + constructor(error, opts) { + super(error.message, error, opts); + this.name = 'RequestError'; + } +}; + +module.exports.ReadError = class extends GotError { + constructor(error, opts) { + super(error.message, error, opts); + this.name = 'ReadError'; + } +}; + +module.exports.ParseError = class extends GotError { + constructor(error, statusCode, opts, data) { + super(`${error.message} in "${urlLib.format(opts)}": \n${data.slice(0, 77)}...`, error, opts); + this.name = 'ParseError'; + this.statusCode = statusCode; + this.statusMessage = http.STATUS_CODES[this.statusCode]; + } +}; + +module.exports.HTTPError = class extends GotError { + constructor(statusCode, statusMessage, headers, opts) { + if (statusMessage) { + statusMessage = statusMessage.replace(/\r?\n/g, ' ').trim(); + } else { + statusMessage = http.STATUS_CODES[statusCode]; + } + super(`Response code ${statusCode} (${statusMessage})`, {}, opts); + this.name = 'HTTPError'; + this.statusCode = statusCode; + this.statusMessage = statusMessage; + this.headers = headers; + } +}; + +module.exports.MaxRedirectsError = class extends GotError { + constructor(statusCode, redirectUrls, opts) { + super('Redirected 10 times. 
Aborting.', {}, opts); + this.name = 'MaxRedirectsError'; + this.statusCode = statusCode; + this.statusMessage = http.STATUS_CODES[this.statusCode]; + this.redirectUrls = redirectUrls; + } +}; + +module.exports.UnsupportedProtocolError = class extends GotError { + constructor(opts) { + super(`Unsupported protocol "${opts.protocol}"`, {}, opts); + this.name = 'UnsupportedProtocolError'; + } +}; + +module.exports.CancelError = PCancelable.CancelError; diff --git a/node_modules/bin-wrapper/node_modules/got/index.js b/node_modules/bin-wrapper/node_modules/got/index.js new file mode 100644 index 00000000..9d83b771 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/index.js @@ -0,0 +1,675 @@ +'use strict'; +const EventEmitter = require('events'); +const http = require('http'); +const https = require('https'); +const PassThrough = require('stream').PassThrough; +const Transform = require('stream').Transform; +const urlLib = require('url'); +const fs = require('fs'); +const querystring = require('querystring'); +const CacheableRequest = require('cacheable-request'); +const duplexer3 = require('duplexer3'); +const intoStream = require('into-stream'); +const is = require('@sindresorhus/is'); +const getStream = require('get-stream'); +const timedOut = require('timed-out'); +const urlParseLax = require('url-parse-lax'); +const urlToOptions = require('url-to-options'); +const lowercaseKeys = require('lowercase-keys'); +const decompressResponse = require('decompress-response'); +const mimicResponse = require('mimic-response'); +const isRetryAllowed = require('is-retry-allowed'); +const isURL = require('isurl'); +const PCancelable = require('p-cancelable'); +const pTimeout = require('p-timeout'); +const pify = require('pify'); +const Buffer = require('safe-buffer').Buffer; +const pkg = require('./package.json'); +const errors = require('./errors'); + +const getMethodRedirectCodes = new Set([300, 301, 302, 303, 304, 305, 307, 308]); +const allMethodRedirectCodes = new Set([300, 303, 307, 308]); + +const isFormData = body => is.nodeStream(body) && is.function(body.getBoundary); + +const getBodySize = opts => { + const body = opts.body; + + if (opts.headers['content-length']) { + return Number(opts.headers['content-length']); + } + + if (!body && !opts.stream) { + return 0; + } + + if (is.string(body)) { + return Buffer.byteLength(body); + } + + if (isFormData(body)) { + return pify(body.getLength.bind(body))(); + } + + if (body instanceof fs.ReadStream) { + return pify(fs.stat)(body.path).then(stat => stat.size); + } + + if (is.nodeStream(body) && is.buffer(body._buffer)) { + return body._buffer.length; + } + + return null; +}; + +function requestAsEventEmitter(opts) { + opts = opts || {}; + + const ee = new EventEmitter(); + const requestUrl = opts.href || urlLib.resolve(urlLib.format(opts), opts.path); + const redirects = []; + const agents = is.object(opts.agent) ? opts.agent : null; + let retryCount = 0; + let redirectUrl; + let uploadBodySize; + let uploaded = 0; + + const get = opts => { + if (opts.protocol !== 'http:' && opts.protocol !== 'https:') { + ee.emit('error', new got.UnsupportedProtocolError(opts)); + return; + } + + let fn = opts.protocol === 'https:' ? https : http; + + if (agents) { + const protocolName = opts.protocol === 'https:' ? 
'https' : 'http'; + opts.agent = agents[protocolName] || opts.agent; + } + + if (opts.useElectronNet && process.versions.electron) { + const electron = require('electron'); + fn = electron.net || electron.remote.net; + } + + let progressInterval; + + const cacheableRequest = new CacheableRequest(fn.request, opts.cache); + const cacheReq = cacheableRequest(opts, res => { + clearInterval(progressInterval); + + ee.emit('uploadProgress', { + percent: 1, + transferred: uploaded, + total: uploadBodySize + }); + + const statusCode = res.statusCode; + + res.url = redirectUrl || requestUrl; + res.requestUrl = requestUrl; + + const followRedirect = opts.followRedirect && 'location' in res.headers; + const redirectGet = followRedirect && getMethodRedirectCodes.has(statusCode); + const redirectAll = followRedirect && allMethodRedirectCodes.has(statusCode); + + if (redirectAll || (redirectGet && (opts.method === 'GET' || opts.method === 'HEAD'))) { + res.resume(); + + if (statusCode === 303) { + // Server responded with "see other", indicating that the resource exists at another location, + // and the client should request it from that location via GET or HEAD. + opts.method = 'GET'; + } + + if (redirects.length >= 10) { + ee.emit('error', new got.MaxRedirectsError(statusCode, redirects, opts), null, res); + return; + } + + const bufferString = Buffer.from(res.headers.location, 'binary').toString(); + + redirectUrl = urlLib.resolve(urlLib.format(opts), bufferString); + + redirects.push(redirectUrl); + + const redirectOpts = Object.assign({}, opts, urlLib.parse(redirectUrl)); + + ee.emit('redirect', res, redirectOpts); + + get(redirectOpts); + + return; + } + + setImmediate(() => { + try { + getResponse(res, opts, ee, redirects); + } catch (e) { + ee.emit('error', e); + } + }); + }); + + cacheReq.on('error', err => { + if (err instanceof CacheableRequest.RequestError) { + ee.emit('error', new got.RequestError(err, opts)); + } else { + ee.emit('error', new got.CacheError(err, opts)); + } + }); + + cacheReq.once('request', req => { + let aborted = false; + req.once('abort', _ => { + aborted = true; + }); + + req.once('error', err => { + clearInterval(progressInterval); + + if (aborted) { + return; + } + + const backoff = opts.retries(++retryCount, err); + + if (backoff) { + setTimeout(get, backoff, opts); + return; + } + + ee.emit('error', new got.RequestError(err, opts)); + }); + + ee.once('request', req => { + ee.emit('uploadProgress', { + percent: 0, + transferred: 0, + total: uploadBodySize + }); + + const socket = req.connection; + if (socket) { + // `._connecting` was the old property which was made public in node v6.1.0 + const isConnecting = socket.connecting === undefined ? socket._connecting : socket.connecting; + + const onSocketConnect = () => { + const uploadEventFrequency = 150; + + progressInterval = setInterval(() => { + if (socket.destroyed) { + clearInterval(progressInterval); + return; + } + + const lastUploaded = uploaded; + const headersSize = req._header ? Buffer.byteLength(req._header) : 0; + uploaded = socket.bytesWritten - headersSize; + + // Prevent the known issue of `bytesWritten` being larger than body size + if (uploadBodySize && uploaded > uploadBodySize) { + uploaded = uploadBodySize; + } + + // Don't emit events with unchanged progress and + // prevent last event from being emitted, because + // it's emitted when `response` is emitted + if (uploaded === lastUploaded || uploaded === uploadBodySize) { + return; + } + + ee.emit('uploadProgress', { + percent: uploadBodySize ? 
uploaded / uploadBodySize : 0, + transferred: uploaded, + total: uploadBodySize + }); + }, uploadEventFrequency); + }; + + // Only subscribe to 'connect' event if we're actually connecting a new + // socket, otherwise if we're already connected (because this is a + // keep-alive connection) do not bother. This is important since we won't + // get a 'connect' event for an already connected socket. + if (isConnecting) { + socket.once('connect', onSocketConnect); + } else { + onSocketConnect(); + } + } + }); + + if (opts.gotTimeout) { + clearInterval(progressInterval); + timedOut(req, opts.gotTimeout); + } + + setImmediate(() => { + ee.emit('request', req); + }); + }); + }; + + setImmediate(() => { + Promise.resolve(getBodySize(opts)) + .then(size => { + uploadBodySize = size; + + if ( + is.undefined(opts.headers['content-length']) && + is.undefined(opts.headers['transfer-encoding']) && + isFormData(opts.body) + ) { + opts.headers['content-length'] = size; + } + + get(opts); + }) + .catch(err => { + ee.emit('error', err); + }); + }); + + return ee; +} + +function getResponse(res, opts, ee, redirects) { + const downloadBodySize = Number(res.headers['content-length']) || null; + let downloaded = 0; + + const progressStream = new Transform({ + transform(chunk, encoding, callback) { + downloaded += chunk.length; + + const percent = downloadBodySize ? downloaded / downloadBodySize : 0; + + // Let flush() be responsible for emitting the last event + if (percent < 1) { + ee.emit('downloadProgress', { + percent, + transferred: downloaded, + total: downloadBodySize + }); + } + + callback(null, chunk); + }, + + flush(callback) { + ee.emit('downloadProgress', { + percent: 1, + transferred: downloaded, + total: downloadBodySize + }); + + callback(); + } + }); + + mimicResponse(res, progressStream); + progressStream.redirectUrls = redirects; + + const response = opts.decompress === true && + is.function(decompressResponse) && + opts.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream; + + if (!opts.decompress && ['gzip', 'deflate'].indexOf(res.headers['content-encoding']) !== -1) { + opts.encoding = null; + } + + ee.emit('response', response); + + ee.emit('downloadProgress', { + percent: 0, + transferred: 0, + total: downloadBodySize + }); + + res.pipe(progressStream); +} + +function asPromise(opts) { + const timeoutFn = requestPromise => opts.gotTimeout && opts.gotTimeout.request ? + pTimeout(requestPromise, opts.gotTimeout.request, new got.RequestError({message: 'Request timed out', code: 'ETIMEDOUT'}, opts)) : + requestPromise; + + const proxy = new EventEmitter(); + + const cancelable = new PCancelable((resolve, reject, onCancel) => { + const ee = requestAsEventEmitter(opts); + let cancelOnRequest = false; + + onCancel(() => { + cancelOnRequest = true; + }); + + ee.on('request', req => { + if (cancelOnRequest) { + req.abort(); + } + + onCancel(() => { + req.abort(); + }); + + if (is.nodeStream(opts.body)) { + opts.body.pipe(req); + opts.body = undefined; + return; + } + + req.end(opts.body); + }); + + ee.on('response', res => { + const stream = is.null(opts.encoding) ? getStream.buffer(res) : getStream(res, opts); + + stream + .catch(err => reject(new got.ReadError(err, opts))) + .then(data => { + const statusCode = res.statusCode; + const limitStatusCode = opts.followRedirect ? 
299 : 399; + + res.body = data; + + if (opts.json && res.body) { + try { + res.body = JSON.parse(res.body); + } catch (err) { + if (statusCode >= 200 && statusCode < 300) { + throw new got.ParseError(err, statusCode, opts, data); + } + } + } + + if (opts.throwHttpErrors && statusCode !== 304 && (statusCode < 200 || statusCode > limitStatusCode)) { + throw new got.HTTPError(statusCode, res.statusMessage, res.headers, opts); + } + + resolve(res); + }) + .catch(err => { + Object.defineProperty(err, 'response', {value: res}); + reject(err); + }); + }); + + ee.once('error', reject); + ee.on('redirect', proxy.emit.bind(proxy, 'redirect')); + ee.on('uploadProgress', proxy.emit.bind(proxy, 'uploadProgress')); + ee.on('downloadProgress', proxy.emit.bind(proxy, 'downloadProgress')); + }); + + // Preserve backwards-compatibility + // TODO: Remove this in the next major version + Object.defineProperty(cancelable, 'canceled', { + get() { + return cancelable.isCanceled; + } + }); + + const promise = timeoutFn(cancelable); + + promise.cancel = cancelable.cancel.bind(cancelable); + + promise.on = (name, fn) => { + proxy.on(name, fn); + return promise; + }; + + return promise; +} + +function asStream(opts) { + opts.stream = true; + + const input = new PassThrough(); + const output = new PassThrough(); + const proxy = duplexer3(input, output); + let timeout; + + if (opts.gotTimeout && opts.gotTimeout.request) { + timeout = setTimeout(() => { + proxy.emit('error', new got.RequestError({message: 'Request timed out', code: 'ETIMEDOUT'}, opts)); + }, opts.gotTimeout.request); + } + + if (opts.json) { + throw new Error('Got can not be used as a stream when the `json` option is used'); + } + + if (opts.body) { + proxy.write = () => { + throw new Error('Got\'s stream is not writable when the `body` option is used'); + }; + } + + const ee = requestAsEventEmitter(opts); + + ee.on('request', req => { + proxy.emit('request', req); + + if (is.nodeStream(opts.body)) { + opts.body.pipe(req); + return; + } + + if (opts.body) { + req.end(opts.body); + return; + } + + if (opts.method === 'POST' || opts.method === 'PUT' || opts.method === 'PATCH') { + input.pipe(req); + return; + } + + req.end(); + }); + + ee.on('response', res => { + clearTimeout(timeout); + + const statusCode = res.statusCode; + + res.on('error', err => { + proxy.emit('error', new got.ReadError(err, opts)); + }); + + res.pipe(output); + + if (opts.throwHttpErrors && statusCode !== 304 && (statusCode < 200 || statusCode > 299)) { + proxy.emit('error', new got.HTTPError(statusCode, res.statusMessage, res.headers, opts), null, res); + return; + } + + proxy.emit('response', res); + }); + + ee.on('error', proxy.emit.bind(proxy, 'error')); + ee.on('redirect', proxy.emit.bind(proxy, 'redirect')); + ee.on('uploadProgress', proxy.emit.bind(proxy, 'uploadProgress')); + ee.on('downloadProgress', proxy.emit.bind(proxy, 'downloadProgress')); + + return proxy; +} + +function normalizeArguments(url, opts) { + if (!is.string(url) && !is.object(url)) { + throw new TypeError(`Parameter \`url\` must be a string or object, not ${is(url)}`); + } else if (is.string(url)) { + url = url.replace(/^unix:/, 'http://$&'); + + try { + decodeURI(url); + } catch (err) { + throw new Error('Parameter `url` must contain valid UTF-8 character sequences'); + } + + url = urlParseLax(url); + if (url.auth) { + throw new Error('Basic authentication must be done with the `auth` option'); + } + } else if (isURL.lenient(url)) { + url = urlToOptions(url); + } + + opts = Object.assign( + { + path: 
'', + retries: 2, + cache: false, + decompress: true, + useElectronNet: false, + throwHttpErrors: true + }, + url, + { + protocol: url.protocol || 'http:' // Override both null/undefined with default protocol + }, + opts + ); + + const headers = lowercaseKeys(opts.headers); + for (const key of Object.keys(headers)) { + if (is.nullOrUndefined(headers[key])) { + delete headers[key]; + } + } + + opts.headers = Object.assign({ + 'user-agent': `${pkg.name}/${pkg.version} (https://github.com/sindresorhus/got)` + }, headers); + + if (opts.decompress && is.undefined(opts.headers['accept-encoding'])) { + opts.headers['accept-encoding'] = 'gzip, deflate'; + } + + const query = opts.query; + + if (query) { + if (!is.string(query)) { + opts.query = querystring.stringify(query); + } + + opts.path = `${opts.path.split('?')[0]}?${opts.query}`; + delete opts.query; + } + + if (opts.json && is.undefined(opts.headers.accept)) { + opts.headers.accept = 'application/json'; + } + + const body = opts.body; + if (is.nullOrUndefined(body)) { + opts.method = (opts.method || 'GET').toUpperCase(); + } else { + const headers = opts.headers; + if (!is.nodeStream(body) && !is.string(body) && !is.buffer(body) && !(opts.form || opts.json)) { + throw new TypeError('The `body` option must be a stream.Readable, string, Buffer or plain Object'); + } + + const canBodyBeStringified = is.plainObject(body) || is.array(body); + if ((opts.form || opts.json) && !canBodyBeStringified) { + throw new TypeError('The `body` option must be a plain Object or Array when the `form` or `json` option is used'); + } + + if (isFormData(body)) { + // Special case for https://github.com/form-data/form-data + headers['content-type'] = headers['content-type'] || `multipart/form-data; boundary=${body.getBoundary()}`; + } else if (opts.form && canBodyBeStringified) { + headers['content-type'] = headers['content-type'] || 'application/x-www-form-urlencoded'; + opts.body = querystring.stringify(body); + } else if (opts.json && canBodyBeStringified) { + headers['content-type'] = headers['content-type'] || 'application/json'; + opts.body = JSON.stringify(body); + } + + if (is.undefined(headers['content-length']) && is.undefined(headers['transfer-encoding']) && !is.nodeStream(body)) { + const length = is.string(opts.body) ? 
Buffer.byteLength(opts.body) : opts.body.length; + headers['content-length'] = length; + } + + // Convert buffer to stream to receive upload progress events + // see https://github.com/sindresorhus/got/pull/322 + if (is.buffer(body)) { + opts.body = intoStream(body); + opts.body._buffer = body; + } + + opts.method = (opts.method || 'POST').toUpperCase(); + } + + if (opts.hostname === 'unix') { + const matches = /(.+?):(.+)/.exec(opts.path); + + if (matches) { + opts.socketPath = matches[1]; + opts.path = matches[2]; + opts.host = null; + } + } + + if (!is.function(opts.retries)) { + const retries = opts.retries; + + opts.retries = (iter, err) => { + if (iter > retries || !isRetryAllowed(err)) { + return 0; + } + + const noise = Math.random() * 100; + + return ((1 << iter) * 1000) + noise; + }; + } + + if (is.undefined(opts.followRedirect)) { + opts.followRedirect = true; + } + + if (opts.timeout) { + if (is.number(opts.timeout)) { + opts.gotTimeout = {request: opts.timeout}; + } else { + opts.gotTimeout = opts.timeout; + } + delete opts.timeout; + } + + return opts; +} + +function got(url, opts) { + try { + const normalizedArgs = normalizeArguments(url, opts); + + if (normalizedArgs.stream) { + return asStream(normalizedArgs); + } + + return asPromise(normalizedArgs); + } catch (err) { + return Promise.reject(err); + } +} + +got.stream = (url, opts) => asStream(normalizeArguments(url, opts)); + +const methods = [ + 'get', + 'post', + 'put', + 'patch', + 'head', + 'delete' +]; + +for (const method of methods) { + got[method] = (url, opts) => got(url, Object.assign({}, opts, {method})); + got.stream[method] = (url, opts) => got.stream(url, Object.assign({}, opts, {method})); +} + +Object.assign(got, errors); + +module.exports = got; diff --git a/node_modules/bin-wrapper/node_modules/got/license b/node_modules/bin-wrapper/node_modules/got/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/index.js b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/index.js new file mode 100644 index 00000000..1dee43ad --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/index.js @@ -0,0 +1,84 @@ +'use strict'; + +const processFn = (fn, opts) => function () { + const P = opts.promiseModule; + const args = new Array(arguments.length); + + for (let i = 0; i < arguments.length; i++) { + args[i] = arguments[i]; + } + + return new P((resolve, reject) => { + if (opts.errorFirst) { + args.push(function (err, result) { + if (opts.multiArgs) { + const results = new Array(arguments.length - 1); + + for (let i = 1; i < arguments.length; i++) { + results[i - 1] = arguments[i]; + } + + if (err) { + results.unshift(err); + reject(results); + } else { + resolve(results); + } + } else if (err) { + reject(err); + } else { + resolve(result); + } + }); + } else { + args.push(function (result) { + if (opts.multiArgs) { + const results = new Array(arguments.length - 1); + + for (let i = 0; i < arguments.length; i++) { + results[i] = arguments[i]; + } + + resolve(results); + } else { + resolve(result); + } + }); + } + + fn.apply(this, args); + }); +}; + +module.exports = (obj, opts) => { + opts = Object.assign({ + exclude: [/.+(Sync|Stream)$/], + errorFirst: true, + promiseModule: Promise + }, opts); + + const filter = key => { + const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key); + return opts.include ? opts.include.some(match) : !opts.exclude.some(match); + }; + + let ret; + if (typeof obj === 'function') { + ret = function () { + if (opts.excludeMain) { + return obj.apply(this, arguments); + } + + return processFn(obj, opts).apply(this, arguments); + }; + } else { + ret = Object.create(Object.getPrototypeOf(obj)); + } + + for (const key in obj) { // eslint-disable-line guard-for-in + const x = obj[key]; + ret[key] = typeof x === 'function' && filter(key) ? processFn(x, opts) : x; + } + + return ret; +}; diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/license b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/package.json b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/package.json new file mode 100644 index 00000000..468d8576 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/package.json @@ -0,0 +1,51 @@ +{ + "name": "pify", + "version": "3.0.0", + "description": "Promisify a callback-style function", + "license": "MIT", + "repository": "sindresorhus/pify", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava && npm run optimization-test", + "optimization-test": "node --allow-natives-syntax optimization-test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "promises", + "promisify", + "all", + "denodify", + "denodeify", + "callback", + "cb", + "node", + "then", + "thenify", + "convert", + "transform", + "wrap", + "wrapper", + "bind", + "to", + "async", + "await", + "es2015", + "bluebird" + ], + "devDependencies": { + "ava": "*", + "pinkie-promise": "^2.0.0", + "v8-natives": "^1.0.0", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/got/node_modules/pify/readme.md b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/readme.md new file mode 100644 index 00000000..376ca4e5 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/node_modules/pify/readme.md @@ -0,0 +1,131 @@ +# pify [![Build Status](https://travis-ci.org/sindresorhus/pify.svg?branch=master)](https://travis-ci.org/sindresorhus/pify) + +> Promisify a callback-style function + + +## Install + +``` +$ npm install --save pify +``` + + +## Usage + +```js +const fs = require('fs'); +const pify = require('pify'); + +// Promisify a single function +pify(fs.readFile)('package.json', 'utf8').then(data => { + console.log(JSON.parse(data).name); + //=> 'pify' +}); + +// Promisify all methods in a module +pify(fs).readFile('package.json', 'utf8').then(data => { + console.log(JSON.parse(data).name); + //=> 'pify' +}); +``` + + +## API + +### pify(input, [options]) + +Returns a `Promise` wrapped version of the supplied function or module. + +#### input + +Type: `Function` `Object` + +Callback-style function or module whose methods you want to promisify. + +#### options + +##### multiArgs + +Type: `boolean`
+Default: `false` + +By default, the promisified function will only return the second argument from the callback, which works fine for most APIs. This option can be useful for modules like `request` that return multiple arguments. Turning this on will make it return an array of all arguments from the callback, excluding the error argument, instead of just the second argument. This also applies to rejections, where it returns an array of all the callback arguments, including the error. + +```js +const request = require('request'); +const pify = require('pify'); + +pify(request, {multiArgs: true})('https://sindresorhus.com').then(result => { + const [httpResponse, body] = result; +}); +``` + +##### include + +Type: `string[]` `RegExp[]` + +Methods in a module to promisify. Remaining methods will be left untouched. + +##### exclude + +Type: `string[]` `RegExp[]`
+Default: `[/.+(Sync|Stream)$/]` + +Methods in a module **not** to promisify. Methods with names ending with `'Sync'` are excluded by default. + +##### excludeMain + +Type: `boolean`
+Default: `false` + +If given module is a function itself, it will be promisified. Turn this option on if you want to promisify only methods of the module. + +```js +const pify = require('pify'); + +function fn() { + return true; +} + +fn.method = (data, callback) => { + setImmediate(() => { + callback(null, data); + }); +}; + +// Promisify methods but not `fn()` +const promiseFn = pify(fn, {excludeMain: true}); + +if (promiseFn()) { + promiseFn.method('hi').then(data => { + console.log(data); + }); +} +``` + +##### errorFirst + +Type: `boolean`
+Default: `true` + +Whether the callback has an error as the first argument. You'll want to set this to `false` if you're dealing with an API that doesn't have an error as the first argument, like `fs.exists()`, some browser APIs, Chrome Extension APIs, etc. + +##### promiseModule + +Type: `Function` + +Custom promise module to use instead of the native one. + +Check out [`pinkie-promise`](https://github.com/floatdrop/pinkie-promise) if you need a tiny promise polyfill. + + +## Related + +- [p-event](https://github.com/sindresorhus/p-event) - Promisify an event by waiting for it to be emitted +- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/node_modules/got/package.json b/node_modules/bin-wrapper/node_modules/got/package.json new file mode 100644 index 00000000..78f94a8d --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/package.json @@ -0,0 +1,95 @@ +{ + "name": "got", + "version": "8.3.2", + "description": "Simplified HTTP requests", + "license": "MIT", + "repository": "sindresorhus/got", + "maintainers": [ + { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + { + "name": "Vsevolod Strukchinsky", + "email": "floatdrop@gmail.com", + "url": "github.com/floatdrop" + }, + { + "name": "Alexander Tesfamichael", + "email": "alex.tesfamichael@gmail.com", + "url": "alextes.me" + } + ], + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && nyc ava", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "files": [ + "index.js", + "errors.js" + ], + "keywords": [ + "http", + "https", + "get", + "got", + "url", + "uri", + "request", + "util", + "utility", + "simple", + "curl", + "wget", + "fetch", + "net", + "network", + "electron" + ], + "dependencies": { + "@sindresorhus/is": "^0.7.0", + "cacheable-request": "^2.1.1", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "into-stream": "^3.1.0", + "is-retry-allowed": "^1.1.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "mimic-response": "^1.0.0", + "p-cancelable": "^0.4.0", + "p-timeout": "^2.0.1", + "pify": "^3.0.0", + "safe-buffer": "^5.1.1", + "timed-out": "^4.0.1", + "url-parse-lax": "^3.0.0", + "url-to-options": "^1.0.1" + }, + "devDependencies": { + "ava": "^0.25.0", + "coveralls": "^3.0.0", + "form-data": "^2.1.1", + "get-port": "^3.0.0", + "nyc": "^11.0.2", + "p-event": "^1.3.0", + "pem": "^1.4.4", + "proxyquire": "^1.8.0", + "sinon": "^4.0.0", + "slow-stream": "0.0.4", + "tempfile": "^2.0.0", + "tempy": "^0.2.1", + "universal-url": "1.0.0-alpha", + "xo": "^0.20.0" + }, + "ava": { + "concurrency": 4 + }, + "browser": { + "decompress-response": false, + "electron": false + } +} diff --git a/node_modules/bin-wrapper/node_modules/got/readme.md b/node_modules/bin-wrapper/node_modules/got/readme.md new file mode 100644 index 00000000..2347077e --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/got/readme.md @@ -0,0 +1,650 @@ +
+	Got
+
+	Huge thanks to (sponsor logo) for sponsoring me!
+
+ +> Simplified HTTP requests + +[![Build Status](https://travis-ci.org/sindresorhus/got.svg?branch=master)](https://travis-ci.org/sindresorhus/got) [![Coverage Status](https://coveralls.io/repos/github/sindresorhus/got/badge.svg?branch=master)](https://coveralls.io/github/sindresorhus/got?branch=master) [![Downloads](https://img.shields.io/npm/dm/got.svg)](https://npmjs.com/got) + +A nicer interface to the built-in [`http`](http://nodejs.org/api/http.html) module. + +Created because [`request`](https://github.com/request/request) is bloated *(several megabytes!)*. + + +## Highlights + +- [Promise & stream API](#api) +- [Request cancelation](#aborting-the-request) +- [RFC compliant caching](#cache-adapters) +- [Follows redirects](#followredirect) +- [Retries on network failure](#retries) +- [Progress events](#onuploadprogress-progress) +- [Handles gzip/deflate](#decompress) +- [Timeout handling](#timeout) +- [Errors with metadata](#errors) +- [JSON mode](#json) +- [WHATWG URL support](#url) +- [Electron support](#useelectronnet) + + +## Install + +``` +$ npm install got +``` + + + + + + +## Usage + +```js +const got = require('got'); + +(async () => { + try { + const response = await got('sindresorhus.com'); + console.log(response.body); + //=> ' ...' + } catch (error) { + console.log(error.response.body); + //=> 'Internal server error ...' + } +})(); +``` + +###### Streams + +```js +const fs = require('fs'); +const got = require('got'); + +got.stream('sindresorhus.com').pipe(fs.createWriteStream('index.html')); + +// For POST, PUT, and PATCH methods `got.stream` returns a `stream.Writable` +fs.createReadStream('index.html').pipe(got.stream.post('sindresorhus.com')); +``` + + +### API + +It's a `GET` request by default, but can be changed by using different methods or in the `options`. + +#### got(url, [options]) + +Returns a Promise for a `response` object with a `body` property, a `url` property with the request URL or the final URL after redirects, and a `requestUrl` property with the original request URL. + +The response object will normally be a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage), however if returned from the cache it will be a [responselike object](https://github.com/lukechilds/responselike) which behaves in the same way. + +The response will also have a `fromCache` property set with a boolean value. + +##### url + +Type: `string` `Object` + +The URL to request as simple string, a [`http.request` options](https://nodejs.org/api/http.html#http_http_request_options_callback), or a [WHATWG `URL`](https://nodejs.org/api/url.html#url_class_url). + +Properties from `options` will override properties in the parsed `url`. + +If no protocol is specified, it will default to `https`. + +##### options + +Type: `Object` + +Any of the [`http.request`](http://nodejs.org/api/http.html#http_http_request_options_callback) options. + +###### stream + +Type: `boolean`
+Default: `false` + +Returns a `Stream` instead of a `Promise`. This is equivalent to calling `got.stream(url, [options])`. + +###### body + +Type: `string` `Buffer` `stream.Readable` + +*This is mutually exclusive with stream mode.* + +Body that will be sent with a `POST` request. + +If present in `options` and `options.method` is not set, `options.method` will be set to `POST`. + +If `content-length` or `transfer-encoding` is not set in `options.headers` and `body` is a string or buffer, `content-length` will be set to the body length. + +###### encoding + +Type: `string` `null`
+Default: `'utf8'` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings) to be used on `setEncoding` of the response data. If `null`, the body is returned as a [`Buffer`](https://nodejs.org/api/buffer.html) (binary data). + +###### form + +Type: `boolean`
+Default: `false` + +*This is mutually exclusive with stream mode.* + +If set to `true` and `Content-Type` header is not set, it will be set to `application/x-www-form-urlencoded`. + +`body` must be a plain object or array and will be stringified. + +###### json + +Type: `boolean`
+Default: `false` + +*This is mutually exclusive with stream mode.* + +If set to `true` and the `Content-Type` header is not set, it will be set to `application/json`. + +Parse the response body with `JSON.parse` and set the `accept` header to `application/json`. If used in conjunction with the `form` option, the `body` will be stringified as a querystring and the response parsed as JSON. + +`body` must be a plain object or array and will be stringified. + +###### query + +Type: `string` `Object`
+ +Query string object that will be added to the request URL. This will override the query string in `url`. + +###### timeout + +Type: `number` `Object` + +Milliseconds to wait for the server to end the response before aborting the request with an `ETIMEDOUT` error. + +This also accepts an object with separate `connect`, `socket`, and `request` fields for connection, socket, and entire request timeouts. + +###### retries + +Type: `number` `Function`
+Default: `2` + +Number of request retries when network errors happen. The delay between retries is computed as `1000 * Math.pow(2, retry) + Math.random() * 100`, where `retry` is the attempt number (starting from 0). + +The option also accepts a `function` with `retry` and `error` arguments. The function must return the delay in milliseconds (a return value of `0` cancels the retry). + +**Note:** if `retries` is a `number`, `ENOTFOUND` and `ENETUNREACH` errors will not be retried (see the full list in the [`is-retry-allowed`](https://github.com/floatdrop/is-retry-allowed/blob/master/index.js#L12) module). + +###### followRedirect + +Type: `boolean`
+Default: `true` + +Defines if redirect responses should be followed automatically. + +Note that if a `303` is sent by the server in response to any request type (`POST`, `DELETE`, etc.), got will automatically +request the resource pointed to in the location header via `GET`. This is in accordance with [the spec](https://tools.ietf.org/html/rfc7231#section-6.4.4). + +###### decompress + +Type: `boolean`
+Default: `true` + +Decompress the response automatically. This will set the `accept-encoding` header to `gzip, deflate` unless you set it yourself. + +If this is disabled, a compressed response is returned as a `Buffer`. This may be useful if you want to handle decompression yourself or stream the raw compressed data. + +###### cache + +Type: `Object`
+Default: `false` + +[Cache adapter instance](#cache-adapters) for storing cached data. + +###### useElectronNet + +Type: `boolean`
+Default: `false` + +When used in Electron, Got will use [`electron.net`](https://electronjs.org/docs/api/net/) instead of the Node.js `http` module. According to the Electron docs, it should be fully compatible, but it's not entirely. See [#315](https://github.com/sindresorhus/got/issues/315). + +###### throwHttpErrors + +Type: `boolean`
+Default: `true` + +Determines if a `got.HTTPError` is thrown for error responses (non-2xx status codes). + +If this is disabled, requests that encounter an error status code will be resolved with the `response` instead of throwing. This may be useful if you are checking for resource availability and are expecting error responses. + +#### Streams + +#### got.stream(url, [options]) + +`stream` method will return Duplex stream with additional events: + +##### .on('request', request) + +`request` event to get the request object of the request. + +**Tip**: You can use `request` event to abort request: + +```js +got.stream('github.com') + .on('request', req => setTimeout(() => req.abort(), 50)); +``` + +##### .on('response', response) + +`response` event to get the response object of the final request. + +##### .on('redirect', response, nextOptions) + +`redirect` event to get the response object of a redirect. The second argument is options for the next request to the redirect location. + +##### .on('uploadProgress', progress) +##### .on('downloadProgress', progress) + +Progress events for uploading (sending request) and downloading (receiving response). The `progress` argument is an object like: + +```js +{ + percent: 0.1, + transferred: 1024, + total: 10240 +} +``` + +If it's not possible to retrieve the body size (can happen when streaming), `total` will be `null`. + +**Note**: Progress events can also be used with promises. + +```js +(async () => { + const response = await got('sindresorhus.com') + .on('downloadProgress', progress => { + // Report download progress + }) + .on('uploadProgress', progress => { + // Report upload progress + }); + + console.log(response); +})(); +``` + +##### .on('error', error, body, response) + +`error` event emitted in case of protocol error (like `ENOTFOUND` etc.) or status error (4xx or 5xx). The second argument is the body of the server response in case of status error. The third argument is response object. + +#### got.get(url, [options]) +#### got.post(url, [options]) +#### got.put(url, [options]) +#### got.patch(url, [options]) +#### got.head(url, [options]) +#### got.delete(url, [options]) + +Sets `options.method` to the method name and makes a request. + + +## Errors + +Each error contains (if available) `statusCode`, `statusMessage`, `host`, `hostname`, `method`, `path`, `protocol` and `url` properties to make debugging easier. + +In Promise mode, the `response` is attached to the error. + +#### got.CacheError + +When a cache method fails, for example if the database goes down, or there's a filesystem error. + +#### got.RequestError + +When a request fails. Contains a `code` property with error class code, like `ECONNREFUSED`. + +#### got.ReadError + +When reading from response stream fails. + +#### got.ParseError + +When `json` option is enabled, server response code is 2xx, and `JSON.parse` fails. + +#### got.HTTPError + +When server response code is not 2xx. Includes `statusCode`, `statusMessage`, and `redirectUrls` properties. + +#### got.MaxRedirectsError + +When server redirects you more than 10 times. Includes a `redirectUrls` property, which is an array of the URLs Got was redirected to before giving up. + +#### got.UnsupportedProtocolError + +When given an unsupported protocol. + +#### got.CancelError + +When the request is aborted with `.cancel()`. + + +## Aborting the request + +The promise returned by Got has a [`.cancel()`](https://github.com/sindresorhus/p-cancelable) method which, when called, aborts the request. 
+ +```js +(async () => { + const request = got(url, options); + + … + + // In another part of the code + if (something) { + request.cancel(); + } + + … + + try { + await request; + } catch (error) { + if (request.isCanceled) { // Or `error instanceof got.CancelError` + // Handle cancelation + } + + // Handle other errors + } +})(); +``` + + +## Cache + +Got implements [RFC 7234](http://httpwg.org/specs/rfc7234.html) compliant HTTP caching which works out of the box in memory or is easily pluggable with a wide range of storage adapters. Fresh cache entries are served directly from cache and stale cache entries are revalidated with `If-None-Match`/`If-Modified-Since` headers. You can read more about the underlying cache behaviour in the `cacheable-request` [documentation](https://github.com/lukechilds/cacheable-request). + +You can use the JavaScript `Map` type as an in memory cache: + +```js +const got = require('got'); +const map = new Map(); + +(async () => { + let response = await got('sindresorhus.com', {cache: map}); + console.log(response.fromCache); + //=> false + + response = await got('sindresorhus.com', {cache: map}); + console.log(response.fromCache); + //=> true +})(); +``` + +Got uses [Keyv](https://github.com/lukechilds/keyv) internally to support a wide range of storage adapters. For something more scalable you could use an [official Keyv storage adapter](https://github.com/lukechilds/keyv#official-storage-adapters): + +``` +$ npm install @keyv/redis +``` + +```js +const got = require('got'); +const KeyvRedis = require('@keyv/redis'); + +const redis = new KeyvRedis('redis://user:pass@localhost:6379'); + +got('sindresorhus.com', {cache: redis}); +``` + +Got supports anything that follows the Map API, so it's easy to write your own storage adapter or use a third-party solution. + +For example, the following are all valid storage adapters: + +```js +const storageAdapter = new Map(); +// or +const storageAdapter = require('./my-storage-adapter'); +// or +const QuickLRU = require('quick-lru'); +const storageAdapter = new QuickLRU({maxSize: 1000}); + +got('sindresorhus.com', {cache: storageAdapter}); +``` + +View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters. + + +## Proxies + +You can use the [`tunnel`](https://github.com/koichik/node-tunnel) module with the `agent` option to work with proxies: + +```js +const got = require('got'); +const tunnel = require('tunnel'); + +got('sindresorhus.com', { + agent: tunnel.httpOverHttp({ + proxy: { + host: 'localhost' + } + }) +}); +``` + +If you require different agents for different protocols, you can pass a map of agents to the `agent` option. This is necessary because a request to one protocol might redirect to another. In such a scenario, `got` will switch over to the right protocol agent for you. 
+ +```js +const got = require('got'); +const HttpAgent = require('agentkeepalive'); +const HttpsAgent = HttpAgent.HttpsAgent; + +got('sindresorhus.com', { + agent: { + http: new HttpAgent(), + https: new HttpsAgent() + } +}); +``` + + +## Cookies + +You can use the [`cookie`](https://github.com/jshttp/cookie) module to include cookies in a request: + +```js +const got = require('got'); +const cookie = require('cookie'); + +got('google.com', { + headers: { + cookie: cookie.serialize('foo', 'bar') + } +}); +``` + + +## Form data + +You can use the [`form-data`](https://github.com/form-data/form-data) module to create POST request with form data: + +```js +const fs = require('fs'); +const got = require('got'); +const FormData = require('form-data'); +const form = new FormData(); + +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); + +got.post('google.com', { + body: form +}); +``` + + +## OAuth + +You can use the [`oauth-1.0a`](https://github.com/ddo/oauth-1.0a) module to create a signed OAuth request: + +```js +const got = require('got'); +const crypto = require('crypto'); +const OAuth = require('oauth-1.0a'); + +const oauth = OAuth({ + consumer: { + key: process.env.CONSUMER_KEY, + secret: process.env.CONSUMER_SECRET + }, + signature_method: 'HMAC-SHA1', + hash_function: (baseString, key) => crypto.createHmac('sha1', key).update(baseString).digest('base64') +}); + +const token = { + key: process.env.ACCESS_TOKEN, + secret: process.env.ACCESS_TOKEN_SECRET +}; + +const url = 'https://api.twitter.com/1.1/statuses/home_timeline.json'; + +got(url, { + headers: oauth.toHeader(oauth.authorize({url, method: 'GET'}, token)), + json: true +}); +``` + + +## Unix Domain Sockets + +Requests can also be sent via [unix domain sockets](http://serverfault.com/questions/124517/whats-the-difference-between-unix-socket-and-tcp-ip-socket). Use the following URL scheme: `PROTOCOL://unix:SOCKET:PATH`. + +- `PROTOCOL` - `http` or `https` *(optional)* +- `SOCKET` - absolute path to a unix domain socket, e.g. `/var/run/docker.sock` +- `PATH` - request path, e.g. `/v2/keys` + +```js +got('http://unix:/var/run/docker.sock:/containers/json'); + +// or without protocol (http by default) +got('unix:/var/run/docker.sock:/containers/json'); +``` + +## AWS + +Requests to AWS services need to have their headers signed. This can be accomplished by using the [`aws4`](https://www.npmjs.com/package/aws4) package. This is an example for querying an ["Elasticsearch Service"](https://aws.amazon.com/elasticsearch-service/) host with a signed request. + +```js +const url = require('url'); +const AWS = require('aws-sdk'); +const aws4 = require('aws4'); +const got = require('got'); +const config = require('./config'); + +// Reads keys from the environment or `~/.aws/credentials`. Could be a plain object. 
+const awsConfig = new AWS.Config({ region: config.region }); + +function request(uri, options) { + const awsOpts = { + region: awsConfig.region, + headers: { + accept: 'application/json', + 'content-type': 'application/json' + }, + method: 'GET', + json: true + }; + + // We need to parse the URL before passing it to `got` so `aws4` can sign the request + const opts = Object.assign(url.parse(uri), awsOpts, options); + aws4.sign(opts, awsConfig.credentials); + + return got(opts); +} + +request(`https://${config.host}/production/users/1`); + +request(`https://${config.host}/production/`, { + // All usual `got` options +}); +``` + + +## Testing + +You can test your requests by using the [`nock`](https://github.com/node-nock/nock) module to mock an endpoint: + +```js +const got = require('got'); +const nock = require('nock'); + +nock('https://sindresorhus.com') + .get('/') + .reply(200, 'Hello world!'); + +(async () => { + const response = await got('sindresorhus.com'); + console.log(response.body); + //=> 'Hello world!' +})(); +``` + +If you need real integration tests you can use [`create-test-server`](https://github.com/lukechilds/create-test-server): + +```js +const got = require('got'); +const createTestServer = require('create-test-server'); + +(async () => { + const server = await createTestServer(); + server.get('/', 'Hello world!'); + + const response = await got(server.url); + console.log(response.body); + //=> 'Hello world!' + + await server.close(); +})(); +``` + + +## Tips + +### User Agent + +It's a good idea to set the `'user-agent'` header so the provider can more easily see how their resource is used. By default, it's the URL to this repo. + +```js +const got = require('got'); +const pkg = require('./package.json'); + +got('sindresorhus.com', { + headers: { + 'user-agent': `my-module/${pkg.version} (https://github.com/username/my-module)` + } +}); +``` + +### 304 Responses + +Bear in mind, if you send an `if-modified-since` header and receive a `304 Not Modified` response, the body will be empty. It's your responsibility to cache and retrieve the body contents. 
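For illustration, a minimal sketch of that manual flow, assuming a simple in-memory `Map` keyed by URL (the cache layout and the `fetchWithConditionalGet` helper are illustrative, not part of Got):

```js
const got = require('got');

// Hypothetical in-memory store: url -> {lastModified, body}
const cache = new Map();

async function fetchWithConditionalGet(url) {
	const cached = cache.get(url);
	const headers = cached ? {'if-modified-since': cached.lastModified} : {};

	// A 304 does not throw, even with the default `throwHttpErrors: true`
	const response = await got(url, {headers});

	if (cached && response.statusCode === 304) {
		// The body is empty on 304, so serve the copy stored earlier
		return cached.body;
	}

	if (response.headers['last-modified']) {
		cache.set(url, {
			lastModified: response.headers['last-modified'],
			body: response.body
		});
	}

	return response.body;
}
```

In practice, the built-in `cache` option described above already performs this revalidation for you; the sketch only shows what the manual path looks like.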
+ + +## Related + +- [gh-got](https://github.com/sindresorhus/gh-got) - Got convenience wrapper to interact with the GitHub API +- [gl-got](https://github.com/singapore/gl-got) - Got convenience wrapper to interact with the GitLab API +- [travis-got](https://github.com/samverschueren/travis-got) - Got convenience wrapper to interact with the Travis API +- [graphql-got](https://github.com/kevva/graphql-got) - Got convenience wrapper to interact with GraphQL +- [GotQL](https://github.com/khaosdoctor/gotql) - Got convenience wrapper to interact with GraphQL using JSON-parsed queries instead of strings + + +## Created by + +[![Sindre Sorhus](https://github.com/sindresorhus.png?size=100)](https://sindresorhus.com) | [![Vsevolod Strukchinsky](https://github.com/floatdrop.png?size=100)](https://github.com/floatdrop) | [![Alexander Tesfamichael](https://github.com/AlexTes.png?size=100)](https://github.com/AlexTes) | [![Luke Childs](https://github.com/lukechilds.png?size=100)](https://github.com/lukechilds) +---|---|---|--- +[Sindre Sorhus](https://sindresorhus.com) | [Vsevolod Strukchinsky](https://github.com/floatdrop) | [Alexander Tesfamichael](https://alextes.me) | [Luke Childs](https://github.com/lukechilds) + + +## License + +MIT diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/index.js b/node_modules/bin-wrapper/node_modules/p-cancelable/index.js new file mode 100644 index 00000000..cdd0cfa8 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-cancelable/index.js @@ -0,0 +1,88 @@ +'use strict'; + +class CancelError extends Error { + constructor() { + super('Promise was canceled'); + this.name = 'CancelError'; + } + + get isCanceled() { + return true; + } +} + +class PCancelable { + static fn(userFn) { + return function () { + const args = [].slice.apply(arguments); + return new PCancelable((resolve, reject, onCancel) => { + args.push(onCancel); + userFn.apply(null, args).then(resolve, reject); + }); + }; + } + + constructor(executor) { + this._cancelHandlers = []; + this._isPending = true; + this._isCanceled = false; + + this._promise = new Promise((resolve, reject) => { + this._reject = reject; + + return executor( + value => { + this._isPending = false; + resolve(value); + }, + error => { + this._isPending = false; + reject(error); + }, + handler => { + this._cancelHandlers.push(handler); + } + ); + }); + } + + then(onFulfilled, onRejected) { + return this._promise.then(onFulfilled, onRejected); + } + + catch(onRejected) { + return this._promise.catch(onRejected); + } + + finally(onFinally) { + return this._promise.finally(onFinally); + } + + cancel() { + if (!this._isPending || this._isCanceled) { + return; + } + + if (this._cancelHandlers.length > 0) { + try { + for (const handler of this._cancelHandlers) { + handler(); + } + } catch (err) { + this._reject(err); + } + } + + this._isCanceled = true; + this._reject(new CancelError()); + } + + get isCanceled() { + return this._isCanceled; + } +} + +Object.setPrototypeOf(PCancelable.prototype, Promise.prototype); + +module.exports = PCancelable; +module.exports.CancelError = CancelError; diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/license b/node_modules/bin-wrapper/node_modules/p-cancelable/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-cancelable/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and 
associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/package.json b/node_modules/bin-wrapper/node_modules/p-cancelable/package.json new file mode 100644 index 00000000..c17dff85 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-cancelable/package.json @@ -0,0 +1,47 @@ +{ + "name": "p-cancelable", + "version": "0.4.1", + "description": "Create a promise that can be canceled", + "license": "MIT", + "repository": "sindresorhus/p-cancelable", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "cancelable", + "cancel", + "canceled", + "canceling", + "cancellable", + "cancellation", + "abort", + "abortable", + "aborting", + "cleanup", + "task", + "token", + "async", + "function", + "await", + "promises", + "bluebird" + ], + "devDependencies": { + "ava": "*", + "delay": "^2.0.0", + "promise.prototype.finally": "^3.1.0", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/p-cancelable/readme.md b/node_modules/bin-wrapper/node_modules/p-cancelable/readme.md new file mode 100644 index 00000000..62ec32ee --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-cancelable/readme.md @@ -0,0 +1,135 @@ +# p-cancelable [![Build Status](https://travis-ci.org/sindresorhus/p-cancelable.svg?branch=master)](https://travis-ci.org/sindresorhus/p-cancelable) + +> Create a promise that can be canceled + +Useful for animation, loading resources, long-running async computations, async iteration, etc. + + +## Install + +``` +$ npm install p-cancelable +``` + + +## Usage + +```js +const PCancelable = require('p-cancelable'); + +const cancelablePromise = new PCancelable((resolve, reject, onCancel) => { + const worker = new SomeLongRunningOperation(); + + onCancel(() => { + worker.close(); + }); + + worker.on('finish', resolve); + worker.on('error', reject); +}); + +cancelablePromise + .then(value => { + console.log('Operation finished successfully:', value); + }) + .catch(error => { + if (cancelablePromise.isCanceled) { + // Handle the cancelation here + console.log('Operation was canceled'); + return; + } + + throw error; + }); + +// Cancel the operation after 10 seconds +setTimeout(() => { + cancelablePromise.cancel(); +}, 10000); +``` + + +## API + +### new PCancelable(executor) + +Same as the [`Promise` constructor](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise), but with an appended `onCancel` parameter in `executor`. 
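As a small illustration of that signature (the one-second timer is just stand-in work, not part of the API):

```js
const PCancelable = require('p-cancelable');

const promise = new PCancelable((resolve, reject, onCancel) => {
	// Stand-in async work: settle after one second
	const timer = setTimeout(() => resolve('done'), 1000);

	// Runs if `promise.cancel()` is called before the promise settles
	onCancel(() => {
		clearTimeout(timer);
	});
});
```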
+ +`PCancelable` is a subclass of `Promise`. + +#### onCanceled(fn) + +Type: `Function` + +Accepts a function that is called when the promise is canceled. + +You're not required to call this function. You can call this function multiple times to add multiple cancel handlers. + +### PCancelable#cancel() + +Type: `Function` + +Cancel the promise. + +The cancellation is synchronous. Calling it after the promise has settled or multiple times does nothing. + +### PCancelable#isCanceled + +Type: `boolean` + +Whether the promise is canceled. + +### PCancelable.CancelError + +Type: `Error` + +Rejection reason when `.cancel()` is called. + +It includes a `.isCanceled` property for convenience. + +### PCancelable.fn(fn) + +Convenience method to make your promise-returning or async function cancelable. + +The function you specify will have `onCancel` appended to its parameters. + +```js +const fn = PCancelable.fn((input, onCancel) => { + const job = new Job(); + + onCancel(() => { + job.cleanup(); + }); + + return job.start(); //=> Promise +}); + +const promise = fn('input'); //=> PCancelable + +// … + +promise.cancel(); +``` + + +## FAQ + +### Cancelable vs. Cancellable + +[In American English, the verb cancel is usually inflected canceled and canceling—with one l.](http://grammarist.com/spelling/cancel/)
Both a [browser API](https://developer.mozilla.org/en-US/docs/Web/API/Event/cancelable) and the [Cancelable Promises proposal](https://github.com/tc39/proposal-cancelable-promises) use this spelling. + +### What about the official [Cancelable Promises proposal](https://github.com/tc39/proposal-cancelable-promises)? + +~~It's still an early draft and I don't really like its current direction. It complicates everything and will require deep changes in the ecosystem to adapt to it. And the way you have to use cancel tokens is verbose and convoluted. I much prefer the more pragmatic and less invasive approach in this module.~~ The proposal was withdrawn. + + +## Related + +- [p-progress](https://github.com/sindresorhus/p-progress) - Create a promise that reports progress +- [p-lazy](https://github.com/sindresorhus/p-lazy) - Create a lazy promise that defers execution until `.then()` or `.catch()` is called +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/node_modules/p-event/index.js b/node_modules/bin-wrapper/node_modules/p-event/index.js new file mode 100644 index 00000000..23ee4211 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-event/index.js @@ -0,0 +1,272 @@ +'use strict'; +const pTimeout = require('p-timeout'); + +const symbolAsyncIterator = Symbol.asyncIterator || '@@asyncIterator'; + +const normalizeEmitter = emitter => { + const addListener = emitter.on || emitter.addListener || emitter.addEventListener; + const removeListener = emitter.off || emitter.removeListener || emitter.removeEventListener; + + if (!addListener || !removeListener) { + throw new TypeError('Emitter is not compatible'); + } + + return { + addListener: addListener.bind(emitter), + removeListener: removeListener.bind(emitter) + }; +}; + +const normalizeEvents = event => Array.isArray(event) ? event : [event]; + +const multiple = (emitter, event, options) => { + let cancel; + const ret = new Promise((resolve, reject) => { + options = Object.assign({ + rejectionEvents: ['error'], + multiArgs: false, + resolveImmediately: false + }, options); + + if (!(options.count >= 0 && (options.count === Infinity || Number.isInteger(options.count)))) { + throw new TypeError('The `count` option should be at least 0 or more'); + } + + // Allow multiple events + const events = normalizeEvents(event); + + const items = []; + const {addListener, removeListener} = normalizeEmitter(emitter); + + const onItem = (...args) => { + const value = options.multiArgs ? 
args : args[0]; + + if (options.filter && !options.filter(value)) { + return; + } + + items.push(value); + + if (options.count === items.length) { + cancel(); + resolve(items); + } + }; + + const rejectHandler = error => { + cancel(); + reject(error); + }; + + cancel = () => { + for (const event of events) { + removeListener(event, onItem); + } + + for (const rejectionEvent of options.rejectionEvents) { + removeListener(rejectionEvent, rejectHandler); + } + }; + + for (const event of events) { + addListener(event, onItem); + } + + for (const rejectionEvent of options.rejectionEvents) { + addListener(rejectionEvent, rejectHandler); + } + + if (options.resolveImmediately) { + resolve(items); + } + }); + + ret.cancel = cancel; + + if (typeof options.timeout === 'number') { + const timeout = pTimeout(ret, options.timeout); + timeout.cancel = cancel; + return timeout; + } + + return ret; +}; + +module.exports = (emitter, event, options) => { + if (typeof options === 'function') { + options = {filter: options}; + } + + options = Object.assign({}, options, { + count: 1, + resolveImmediately: false + }); + + const arrayPromise = multiple(emitter, event, options); + + const promise = arrayPromise.then(array => array[0]); + promise.cancel = arrayPromise.cancel; + + return promise; +}; + +module.exports.multiple = multiple; + +module.exports.iterator = (emitter, event, options) => { + if (typeof options === 'function') { + options = {filter: options}; + } + + // Allow multiple events + const events = normalizeEvents(event); + + options = Object.assign({ + rejectionEvents: ['error'], + resolutionEvents: [], + limit: Infinity, + multiArgs: false + }, options); + + const {limit} = options; + const isValidLimit = limit >= 0 && (limit === Infinity || Number.isInteger(limit)); + if (!isValidLimit) { + throw new TypeError('The `limit` option should be a non-negative integer or Infinity'); + } + + if (limit === 0) { + // Return an empty async iterator to avoid any further cost + return { + [Symbol.asyncIterator]() { + return this; + }, + next() { + return Promise.resolve({done: true, value: undefined}); + } + }; + } + + let isLimitReached = false; + + const {addListener, removeListener} = normalizeEmitter(emitter); + + let done = false; + let error; + let hasPendingError = false; + const nextQueue = []; + const valueQueue = []; + let eventCount = 0; + + const valueHandler = (...args) => { + eventCount++; + isLimitReached = eventCount === limit; + + const value = options.multiArgs ? args : args[0]; + + if (nextQueue.length > 0) { + const {resolve} = nextQueue.shift(); + + resolve({done: false, value}); + + if (isLimitReached) { + cancel(); + } + + return; + } + + valueQueue.push(value); + + if (isLimitReached) { + cancel(); + } + }; + + const cancel = () => { + done = true; + for (const event of events) { + removeListener(event, valueHandler); + } + + for (const rejectionEvent of options.rejectionEvents) { + removeListener(rejectionEvent, rejectHandler); + } + + for (const resolutionEvent of options.resolutionEvents) { + removeListener(resolutionEvent, resolveHandler); + } + + while (nextQueue.length > 0) { + const {resolve} = nextQueue.shift(); + resolve({done: true, value: undefined}); + } + }; + + const rejectHandler = (...args) => { + error = options.multiArgs ? args : args[0]; + + if (nextQueue.length > 0) { + const {reject} = nextQueue.shift(); + reject(error); + } else { + hasPendingError = true; + } + + cancel(); + }; + + const resolveHandler = (...args) => { + const value = options.multiArgs ? 
args : args[0]; + + if (options.filter && !options.filter(value)) { + return; + } + + if (nextQueue.length > 0) { + const {resolve} = nextQueue.shift(); + resolve({done: true, value}); + } else { + valueQueue.push(value); + } + + cancel(); + }; + + for (const event of events) { + addListener(event, valueHandler); + } + + for (const rejectionEvent of options.rejectionEvents) { + addListener(rejectionEvent, rejectHandler); + } + + for (const resolutionEvent of options.resolutionEvents) { + addListener(resolutionEvent, resolveHandler); + } + + return { + [symbolAsyncIterator]() { + return this; + }, + next() { + if (valueQueue.length > 0) { + const value = valueQueue.shift(); + return Promise.resolve({done: done && valueQueue.length === 0 && !isLimitReached, value}); + } + + if (hasPendingError) { + hasPendingError = false; + return Promise.reject(error); + } + + if (done) { + return Promise.resolve({done: true, value: undefined}); + } + + return new Promise((resolve, reject) => nextQueue.push({resolve, reject})); + }, + return(value) { + cancel(); + return Promise.resolve({done, value}); + } + }; +}; diff --git a/node_modules/bin-wrapper/node_modules/p-event/license b/node_modules/bin-wrapper/node_modules/p-event/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-event/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bin-wrapper/node_modules/p-event/package.json b/node_modules/bin-wrapper/node_modules/p-event/package.json new file mode 100644 index 00000000..00024aa0 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-event/package.json @@ -0,0 +1,52 @@ +{ + "name": "p-event", + "version": "2.3.1", + "description": "Promisify an event by waiting for it to be emitted", + "license": "MIT", + "repository": "sindresorhus/p-event", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "events", + "event", + "emitter", + "eventemitter", + "event-emitter", + "emit", + "emits", + "listener", + "promisify", + "addlistener", + "addeventlistener", + "wait", + "waits", + "on", + "browser", + "dom", + "async", + "await", + "promises", + "bluebird" + ], + "dependencies": { + "p-timeout": "^2.0.1" + }, + "devDependencies": { + "ava": "^1.2.1", + "delay": "^4.1.0", + "xo": "^0.24.0" + } +} diff --git a/node_modules/bin-wrapper/node_modules/p-event/readme.md b/node_modules/bin-wrapper/node_modules/p-event/readme.md new file mode 100644 index 00000000..840b0e16 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-event/readme.md @@ -0,0 +1,315 @@ +# p-event [![Build Status](https://travis-ci.org/sindresorhus/p-event.svg?branch=master)](https://travis-ci.org/sindresorhus/p-event) + +> Promisify an event by waiting for it to be emitted + +Useful when you need only one event emission and want to use it with promises or await it in an async function. + +It's works with any event API in Node.js and the browser (using a bundler). + +If you want multiple individual events as they are emitted, you can use the `pEvent.iterator()` method. [Observables](https://medium.com/@benlesh/learning-observable-by-building-observable-d5da57405d87) can be useful too. + + +## Install + +``` +$ npm install p-event +``` + + +## Usage + +In Node.js: + +```js +const pEvent = require('p-event'); +const emitter = require('./some-event-emitter'); + +(async () => { + try { + const result = await pEvent(emitter, 'finish'); + + // `emitter` emitted a `finish` event + console.log(result); + } catch (error) { + // `emitter` emitted an `error` event + console.error(error); + } +})(); +``` + +In the browser: + +```js +const pEvent = require('p-event'); + +(async () => { + await pEvent(document, 'DOMContentLoaded'); + console.log('😎'); +})(); +``` + +Async iteration: + +```js +const pEvent = require('p-event'); +const emitter = require('./some-event-emitter'); + +(async () => { + const asyncIterator = pEvent.iterator(emitter, 'data', { + resolutionEvents: ['finish'] + }); + + for await (const event of asyncIterator) { + console.log(event); + } +})(); +``` + + +## API + +### pEvent(emitter, event, [options]) +### pEvent(emitter, event, filter) + +Returns a `Promise` that is fulfilled when `emitter` emits an event matching `event`, or rejects if `emitter` emits any of the events defined in the `rejectionEvents` option. + +**Note**: `event` is a string for a single event type, for example, `'data'`. To listen on multiple +events, pass an array of strings, such as `['started', 'stopped']`. + +The returned promise has a `.cancel()` method, which when called, removes the event listeners and causes the promise to never be settled. + +#### emitter + +Type: `Object` + +Event emitter object. 
+ +Should have either a `.on()`/`.addListener()`/`.addEventListener()` and `.off()`/`.removeListener()`/`.removeEventListener()` method, like the [Node.js `EventEmitter`](https://nodejs.org/api/events.html) and [DOM events](https://developer.mozilla.org/en-US/docs/Web/Events). + +#### event + +Type: `string | string[]` + +Name of the event or events to listen to. + +If the same event is defined both here and in `rejectionEvents`, this one takes priority. + +#### options + +Type: `Object` + +##### rejectionEvents + +Type: `string[]`
+Default: `['error']` + +Events that will reject the promise. + +##### multiArgs + +Type: `boolean`<br>
+Default: `false` + +By default, the promisified function will only return the first argument from the event callback, which works fine for most APIs. This option can be useful for APIs that return multiple arguments in the callback. Turning this on will make it return an array of all arguments from the callback, instead of just the first argument. This also applies to rejections. + +Example: + +```js +const pEvent = require('p-event'); +const emitter = require('./some-event-emitter'); + +(async () => { + const [foo, bar] = await pEvent(emitter, 'finish', {multiArgs: true}); +})(); +``` + +##### timeout + +Type: `number`
+Default: `Infinity` + +Time in milliseconds before timing out. + +##### filter + +Type: `Function` + +Filter function for accepting an event. + +```js +const pEvent = require('p-event'); +const emitter = require('./some-event-emitter'); + +(async () => { + const result = await pEvent(emitter, '🦄', value => value > 3); + // Do something with first 🦄 event with a value greater than 3 +})(); +``` + +### pEvent.multiple(emitter, event, options) + +Wait for multiple event emissions. Returns an array. + +This method has the same arguments and options as `pEvent()` with the addition of the following options: + +#### options + +Type: `Object` + +##### count + +*Required*
+Type: `number` + +The number of times the event needs to be emitted before the promise resolves. + +##### resolveImmediately + +Type: `boolean`<br>
+Default: `false` + +Whether to resolve the promise immediately. Emitting one of the `rejectionEvents` won't throw an error. + +**Note**: The returned array will be mutated when an event is emitted. + +Example: + +```js +const emitter = new EventEmitter(); + +const promise = pEvent.multiple(emitter, 'hello', { + resolveImmediately: true, + count: Infinity +}); + +const result = await promise; +console.log(result); +//=> [] + +emitter.emit('hello', 'Jack'); +console.log(result); +//=> ['Jack'] + +emitter.emit('hello', 'Mark'); +console.log(result); +//=> ['Jack', 'Mark'] + +// Stops listening +emitter.emit('error', new Error('😿')); + +emitter.emit('hello', 'John'); +console.log(result); +//=> ['Jack', 'Mark'] +``` + +### pEvent.iterator(emitter, event, [options]) +### pEvent.iterator(emitter, event, filter) + +Returns an [async iterator](http://2ality.com/2016/10/asynchronous-iteration.html) that lets you asynchronously iterate over events of `event` emitted from `emitter`. The iterator ends when `emitter` emits an event matching any of the events defined in `resolutionEvents`, or rejects if `emitter` emits any of the events defined in the `rejectionEvents` option. + +This method has the same arguments and options as `pEvent()` with the addition of the following options: + +#### options + +Type: `Object` + +##### limit + +Type: `number` *(non-negative integer)*
+Default: `Infinity` + +Maximum number of events for the iterator before it ends. When the limit is reached, the iterator will be marked as `done`. This option is useful to paginate events, for example, fetching 10 events per page. + +##### resolutionEvents + +Type: `string[]`
+Default: `[]` + +Events that will end the iterator. + + +## Before and after + +```js +const fs = require('fs'); + +function getOpenReadStream(file, callback) { + const stream = fs.createReadStream(file); + + stream.on('open', () => { + callback(null, stream); + }); + + stream.on('error', error => { + callback(error); + }); +} + +getOpenReadStream('unicorn.txt', (error, stream) => { + if (error) { + console.error(error); + return; + } + + console.log('File descriptor:', stream.fd); + stream.pipe(process.stdout); +}); +``` + +```js +const fs = require('fs'); +const pEvent = require('p-event'); + +async function getOpenReadStream(file) { + const stream = fs.createReadStream(file); + await pEvent(stream, 'open'); + return stream; +} + +(async () => { + const stream = await getOpenReadStream('unicorn.txt'); + console.log('File descriptor:', stream.fd); + stream.pipe(process.stdout); +})().catch(console.error); +``` + + +## Tip + +### Dealing with calls that resolve with an error code + +Some functions might use a single event for success and for certain errors. Promises make it easy to have combined error handler for both error events and successes containing values which represent errors. + +```js +const pEvent = require('p-event'); +const emitter = require('./some-event-emitter'); + +(async () => { + try { + const result = await pEvent(emitter, 'finish'); + + if (result === 'unwanted result') { + throw new Error('Emitter finished with an error'); + } + + // `emitter` emitted a `finish` event with an acceptable value + console.log(result); + } catch (error) { + // `emitter` emitted an `error` event or + // emitted a `finish` with 'unwanted result' + console.error(error); + } +})(); +``` + + +## Related + +- [pify](https://github.com/sindresorhus/pify) - Promisify a callback-style function +- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/index.js b/node_modules/bin-wrapper/node_modules/p-timeout/index.js new file mode 100644 index 00000000..8393646a --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-timeout/index.js @@ -0,0 +1,44 @@ +'use strict'; +const pFinally = require('p-finally'); + +class TimeoutError extends Error { + constructor(message) { + super(message); + this.name = 'TimeoutError'; + } +} + +module.exports = (promise, ms, fallback) => new Promise((resolve, reject) => { + if (typeof ms !== 'number' || ms < 0) { + throw new TypeError('Expected `ms` to be a positive number'); + } + + const timer = setTimeout(() => { + if (typeof fallback === 'function') { + try { + resolve(fallback()); + } catch (err) { + reject(err); + } + return; + } + + const message = typeof fallback === 'string' ? fallback : `Promise timed out after ${ms} milliseconds`; + const err = fallback instanceof Error ? 
fallback : new TimeoutError(message); + + if (typeof promise.cancel === 'function') { + promise.cancel(); + } + + reject(err); + }, ms); + + pFinally( + promise.then(resolve, reject), + () => { + clearTimeout(timer); + } + ); +}); + +module.exports.TimeoutError = TimeoutError; diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/license b/node_modules/bin-wrapper/node_modules/p-timeout/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-timeout/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/package.json b/node_modules/bin-wrapper/node_modules/p-timeout/package.json new file mode 100644 index 00000000..48cb322d --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-timeout/package.json @@ -0,0 +1,43 @@ +{ + "name": "p-timeout", + "version": "2.0.1", + "description": "Timeout a promise after a specified amount of time", + "license": "MIT", + "repository": "sindresorhus/p-timeout", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "timeout", + "error", + "invalidate", + "async", + "await", + "promises", + "time", + "out", + "cancel", + "bluebird" + ], + "dependencies": { + "p-finally": "^1.0.0" + }, + "devDependencies": { + "ava": "*", + "delay": "^2.0.0", + "p-cancelable": "^0.3.0", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/p-timeout/readme.md b/node_modules/bin-wrapper/node_modules/p-timeout/readme.md new file mode 100644 index 00000000..94ff3e39 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/p-timeout/readme.md @@ -0,0 +1,89 @@ +# p-timeout [![Build Status](https://travis-ci.org/sindresorhus/p-timeout.svg?branch=master)](https://travis-ci.org/sindresorhus/p-timeout) + +> Timeout a promise after a specified amount of time + + +## Install + +``` +$ npm install p-timeout +``` + + +## Usage + +```js +const delay = require('delay'); +const pTimeout = require('p-timeout'); + +const delayedPromise = delay(200); + +pTimeout(delayedPromise, 50).then(() => 'foo'); +//=> [TimeoutError: Promise timed out after 50 milliseconds] +``` + + +## API + +### pTimeout(input, ms, [message | fallback]) + +Returns a decorated `input` that times out after `ms` time. 
+ +If you pass in a cancelable promise, specifically a promise with a `.cancel()` method, that method will be called when the `pTimeout` promise times out. + +#### input + +Type: `Promise` + +Promise to decorate. + +#### ms + +Type: `number` + +Milliseconds before timing out. + +#### message + +Type: `string` `Error`
+Default: `'Promise timed out after 50 milliseconds'` + +Specify a custom error message or error. + +If you do a custom error, it's recommended to sub-class `pTimeout.TimeoutError`. + +#### fallback + +Type: `Function` + +Do something other than rejecting with an error on timeout. + +You could for example retry: + +```js +const delay = require('delay'); +const pTimeout = require('p-timeout'); + +const delayedPromise = () => delay(200); + +pTimeout(delayedPromise(), 50, () => { + return pTimeout(delayedPromise(), 300); +}); +``` + +### pTimeout.TimeoutError + +Exposed for instance checking and sub-classing. + + +## Related + +- [delay](https://github.com/sindresorhus/delay) - Delay a promise a specified amount of time +- [p-min-delay](https://github.com/sindresorhus/p-min-delay) - Delay a promise a minimum amount of time +- [p-retry](https://github.com/sindresorhus/p-retry) - Retry a promise-returning function +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/index.js b/node_modules/bin-wrapper/node_modules/prepend-http/index.js new file mode 100644 index 00000000..82b3a6b2 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/prepend-http/index.js @@ -0,0 +1,15 @@ +'use strict'; +module.exports = (url, opts) => { + if (typeof url !== 'string') { + throw new TypeError(`Expected \`url\` to be of type \`string\`, got \`${typeof url}\``); + } + + url = url.trim(); + opts = Object.assign({https: false}, opts); + + if (/^\.*\/|^(?!localhost)\w+:/.test(url)) { + return url; + } + + return url.replace(/^(?!(?:\w+:)?\/\/)/, opts.https ? 'https://' : 'http://'); +}; diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/license b/node_modules/bin-wrapper/node_modules/prepend-http/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/prepend-http/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
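Tying back to the `message` option of `p-timeout` documented above, which also accepts an `Error` instance: a minimal sketch of sub-classing `pTimeout.TimeoutError` as recommended there. The `QueryTimeoutError` class and the use of `delay` as a stand-in for a slow operation are illustrative assumptions.

```js
const delay = require('delay');
const pTimeout = require('p-timeout');

class QueryTimeoutError extends pTimeout.TimeoutError {
	constructor() {
		super('The query took too long');
		this.name = 'QueryTimeoutError';
	}
}

const slowQuery = delay(200); // stands in for a slow asynchronous operation

// An Error instance passed as the third argument is used as-is on timeout
pTimeout(slowQuery, 50, new QueryTimeoutError()).catch(error => {
	console.log(error instanceof pTimeout.TimeoutError); //=> true
	console.log(error.name); //=> 'QueryTimeoutError'
});
```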
diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/package.json b/node_modules/bin-wrapper/node_modules/prepend-http/package.json new file mode 100644 index 00000000..cbfac022 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/prepend-http/package.json @@ -0,0 +1,35 @@ +{ + "name": "prepend-http", + "version": "2.0.0", + "description": "Prepend `http://` to humanized URLs like todomvc.com and localhost", + "license": "MIT", + "repository": "sindresorhus/prepend-http", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "prepend", + "protocol", + "scheme", + "url", + "uri", + "http", + "https", + "humanized" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/prepend-http/readme.md b/node_modules/bin-wrapper/node_modules/prepend-http/readme.md new file mode 100644 index 00000000..55d640df --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/prepend-http/readme.md @@ -0,0 +1,56 @@ +# prepend-http [![Build Status](https://travis-ci.org/sindresorhus/prepend-http.svg?branch=master)](https://travis-ci.org/sindresorhus/prepend-http) + +> Prepend `http://` to humanized URLs like `todomvc.com` and `localhost` + + +## Install + +``` +$ npm install prepend-http +``` + + +## Usage + +```js +const prependHttp = require('prepend-http'); + +prependHttp('todomvc.com'); +//=> 'http://todomvc.com' + +prependHttp('localhost'); +//=> 'http://localhost' + +prependHttp('http://todomvc.com'); +//=> 'http://todomvc.com' + +prependHttp('todomvc.com', {https: true}); +//=> 'https://todomvc.com' +``` + + +## API + +### prependHttp(url, [options]) + +#### url + +Type: `string` + +URL to prepend `http://` on. + +#### options + +Type: `Object` + +##### https + +Type: `boolean`
+Default: `false` + +Prepend `https://` instead of `http://`. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/index.js b/node_modules/bin-wrapper/node_modules/url-parse-lax/index.js new file mode 100644 index 00000000..5c62a589 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/index.js @@ -0,0 +1,12 @@ +'use strict'; +const url = require('url'); +const prependHttp = require('prepend-http'); + +module.exports = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError(`Expected \`url\` to be of type \`string\`, got \`${typeof input}\` instead.`); + } + + const finalUrl = prependHttp(input, Object.assign({https: true}, options)); + return url.parse(finalUrl); +}; diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/license b/node_modules/bin-wrapper/node_modules/url-parse-lax/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
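A small sketch of the `https` option handled by the `index.js` above, which controls the protocol prepended to protocol-less input before it is handed to `url.parse()`:

```js
const urlParseLax = require('url-parse-lax');

// https defaults to true, so protocol-less input is parsed as https://
console.log(urlParseLax('sindresorhus.com').href);
//=> 'https://sindresorhus.com/'

// Passing {https: false} prepends http:// instead
console.log(urlParseLax('sindresorhus.com', {https: false}).href);
//=> 'http://sindresorhus.com/'
```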
diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/package.json b/node_modules/bin-wrapper/node_modules/url-parse-lax/package.json new file mode 100644 index 00000000..b3c58f97 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/package.json @@ -0,0 +1,42 @@ +{ + "name": "url-parse-lax", + "version": "3.0.0", + "description": "Lax url.parse() with support for protocol-less URLs & IPs", + "license": "MIT", + "repository": "sindresorhus/url-parse-lax", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "url", + "uri", + "parse", + "parser", + "loose", + "lax", + "protocol", + "less", + "protocol-less", + "ip", + "ipv4", + "ipv6" + ], + "dependencies": { + "prepend-http": "^2.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/node_modules/url-parse-lax/readme.md b/node_modules/bin-wrapper/node_modules/url-parse-lax/readme.md new file mode 100644 index 00000000..be0d4371 --- /dev/null +++ b/node_modules/bin-wrapper/node_modules/url-parse-lax/readme.md @@ -0,0 +1,127 @@ +# url-parse-lax [![Build Status](https://travis-ci.org/sindresorhus/url-parse-lax.svg?branch=master)](https://travis-ci.org/sindresorhus/url-parse-lax) + +> Lax [`url.parse()`](https://nodejs.org/docs/latest/api/url.html#url_url_parse_urlstr_parsequerystring_slashesdenotehost) with support for protocol-less URLs & IPs + + +## Install + +``` +$ npm install url-parse-lax +``` + + +## Usage + +```js +const urlParseLax = require('url-parse-lax'); + +urlParseLax('sindresorhus.com'); +/* +{ + protocol: 'https:', + slashes: true, + auth: null, + host: 'sindresorhus.com', + port: null, + hostname: 'sindresorhus.com', + hash: null, + search: null, + query: null, + pathname: '/', + path: '/', + href: 'https://sindresorhus.com/' +} +*/ + +urlParseLax('[2001:db8::]:8000'); +/* +{ + protocol: null, + slashes: true, + auth: null, + host: '[2001:db8::]:8000', + port: '8000', + hostname: '2001:db8::', + hash: null, + search: null, + query: null, + pathname: '/', + path: '/', + href: 'http://[2001:db8::]:8000/' +} +*/ +``` + +And with the built-in `url.parse()`: + +```js +const url = require('url'); + +url.parse('sindresorhus.com'); +/* +{ + protocol: null, + slashes: null, + auth: null, + host: null, + port: null, + hostname: null, + hash: null, + search: null, + query: null, + pathname: 'sindresorhus', + path: 'sindresorhus', + href: 'sindresorhus' +} +*/ + +url.parse('[2001:db8::]:8000'); +/* +{ + protocol: null, + slashes: null, + auth: null, + host: null, + port: null, + hostname: null, + hash: null, + search: null, + query: null, + pathname: '[2001:db8::]:8000', + path: '[2001:db8::]:8000', + href: '[2001:db8::]:8000' +} +*/ +``` + + +## API + +### urlParseLax(url, [options]) + +#### url + +Type: `string` + +URL to parse. + +#### options + +Type: `Object` + +##### https + +Type: `boolean`
+Default: `true` + +Prepend `https://` instead of `http://` to protocol-less URLs. + + +## Related + +- [url-format-lax](https://github.com/sindresorhus/url-format-lax) - Lax `url.format()` that formats a hostname and port into IPv6-compatible socket form of `hostname:port` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/bin-wrapper/package.json b/node_modules/bin-wrapper/package.json new file mode 100644 index 00000000..e49e305d --- /dev/null +++ b/node_modules/bin-wrapper/package.json @@ -0,0 +1,44 @@ +{ + "name": "bin-wrapper", + "version": "4.1.0", + "description": "Binary wrapper that makes your programs seamlessly available as local dependencies", + "license": "MIT", + "repository": "kevva/bin-wrapper", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "https://github.com/kevva" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "bin", + "check", + "local", + "wrapper" + ], + "dependencies": { + "bin-check": "^4.1.0", + "bin-version-check": "^4.0.0", + "download": "^7.1.0", + "import-lazy": "^3.1.0", + "os-filter-obj": "^2.0.0", + "pify": "^4.0.1" + }, + "devDependencies": { + "ava": "*", + "executable": "^4.1.1", + "nock": "^10.0.2", + "path-exists": "^3.0.0", + "rimraf": "^2.6.2", + "tempy": "^0.2.1", + "xo": "*" + } +} diff --git a/node_modules/bin-wrapper/readme.md b/node_modules/bin-wrapper/readme.md new file mode 100644 index 00000000..e57de2a7 --- /dev/null +++ b/node_modules/bin-wrapper/readme.md @@ -0,0 +1,132 @@ +# bin-wrapper [![Build Status](https://travis-ci.org/kevva/bin-wrapper.svg?branch=master)](https://travis-ci.org/kevva/bin-wrapper) + +> Binary wrapper that makes your programs seamlessly available as local dependencies + + +## Install + +``` +$ npm install bin-wrapper +``` + + +## Usage + +```js +const BinWrapper = require('bin-wrapper'); + +const base = 'https://github.com/imagemin/gifsicle-bin/raw/master/vendor'; +const bin = new BinWrapper() + .src(`${base}/macos/gifsicle`, 'darwin') + .src(`${base}/linux/x64/gifsicle`, 'linux', 'x64') + .src(`${base}/win/x64/gifsicle.exe`, 'win32', 'x64') + .dest(path.join('vendor')) + .use(process.platform === 'win32' ? 'gifsicle.exe' : 'gifsicle') + .version('>=1.71'); + +(async () => { + await bin.run(['--version']); + console.log('gifsicle is working'); +})(); +``` + +Get the path to your binary with `bin.path()`: + +```js +console.log(bin.path()); +//=> 'path/to/vendor/gifsicle' +``` + + +## API + +### `new BinWrapper(options)` + +Creates a new `BinWrapper` instance. + +#### options + +Type: `Object` + +##### skipCheck + +Type: `boolean`
+Default: `false` + +Whether to skip the binary check or not. + +##### strip + +Type: `number`
+Default: `1` + +Strip a number of leading paths from file names on extraction. + +### .src(url, [os], [arch]) + +Adds a source to download. + +#### url + +Type: `string` + +Accepts a URL pointing to a file to download. + +#### os + +Type: `string` + +Tie the source to a specific OS. + +#### arch + +Type: `string` + +Tie the source to a specific arch. + +### .dest(destination) + +#### destination + +Type: `string` + +Accepts a path which the files will be downloaded to. + +### .use(binary) + +#### binary + +Type: `string` + +Define which file to use as the binary. + +### .path() + +Returns the full path to your binary. + +### .version(range) + +#### range + +Type: `string` + +Define a [semver range](https://github.com/isaacs/node-semver#ranges) to check +the binary against. + +### .run([arguments]) + +Runs the search for the binary. If no binary is found it will download the file +using the URL provided in `.src()`. + +#### arguments + +Type: `Array`
+Default: `['--version']` + +Command to run the binary with. If it exits with code `0` it means that the +binary is working. + + +## License + +MIT © [Kevin Mårtensson](http://kevinmartensson.com) diff --git a/node_modules/binary-extensions/binary-extensions.json b/node_modules/binary-extensions/binary-extensions.json new file mode 100644 index 00000000..4aab3837 --- /dev/null +++ b/node_modules/binary-extensions/binary-extensions.json @@ -0,0 +1,260 @@ +[ + "3dm", + "3ds", + "3g2", + "3gp", + "7z", + "a", + "aac", + "adp", + "ai", + "aif", + "aiff", + "alz", + "ape", + "apk", + "appimage", + "ar", + "arj", + "asf", + "au", + "avi", + "bak", + "baml", + "bh", + "bin", + "bk", + "bmp", + "btif", + "bz2", + "bzip2", + "cab", + "caf", + "cgm", + "class", + "cmx", + "cpio", + "cr2", + "cur", + "dat", + "dcm", + "deb", + "dex", + "djvu", + "dll", + "dmg", + "dng", + "doc", + "docm", + "docx", + "dot", + "dotm", + "dra", + "DS_Store", + "dsk", + "dts", + "dtshd", + "dvb", + "dwg", + "dxf", + "ecelp4800", + "ecelp7470", + "ecelp9600", + "egg", + "eol", + "eot", + "epub", + "exe", + "f4v", + "fbs", + "fh", + "fla", + "flac", + "flatpak", + "fli", + "flv", + "fpx", + "fst", + "fvt", + "g3", + "gh", + "gif", + "graffle", + "gz", + "gzip", + "h261", + "h263", + "h264", + "icns", + "ico", + "ief", + "img", + "ipa", + "iso", + "jar", + "jpeg", + "jpg", + "jpgv", + "jpm", + "jxr", + "key", + "ktx", + "lha", + "lib", + "lvp", + "lz", + "lzh", + "lzma", + "lzo", + "m3u", + "m4a", + "m4v", + "mar", + "mdi", + "mht", + "mid", + "midi", + "mj2", + "mka", + "mkv", + "mmr", + "mng", + "mobi", + "mov", + "movie", + "mp3", + "mp4", + "mp4a", + "mpeg", + "mpg", + "mpga", + "mxu", + "nef", + "npx", + "numbers", + "nupkg", + "o", + "odp", + "ods", + "odt", + "oga", + "ogg", + "ogv", + "otf", + "ott", + "pages", + "pbm", + "pcx", + "pdb", + "pdf", + "pea", + "pgm", + "pic", + "png", + "pnm", + "pot", + "potm", + "potx", + "ppa", + "ppam", + "ppm", + "pps", + "ppsm", + "ppsx", + "ppt", + "pptm", + "pptx", + "psd", + "pya", + "pyc", + "pyo", + "pyv", + "qt", + "rar", + "ras", + "raw", + "resources", + "rgb", + "rip", + "rlc", + "rmf", + "rmvb", + "rpm", + "rtf", + "rz", + "s3m", + "s7z", + "scpt", + "sgi", + "shar", + "snap", + "sil", + "sketch", + "slk", + "smv", + "snk", + "so", + "stl", + "suo", + "sub", + "swf", + "tar", + "tbz", + "tbz2", + "tga", + "tgz", + "thmx", + "tif", + "tiff", + "tlz", + "ttc", + "ttf", + "txz", + "udf", + "uvh", + "uvi", + "uvm", + "uvp", + "uvs", + "uvu", + "viv", + "vob", + "war", + "wav", + "wax", + "wbmp", + "wdp", + "weba", + "webm", + "webp", + "whl", + "wim", + "wm", + "wma", + "wmv", + "wmx", + "woff", + "woff2", + "wrm", + "wvx", + "xbm", + "xif", + "xla", + "xlam", + "xls", + "xlsb", + "xlsm", + "xlsx", + "xlt", + "xltm", + "xltx", + "xm", + "xmind", + "xpi", + "xpm", + "xwd", + "xz", + "z", + "zip", + "zipx" +] diff --git a/node_modules/binary-extensions/binary-extensions.json.d.ts b/node_modules/binary-extensions/binary-extensions.json.d.ts new file mode 100644 index 00000000..94a248c2 --- /dev/null +++ b/node_modules/binary-extensions/binary-extensions.json.d.ts @@ -0,0 +1,3 @@ +declare const binaryExtensionsJson: readonly string[]; + +export = binaryExtensionsJson; diff --git a/node_modules/binary-extensions/index.d.ts b/node_modules/binary-extensions/index.d.ts new file mode 100644 index 00000000..f469ac5f --- /dev/null +++ b/node_modules/binary-extensions/index.d.ts @@ -0,0 +1,14 @@ +/** +List of binary file extensions. 
+ +@example +``` +import binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` +*/ +declare const binaryExtensions: readonly string[]; + +export = binaryExtensions; diff --git a/node_modules/binary-extensions/index.js b/node_modules/binary-extensions/index.js new file mode 100644 index 00000000..d46e4688 --- /dev/null +++ b/node_modules/binary-extensions/index.js @@ -0,0 +1 @@ +module.exports = require('./binary-extensions.json'); diff --git a/node_modules/binary-extensions/license b/node_modules/binary-extensions/license new file mode 100644 index 00000000..401b1c73 --- /dev/null +++ b/node_modules/binary-extensions/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2019 Sindre Sorhus (https://sindresorhus.com), Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/binary-extensions/package.json b/node_modules/binary-extensions/package.json new file mode 100644 index 00000000..c4d36417 --- /dev/null +++ b/node_modules/binary-extensions/package.json @@ -0,0 +1,38 @@ +{ + "name": "binary-extensions", + "version": "2.2.0", + "description": "List of binary file extensions", + "license": "MIT", + "repository": "sindresorhus/binary-extensions", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "binary-extensions.json", + "binary-extensions.json.d.ts" + ], + "keywords": [ + "binary", + "extensions", + "extension", + "file", + "json", + "list", + "array" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/binary-extensions/readme.md b/node_modules/binary-extensions/readme.md new file mode 100644 index 00000000..3e25dd83 --- /dev/null +++ b/node_modules/binary-extensions/readme.md @@ -0,0 +1,41 @@ +# binary-extensions + +> List of binary file extensions + +The list is just a [JSON file](binary-extensions.json) and can be used anywhere. + + +## Install + +``` +$ npm install binary-extensions +``` + + +## Usage + +```js +const binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` + + +## Related + +- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file +- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions + + +--- + +
+Get professional support for this package with a Tidelift subscription.
+Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
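A short sketch of how this extension list is typically consumed, checking a file path's extension against it (essentially what the related `is-binary-path` package does); the file names are placeholders.

```js
const path = require('path');
const binaryExtensions = require('binary-extensions');

const extensions = new Set(binaryExtensions);

// Compare the lowercased extension (without the dot) against the list
const isBinaryPath = filePath =>
	extensions.has(path.extname(filePath).slice(1).toLowerCase());

console.log(isBinaryPath('photo.PNG')); //=> true
console.log(isBinaryPath('notes.md')); //=> false
```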
diff --git a/node_modules/bl/.jshintrc b/node_modules/bl/.jshintrc new file mode 100644 index 00000000..c8ef3ca4 --- /dev/null +++ b/node_modules/bl/.jshintrc @@ -0,0 +1,59 @@ +{ + "predef": [ ] + , "bitwise": false + , "camelcase": false + , "curly": false + , "eqeqeq": false + , "forin": false + , "immed": false + , "latedef": false + , "noarg": true + , "noempty": true + , "nonew": true + , "plusplus": false + , "quotmark": true + , "regexp": false + , "undef": true + , "unused": true + , "strict": false + , "trailing": true + , "maxlen": 120 + , "asi": true + , "boss": true + , "debug": true + , "eqnull": true + , "esnext": true + , "evil": true + , "expr": true + , "funcscope": false + , "globalstrict": false + , "iterator": false + , "lastsemic": true + , "laxbreak": true + , "laxcomma": true + , "loopfunc": true + , "multistr": false + , "onecase": false + , "proto": false + , "regexdash": false + , "scripturl": true + , "smarttabs": false + , "shadow": false + , "sub": true + , "supernew": false + , "validthis": true + , "browser": true + , "couch": false + , "devel": false + , "dojo": false + , "mootools": false + , "node": true + , "nonstandard": true + , "prototypejs": false + , "rhino": false + , "worker": true + , "wsh": false + , "nomen": false + , "onevar": false + , "passfail": false +} \ No newline at end of file diff --git a/node_modules/bl/.travis.yml b/node_modules/bl/.travis.yml new file mode 100644 index 00000000..a349506c --- /dev/null +++ b/node_modules/bl/.travis.yml @@ -0,0 +1,13 @@ +sudo: false +language: node_js +node_js: + - '6' + - '8' + - '10' + - '12' + - '14' + - lts/* +notifications: + email: + - rod@vagg.org + - matteo.collina@gmail.com diff --git a/node_modules/bl/LICENSE.md b/node_modules/bl/LICENSE.md new file mode 100644 index 00000000..ff35a347 --- /dev/null +++ b/node_modules/bl/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2013-2016 bl contributors +---------------------------------- + +*bl contributors listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bl/README.md b/node_modules/bl/README.md new file mode 100644 index 00000000..9eebd88b --- /dev/null +++ b/node_modules/bl/README.md @@ -0,0 +1,208 @@ +# bl *(BufferList)* + +[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl) + +**A Node.js Buffer list collector, reader and streamer thingy.** + +[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/) +[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/) + +**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them! + +The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently. + +```js +const BufferList = require('bl') + +var bl = new BufferList() +bl.append(new Buffer('abcd')) +bl.append(new Buffer('efg')) +bl.append('hi') // bl will also accept & convert Strings +bl.append(new Buffer('j')) +bl.append(new Buffer([ 0x3, 0x4 ])) + +console.log(bl.length) // 12 + +console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij' +console.log(bl.slice(3, 10).toString('ascii')) // 'defghij' +console.log(bl.slice(3, 6).toString('ascii')) // 'def' +console.log(bl.slice(3, 8).toString('ascii')) // 'defgh' +console.log(bl.slice(5, 10).toString('ascii')) // 'fghij' + +// or just use toString! +console.log(bl.toString()) // 'abcdefghij\u0003\u0004' +console.log(bl.toString('ascii', 3, 8)) // 'defgh' +console.log(bl.toString('ascii', 5, 10)) // 'fghij' + +// other standard Buffer readables +console.log(bl.readUInt16BE(10)) // 0x0304 +console.log(bl.readUInt16LE(10)) // 0x0403 +``` + +Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**: + +```js +const bl = require('bl') + , fs = require('fs') + +fs.createReadStream('README.md') + .pipe(bl(function (err, data) { // note 'new' isn't strictly required + // `data` is a complete Buffer object containing the full data + console.log(data.toString()) + })) +``` + +Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream. 
+ +Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!): +```js +const hyperquest = require('hyperquest') + , bl = require('bl') + , url = 'https://raw.github.com/rvagg/bl/master/README.md' + +hyperquest(url).pipe(bl(function (err, data) { + console.log(data.toString()) +})) +``` + +Or, use it as a readable stream to recompose a list of Buffers to an output source: + +```js +const BufferList = require('bl') + , fs = require('fs') + +var bl = new BufferList() +bl.append(new Buffer('abcd')) +bl.append(new Buffer('efg')) +bl.append(new Buffer('hi')) +bl.append(new Buffer('j')) + +bl.pipe(fs.createWriteStream('gibberish.txt')) +``` + +## API + + * new BufferList([ callback ]) + * bl.length + * bl.append(buffer) + * bl.get(index) + * bl.slice([ start[, end ] ]) + * bl.shallowSlice([ start[, end ] ]) + * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) + * bl.duplicate() + * bl.consume(bytes) + * bl.toString([encoding, [ start, [ end ]]]) + * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + * Streams + +-------------------------------------------------------- + +### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ]) +The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream. + +Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object. + +`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: + +```js +var bl = require('bl') +var myinstance = bl() + +// equivalent to: + +var BufferList = require('bl') +var myinstance = new BufferList() +``` + +-------------------------------------------------------- + +### bl.length +Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list. + +-------------------------------------------------------- + +### bl.append(Buffer | Buffer array | BufferList | BufferList array | String) +`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained. + +-------------------------------------------------------- + +### bl.get(index) +`get()` will return the byte at the specified index. + +-------------------------------------------------------- + +### bl.slice([ start, [ end ] ]) +`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. 
If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer. + +-------------------------------------------------------- + +### bl.shallowSlice([ start, [ end ] ]) +`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +No copies will be performed. All buffers in the result share memory with the original list. + +-------------------------------------------------------- + +### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) +`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively. + +-------------------------------------------------------- + +### bl.duplicate() +`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example: + +```js +var bl = new BufferList() + +bl.append('hello') +bl.append(' world') +bl.append('\n') + +bl.duplicate().pipe(process.stdout, { end: false }) + +console.log(bl.toString()) +``` + +-------------------------------------------------------- + +### bl.consume(bytes) +`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers; initial offsets will be calculated accordingly in order to give you a consistent view of the data. + +-------------------------------------------------------- + +### bl.toString([encoding, [ start, [ end ]]]) +`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information. + +-------------------------------------------------------- + +### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + +All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently. + +See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work. + +-------------------------------------------------------- + +### Streams +**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.
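A minimal sketch of the duplex behaviour described in the Streams section above: write into a list like any writable stream, then pipe the collected bytes back out like a readable one.

```js
const bl = require('bl');

const list = bl();

// Write side: the list collects whatever is written to it
list.write('buffer ');
list.end('list\n');

// Read side: pipe the collected bytes to any writable stream
list.pipe(process.stdout);
//=> buffer list
```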
+ +-------------------------------------------------------- + +## Contributors + +**bl** is brought to you by the following hackers: + + * [Rod Vagg](https://github.com/rvagg) + * [Matteo Collina](https://github.com/mcollina) + * [Jarett Cruger](https://github.com/jcrugzz) + +======= + + +## License & copyright + +Copyright (c) 2013-2016 bl contributors (listed above). + +bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details. diff --git a/node_modules/bl/bl.js b/node_modules/bl/bl.js new file mode 100644 index 00000000..0c8de189 --- /dev/null +++ b/node_modules/bl/bl.js @@ -0,0 +1,290 @@ +var DuplexStream = require('readable-stream/duplex') + , util = require('util') + , Buffer = require('safe-buffer').Buffer + + +function BufferList (callback) { + if (!(this instanceof BufferList)) + return new BufferList(callback) + + this._bufs = [] + this.length = 0 + + if (typeof callback == 'function') { + this._callback = callback + + var piper = function piper (err) { + if (this._callback) { + this._callback(err) + this._callback = null + } + }.bind(this) + + this.on('pipe', function onPipe (src) { + src.on('error', piper) + }) + this.on('unpipe', function onUnpipe (src) { + src.removeListener('error', piper) + }) + } else { + this.append(callback) + } + + DuplexStream.call(this) +} + + +util.inherits(BufferList, DuplexStream) + + +BufferList.prototype._offset = function _offset (offset) { + var tot = 0, i = 0, _t + if (offset === 0) return [ 0, 0 ] + for (; i < this._bufs.length; i++) { + _t = tot + this._bufs[i].length + if (offset < _t || i == this._bufs.length - 1) + return [ i, offset - tot ] + tot = _t + } +} + + +BufferList.prototype.append = function append (buf) { + var i = 0 + + if (Buffer.isBuffer(buf)) { + this._appendBuffer(buf); + } else if (Array.isArray(buf)) { + for (; i < buf.length; i++) + this.append(buf[i]) + } else if (buf instanceof BufferList) { + // unwrap argument into individual BufferLists + for (; i < buf._bufs.length; i++) + this.append(buf._bufs[i]) + } else if (buf != null) { + // coerce number arguments to strings, since Buffer(number) does + // uninitialized memory allocation + if (typeof buf == 'number') + buf = buf.toString() + + this._appendBuffer(Buffer.from(buf)); + } + + return this +} + + +BufferList.prototype._appendBuffer = function appendBuffer (buf) { + this._bufs.push(buf) + this.length += buf.length +} + + +BufferList.prototype._write = function _write (buf, encoding, callback) { + this._appendBuffer(buf) + + if (typeof callback == 'function') + callback() +} + + +BufferList.prototype._read = function _read (size) { + if (!this.length) + return this.push(null) + + size = Math.min(size, this.length) + this.push(this.slice(0, size)) + this.consume(size) +} + + +BufferList.prototype.end = function end (chunk) { + DuplexStream.prototype.end.call(this, chunk) + + if (this._callback) { + this._callback(null, this.slice()) + this._callback = null + } +} + + +BufferList.prototype.get = function get (index) { + return this.slice(index, index + 1)[0] +} + + +BufferList.prototype.slice = function slice (start, end) { + if (typeof start == 'number' && start < 0) + start += this.length + if (typeof end == 'number' && end < 0) + end += this.length + return this.copy(null, 0, start, end) +} + + +BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { + if (typeof srcStart != 'number' || srcStart < 0) + srcStart = 0 + if (typeof srcEnd != 'number' 
|| srcEnd > this.length) + srcEnd = this.length + if (srcStart >= this.length) + return dst || Buffer.alloc(0) + if (srcEnd <= 0) + return dst || Buffer.alloc(0) + + var copy = !!dst + , off = this._offset(srcStart) + , len = srcEnd - srcStart + , bytes = len + , bufoff = (copy && dstStart) || 0 + , start = off[1] + , l + , i + + // copy/slice everything + if (srcStart === 0 && srcEnd == this.length) { + if (!copy) { // slice, but full concat if multiple buffers + return this._bufs.length === 1 + ? this._bufs[0] + : Buffer.concat(this._bufs, this.length) + } + + // copy, need to copy individual buffers + for (i = 0; i < this._bufs.length; i++) { + this._bufs[i].copy(dst, bufoff) + bufoff += this._bufs[i].length + } + + return dst + } + + // easy, cheap case where it's a subset of one of the buffers + if (bytes <= this._bufs[off[0]].length - start) { + return copy + ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) + : this._bufs[off[0]].slice(start, start + bytes) + } + + if (!copy) // a slice, we need something to copy in to + dst = Buffer.allocUnsafe(len) + + for (i = off[0]; i < this._bufs.length; i++) { + l = this._bufs[i].length - start + + if (bytes > l) { + this._bufs[i].copy(dst, bufoff, start) + bufoff += l + } else { + this._bufs[i].copy(dst, bufoff, start, start + bytes) + bufoff += l + break + } + + bytes -= l + + if (start) + start = 0 + } + + // safeguard so that we don't return uninitialized memory + if (dst.length > bufoff) return dst.slice(0, bufoff) + + return dst +} + +BufferList.prototype.shallowSlice = function shallowSlice (start, end) { + start = start || 0 + end = end || this.length + + if (start < 0) + start += this.length + if (end < 0) + end += this.length + + var startOffset = this._offset(start) + , endOffset = this._offset(end) + , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) + + if (endOffset[1] == 0) + buffers.pop() + else + buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1]) + + if (startOffset[1] != 0) + buffers[0] = buffers[0].slice(startOffset[1]) + + return new BufferList(buffers) +} + +BufferList.prototype.toString = function toString (encoding, start, end) { + return this.slice(start, end).toString(encoding) +} + +BufferList.prototype.consume = function consume (bytes) { + // first, normalize the argument, in accordance with how Buffer does it + bytes = Math.trunc(bytes) + // do nothing if not a positive number + if (Number.isNaN(bytes) || bytes <= 0) return this + + while (this._bufs.length) { + if (bytes >= this._bufs[0].length) { + bytes -= this._bufs[0].length + this.length -= this._bufs[0].length + this._bufs.shift() + } else { + this._bufs[0] = this._bufs[0].slice(bytes) + this.length -= bytes + break + } + } + return this +} + + +BufferList.prototype.duplicate = function duplicate () { + var i = 0 + , copy = new BufferList() + + for (; i < this._bufs.length; i++) + copy.append(this._bufs[i]) + + return copy +} + + +BufferList.prototype.destroy = function destroy () { + this._bufs.length = 0 + this.length = 0 + this.push(null) +} + + +;(function () { + var methods = { + 'readDoubleBE' : 8 + , 'readDoubleLE' : 8 + , 'readFloatBE' : 4 + , 'readFloatLE' : 4 + , 'readInt32BE' : 4 + , 'readInt32LE' : 4 + , 'readUInt32BE' : 4 + , 'readUInt32LE' : 4 + , 'readInt16BE' : 2 + , 'readInt16LE' : 2 + , 'readUInt16BE' : 2 + , 'readUInt16LE' : 2 + , 'readInt8' : 1 + , 'readUInt8' : 1 + } + + for (var m in methods) { + (function (m) { + BufferList.prototype[m] = function (offset) { + return 
this.slice(offset, offset + methods[m])[m](0) + } + }(m)) + } +}()) + + +module.exports = BufferList diff --git a/node_modules/bl/node_modules/readable-stream/.travis.yml b/node_modules/bl/node_modules/readable-stream/.travis.yml new file mode 100644 index 00000000..f62cdac0 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/.travis.yml @@ -0,0 +1,34 @@ +sudo: false +language: node_js +before_install: + - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: NPM_LEGACY=true + - node_js: '0.10' + env: NPM_LEGACY=true + - node_js: '0.11' + env: NPM_LEGACY=true + - node_js: '0.12' + env: NPM_LEGACY=true + - node_js: 1 + env: NPM_LEGACY=true + - node_js: 2 + env: NPM_LEGACY=true + - node_js: 3 + env: NPM_LEGACY=true + - node_js: 4 + - node_js: 5 + - node_js: 6 + - node_js: 7 + - node_js: 8 + - node_js: 9 +script: "npm run test" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md b/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. 
+ +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. 
If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/bl/node_modules/readable-stream/LICENSE b/node_modules/bl/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/bl/node_modules/readable-stream/README.md b/node_modules/bl/node_modules/readable-stream/README.md new file mode 100644 index 00000000..f1c5a931 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.17.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. 
+* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 00000000..83275f19 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? 
+* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/bl/node_modules/readable-stream/duplex-browser.js b/node_modules/bl/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 00000000..f8b2db83 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/bl/node_modules/readable-stream/duplex.js b/node_modules/bl/node_modules/readable-stream/duplex.js new file mode 100644 index 00000000..46924cbf --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..57003c32 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..612edb4d --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..3af95cb2 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { hasUnpiped: false }); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..fcfc105a --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..e1e897ff --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,685 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. 
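The options handled just above (`objectMode`, `highWaterMark` / `writableHighWaterMark`, `decodeStrings`) are easiest to see in use. An editor-added sketch, not part of the diff, showing how `objectMode` plus a small `highWaterMark` changes what `write()` returns:

```js
var Writable = require('readable-stream').Writable;

var objSink = new Writable({
  objectMode: true,      // state.length counts objects, not bytes
  highWaterMark: 2,      // overrides the default computed above
  write: function (obj, encoding, cb) { setImmediate(cb); } // async sink
});

console.log(objSink.write({ a: 1 })); // true  — still below the high-water mark
console.log(objSink.write({ b: 2 })); // false — buffer is "full", wait for 'drain'
objSink.on('drain', function () { console.log('drained'); });
```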
+ // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. 
+ + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 00000000..5e080976 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,78 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else 
this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..85a82140 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,84 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + pna.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, this, err); + } + } + + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + pna.nextTick(emitErrorNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, _this, err); + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 
100644 index 00000000..9332a3fd --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/bl/node_modules/readable-stream/package.json b/node_modules/bl/node_modules/readable-stream/package.json new file mode 100644 index 00000000..514c178e --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/package.json @@ -0,0 +1,52 @@ +{ + "name": "readable-stream", + "version": "2.3.8", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/bl/node_modules/readable-stream/passthrough.js b/node_modules/bl/node_modules/readable-stream/passthrough.js new file mode 100644 index 00000000..ffd791d7 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/bl/node_modules/readable-stream/readable-browser.js b/node_modules/bl/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..e5037259 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/bl/node_modules/readable-stream/readable.js b/node_modules/bl/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..ec89ec53 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + 
exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/bl/node_modules/readable-stream/transform.js b/node_modules/bl/node_modules/readable-stream/transform.js new file mode 100644 index 00000000..b1baba26 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/bl/node_modules/readable-stream/writable-browser.js b/node_modules/bl/node_modules/readable-stream/writable-browser.js new file mode 100644 index 00000000..ebdde6a8 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/bl/node_modules/readable-stream/writable.js b/node_modules/bl/node_modules/readable-stream/writable.js new file mode 100644 index 00000000..3211a6f8 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/bl/node_modules/safe-buffer/LICENSE b/node_modules/bl/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/node_modules/bl/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
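Stepping back to the `readable.js` / `writable.js` entry points just above: they hand back Node core's stream implementation when `READABLE_STREAM=disable` is set in the environment. A quick editor-added check of that switch (assuming the package is installed as `readable-stream`):

```js
// Run as: READABLE_STREAM=disable node check.js   vs.   node check.js
var Readable = require('readable-stream').Readable;

console.log(Readable === require('stream').Readable);
// true when READABLE_STREAM=disable is set (core streams are re-exported),
// false otherwise (the user-land copy in lib/ is used)
```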
diff --git a/node_modules/bl/node_modules/safe-buffer/README.md b/node_modules/bl/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..e9a81afd --- /dev/null +++ b/node_modules/bl/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. 
+ +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
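The pooling note that follows is easier to see side by side. An editor-added sketch (not from the original README) contrasting the two spellings it compares:

```js
var Buffer = require('safe-buffer').Buffer

// Both yield a 16-byte zero-filled buffer, but only the second form may be
// sliced from the shared internal pool (16 <= Buffer.poolSize >> 1):
var a = Buffer.alloc(16, 0)             // never uses the pool
var b = Buffer.allocUnsafe(16).fill(0)  // may come from the pool

console.log(a.equals(b)) // true — identical contents either way
```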
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/bl/node_modules/safe-buffer/index.d.ts b/node_modules/bl/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/node_modules/bl/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, 
end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/bl/node_modules/safe-buffer/index.js b/node_modules/bl/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..22438dab --- /dev/null +++ b/node_modules/bl/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/bl/node_modules/safe-buffer/package.json b/node_modules/bl/node_modules/safe-buffer/package.json new file mode 100644 index 00000000..623fbc3f --- /dev/null +++ 
b/node_modules/bl/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/bl/node_modules/string_decoder/.travis.yml b/node_modules/bl/node_modules/string_decoder/.travis.yml new file mode 100644 index 00000000..3347a725 --- /dev/null +++ b/node_modules/bl/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/bl/node_modules/string_decoder/LICENSE b/node_modules/bl/node_modules/string_decoder/LICENSE new file mode 100644 index 00000000..778edb20 --- /dev/null +++ b/node_modules/bl/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/bl/node_modules/string_decoder/README.md b/node_modules/bl/node_modules/string_decoder/README.md new file mode 100644 index 00000000..5fd58315 --- /dev/null +++ b/node_modules/bl/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/bl/node_modules/string_decoder/lib/string_decoder.js b/node_modules/bl/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 00000000..2e89e63f --- /dev/null +++ b/node_modules/bl/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? 
r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. 
+function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/bl/node_modules/string_decoder/package.json b/node_modules/bl/node_modules/string_decoder/package.json new file mode 100644 index 00000000..518c3eb9 --- /dev/null +++ b/node_modules/bl/node_modules/string_decoder/package.json @@ -0,0 +1,31 @@ +{ + "name": "string_decoder", + "version": "1.1.1", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/bl/package.json b/node_modules/bl/package.json new file mode 100644 index 00000000..1ffd2526 --- /dev/null +++ b/node_modules/bl/package.json @@ -0,0 +1,35 @@ +{ + "name": "bl", + "version": "1.2.3", + "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!", + "main": "bl.js", + "scripts": { + "test": "node test/test.js | faucet" + }, + "repository": { + "type": "git", + "url": "https://github.com/rvagg/bl.git" + }, + "homepage": "https://github.com/rvagg/bl", + "authors": [ + "Rod Vagg (https://github.com/rvagg)", + "Matteo Collina (https://github.com/mcollina)", + "Jarett Cruger (https://github.com/jcrugzz)" + ], + "keywords": [ + "buffer", + "buffers", + "stream", + "awesomesauce" + ], + "license": "MIT", + "dependencies": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + }, + "devDependencies": { + "faucet": "0.0.1", + "hash_file": "~0.1.1", + "tape": "~4.9.0" + } +} diff --git a/node_modules/bl/test/test.js b/node_modules/bl/test/test.js new file mode 100644 index 00000000..dac1861e --- /dev/null +++ b/node_modules/bl/test/test.js @@ -0,0 +1,718 @@ +var tape = require('tape') + , crypto = require('crypto') + , fs = require('fs') + , hash = require('hash_file') + , BufferList = require('../') + , Buffer = require('safe-buffer').Buffer + + , encodings = + ('hex utf8 utf-8 ascii binary base64' + + (process.browser ? 
'' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ') + +tape('single bytes from single buffer', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + + t.end() +}) + +tape('single bytes from multiple buffers', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), 101) + t.equal(bl.get(5), 102) + t.equal(bl.get(6), 103) + t.equal(bl.get(7), 104) + t.equal(bl.get(8), 105) + t.equal(bl.get(9), 106) + t.end() +}) + +tape('multi bytes from single buffer', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + + t.equal(bl.slice(0, 4).toString('ascii'), 'abcd') + t.equal(bl.slice(0, 3).toString('ascii'), 'abc') + t.equal(bl.slice(1, 4).toString('ascii'), 'bcd') + t.equal(bl.slice(-4, -1).toString('ascii'), 'abc') + + t.end() +}) + +tape('multi bytes from single buffer (negative indexes)', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('buffer')) + + t.equal(bl.length, 6) + + t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe') + t.equal(bl.slice(-6, -2).toString('ascii'), 'buff') + t.equal(bl.slice(-5, -2).toString('ascii'), 'uff') + + t.end() +}) + +tape('multiple bytes from multiple buffers', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + t.equal(bl.slice(-7, -4).toString('ascii'), 'def') + + t.end() +}) + +tape('multiple bytes from multiple buffer lists', function (t) { + var bl = new BufferList() + + bl.append(new BufferList([ Buffer.from('abcd'), Buffer.from('efg') ])) + bl.append(new BufferList([ Buffer.from('hi'), Buffer.from('j') ])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +// same data as previous test, just using nested constructors +tape('multiple bytes from crazy nested buffer lists', function (t) { + var bl = new BufferList() + + bl.append(new BufferList([ + new BufferList([ + new BufferList(Buffer.from('abc')) + , Buffer.from('d') + , new BufferList(Buffer.from('efg')) + ]) + , new BufferList([ Buffer.from('hi') ]) + , new BufferList(Buffer.from('j')) + ])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +tape('append accepts arrays of Buffers', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abc')) + bl.append([ 
Buffer.from('def') ]) + bl.append([ Buffer.from('ghi'), Buffer.from('jkl') ]) + bl.append([ Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz') ]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + t.end() +}) + +tape('append accepts arrays of BufferLists', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abc')) + bl.append([ new BufferList('def') ]) + bl.append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ])) + bl.append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + t.end() +}) + +tape('append chainable', function (t) { + var bl = new BufferList() + t.ok(bl.append(Buffer.from('abcd')) === bl) + t.ok(bl.append([ Buffer.from('abcd') ]) === bl) + t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl) + t.ok(bl.append([ new BufferList(Buffer.from('abcd')) ]) === bl) + t.end() +}) + +tape('append chainable (test results)', function (t) { + var bl = new BufferList('abc') + .append([ new BufferList('def') ]) + .append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ])) + .append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ]) + + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + t.end() +}) + +tape('consuming from multiple buffers', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + bl.consume(3) + t.equal(bl.length, 7) + t.equal(bl.slice(0, 7).toString('ascii'), 'defghij') + + bl.consume(2) + t.equal(bl.length, 5) + t.equal(bl.slice(0, 5).toString('ascii'), 'fghij') + + bl.consume(1) + t.equal(bl.length, 4) + t.equal(bl.slice(0, 4).toString('ascii'), 'ghij') + + bl.consume(1) + t.equal(bl.length, 3) + t.equal(bl.slice(0, 3).toString('ascii'), 'hij') + + bl.consume(2) + t.equal(bl.length, 1) + t.equal(bl.slice(0, 1).toString('ascii'), 'j') + + t.end() +}) + +tape('complete consumption', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('a')) + bl.append(Buffer.from('b')) + + bl.consume(2) + + t.equal(bl.length, 0) + t.equal(bl._bufs.length, 0) + + t.end() +}) + +tape('test readUInt8 / readInt8', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt8(2), 0x3) + t.equal(bl.readInt8(2), 0x3) + t.equal(bl.readUInt8(3), 0x4) + t.equal(bl.readInt8(3), 0x4) + t.equal(bl.readUInt8(4), 0x23) + t.equal(bl.readInt8(4), 0x23) + t.equal(bl.readUInt8(5), 0x42) + t.equal(bl.readInt8(5), 0x42) + t.end() +}) + +tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt16BE(2), 0x0304) + t.equal(bl.readUInt16LE(2), 0x0403) + t.equal(bl.readInt16BE(2), 0x0304) + t.equal(bl.readInt16LE(2), 0x0403) + t.equal(bl.readUInt16BE(3), 0x0423) + t.equal(bl.readUInt16LE(3), 0x2304) 
+ t.equal(bl.readInt16BE(3), 0x0423) + t.equal(bl.readInt16LE(3), 0x2304) + t.equal(bl.readUInt16BE(4), 0x2342) + t.equal(bl.readUInt16LE(4), 0x4223) + t.equal(bl.readInt16BE(4), 0x2342) + t.equal(bl.readInt16LE(4), 0x4223) + t.end() +}) + +tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt32BE(2), 0x03042342) + t.equal(bl.readUInt32LE(2), 0x42230403) + t.equal(bl.readInt32BE(2), 0x03042342) + t.equal(bl.readInt32LE(2), 0x42230403) + t.end() +}) + +tape('test readFloatLE / readFloatBE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x00 + buf2[2] = 0x00 + buf3[0] = 0x80 + buf3[1] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readFloatLE(2), 0x01) + t.end() +}) + +tape('test readDoubleLE / readDoubleBE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(10) + , bl = new BufferList() + + buf2[1] = 0x55 + buf2[2] = 0x55 + buf3[0] = 0x55 + buf3[1] = 0x55 + buf3[2] = 0x55 + buf3[3] = 0x55 + buf3[4] = 0xd5 + buf3[5] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readDoubleLE(2), 0.3333333333333333) + t.end() +}) + +tape('test toString', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.toString('ascii', 0, 10), 'abcdefghij') + t.equal(bl.toString('ascii', 3, 10), 'defghij') + t.equal(bl.toString('ascii', 3, 6), 'def') + t.equal(bl.toString('ascii', 3, 8), 'defgh') + t.equal(bl.toString('ascii', 5, 10), 'fghij') + + t.end() +}) + +tape('test toString encoding', function (t) { + var bl = new BufferList() + , b = Buffer.from('abcdefghij\xff\x00') + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + bl.append(Buffer.from('\xff\x00')) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc), enc) + }) + + t.end() +}) + +tape('uninitialized memory', function (t) { + const secret = crypto.randomBytes(256) + for (let i = 0; i < 1e6; i++) { + const clone = Buffer.from(secret) + const bl = new BufferList() + bl.append(Buffer.from('a')) + bl.consume(-1024) + const buf = bl.slice(1) + if (buf.indexOf(clone) !== -1) { + t.fail(`Match (at ${i})`) + break + } + } + t.end() +}) + +!process.browser && tape('test stream', function (t) { + var random = crypto.randomBytes(65534) + , rndhash = hash(random, 'md5') + , md5sum = crypto.createHash('md5') + , bl = new BufferList(function (err, buf) { + t.ok(Buffer.isBuffer(buf)) + t.ok(err === null) + t.equal(rndhash, hash(bl.slice(), 'md5')) + t.equal(rndhash, hash(buf, 'md5')) + + bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat')) + .on('close', function () { + var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat') + s.on('data', md5sum.update.bind(md5sum)) + s.on('end', function() { + t.equal(rndhash, md5sum.digest('hex'), 'woohoo! 
correct hash!') + t.end() + }) + }) + + }) + + fs.writeFileSync('/tmp/bl_test_rnd.dat', random) + fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl) +}) + +tape('instantiation with Buffer', function (t) { + var buf = crypto.randomBytes(1024) + , buf2 = crypto.randomBytes(1024) + , b = BufferList(buf) + + t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer') + b = BufferList([ buf, buf2 ]) + t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer') + t.end() +}) + +tape('test String appendage', function (t) { + var bl = new BufferList() + , b = Buffer.from('abcdefghij\xff\x00') + + bl.append('abcd') + bl.append('efg') + bl.append('hi') + bl.append('j') + bl.append('\xff\x00') + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('test Number appendage', function (t) { + var bl = new BufferList() + , b = Buffer.from('1234567890') + + bl.append(1234) + bl.append(567) + bl.append(89) + bl.append(0) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('write nothing, should get empty buffer', function (t) { + t.plan(3) + BufferList(function (err, data) { + t.notOk(err, 'no error') + t.ok(Buffer.isBuffer(data), 'got a buffer') + t.equal(0, data.length, 'got a zero-length buffer') + t.end() + }).end() +}) + +tape('unicode string', function (t) { + t.plan(2) + var inp1 = '\u2600' + , inp2 = '\u2603' + , exp = inp1 + ' and ' + inp2 + , bl = BufferList() + bl.write(inp1) + bl.write(' and ') + bl.write(inp2) + t.equal(exp, bl.toString()) + t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex')) +}) + +tape('should emit finish', function (t) { + var source = BufferList() + , dest = BufferList() + + source.write('hello') + source.pipe(dest) + + dest.on('finish', function () { + t.equal(dest.toString('utf8'), 'hello') + t.end() + }) +}) + +tape('basic copy', function (t) { + var buf = crypto.randomBytes(1024) + , buf2 = Buffer.alloc(1024) + , b = BufferList(buf) + + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy after many appends', function (t) { + var buf = crypto.randomBytes(512) + , buf2 = Buffer.alloc(1024) + , b = BufferList(buf) + + b.append(buf) + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy at a precise position', function (t) { + var buf = crypto.randomBytes(1004) + , buf2 = Buffer.alloc(1024) + , b = BufferList(buf) + + b.copy(buf2, 20) + t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer') + t.end() +}) + +tape('copy starting from a precise location', function (t) { + var buf = crypto.randomBytes(10) + , buf2 = Buffer.alloc(5) + , b = BufferList(buf) + + b.copy(buf2, 0, 5) + t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy in an interval', function (t) { + var rnd = crypto.randomBytes(10) + , b = BufferList(rnd) // put the random bytes there + , actual = Buffer.alloc(3) + , expected = Buffer.alloc(3) + + rnd.copy(expected, 0, 5, 8) + b.copy(actual, 0, 5, 8) + + t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy an interval between two buffers', function (t) { + var buf = crypto.randomBytes(10) + , buf2 = Buffer.alloc(10) + , b = BufferList(buf) + + b.append(buf) + b.copy(buf2, 0, 5, 15) + + t.equal(b.slice(5, 15).toString('hex'), 
buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('shallow slice across buffer boundaries', function (t) { + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh') + t.end() +}) + +tape('shallow slice within single buffer', function (t) { + t.plan(2) + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(5, 10).toString(), 'Secon') + t.equal(bl.shallowSlice(7, 10).toString(), 'con') + t.end() +}) + +tape('shallow slice single buffer', function (t) { + t.plan(3) + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(0, 5).toString(), 'First') + t.equal(bl.shallowSlice(5, 11).toString(), 'Second') + t.equal(bl.shallowSlice(11, 16).toString(), 'Third') +}) + +tape('shallow slice with negative or omitted indices', function (t) { + t.plan(4) + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice().toString(), 'FirstSecondThird') + t.equal(bl.shallowSlice(5).toString(), 'SecondThird') + t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh') + t.equal(bl.shallowSlice(-8).toString(), 'ondThird') +}) + +tape('shallow slice does not make a copy', function (t) { + t.plan(1) + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + var bl = (new BufferList(buffers)).shallowSlice(5, -3) + + buffers[1].fill('h') + buffers[2].fill('h') + + t.equal(bl.toString(), 'hhhhhhhh') +}) + +tape('duplicate', function (t) { + t.plan(2) + + var bl = new BufferList('abcdefghij\xff\x00') + , dup = bl.duplicate() + + t.equal(bl.prototype, dup.prototype) + t.equal(bl.toString('hex'), dup.toString('hex')) +}) + +tape('destroy no pipe', function (t) { + t.plan(2) + + var bl = new BufferList('alsdkfja;lsdkfja;lsdk') + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe before read end', function (t) { + t.plan(2) + + var bl = new BufferList() + fs.createReadStream(__dirname + '/test.js') + .pipe(bl) + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + +}) + +!process.browser && tape('destroy with pipe before read end with race', function (t) { + t.plan(2) + + var bl = new BufferList() + fs.createReadStream(__dirname + '/test.js') + .pipe(bl) + + setTimeout(function () { + bl.destroy() + setTimeout(function () { + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + }, 500) + }, 500) +}) + +!process.browser && tape('destroy with pipe after read end', function (t) { + t.plan(2) + + var bl = new BufferList() + fs.createReadStream(__dirname + '/test.js') + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + } +}) + +!process.browser && tape('destroy with pipe while writing to a destination', function (t) { + t.plan(4) + + var bl = new BufferList() + , ds = new BufferList() + + fs.createReadStream(__dirname + '/test.js') + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.pipe(ds) + + setTimeout(function () { + bl.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + + ds.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + + }, 100) + } +}) + +!process.browser && tape('handle error', function (t) { + t.plan(2) + fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) { + t.ok(err instanceof Error, 'has error') + t.notOk(data, 'no data') + })) +}) diff --git a/node_modules/body-parser/HISTORY.md 
b/node_modules/body-parser/HISTORY.md new file mode 100644 index 00000000..b8924919 --- /dev/null +++ b/node_modules/body-parser/HISTORY.md @@ -0,0 +1,665 @@ +1.20.2 / 2023-02-21 +=================== + + * Fix strict json error message on Node.js 19+ + * deps: content-type@~1.0.5 + - perf: skip value escaping when unnecessary + * deps: raw-body@2.5.2 + +1.20.1 / 2022-10-06 +=================== + + * deps: qs@6.11.0 + * perf: remove unnecessary object clone + +1.20.0 / 2022-04-02 +=================== + + * Fix error message for json parse whitespace in `strict` + * Fix internal error when inflated body exceeds limit + * Prevent loss of async hooks context + * Prevent hanging when request already read + * deps: depd@2.0.0 + - Replace internal `eval` usage with `Function` constructor + - Use instance methods on `process` to check for listeners + * deps: http-errors@2.0.0 + - deps: depd@2.0.0 + - deps: statuses@2.0.1 + * deps: on-finished@2.4.1 + * deps: qs@6.10.3 + * deps: raw-body@2.5.1 + - deps: http-errors@2.0.0 + +1.19.2 / 2022-02-15 +=================== + + * deps: bytes@3.1.2 + * deps: qs@6.9.7 + * Fix handling of `__proto__` keys + * deps: raw-body@2.4.3 + - deps: bytes@3.1.2 + +1.19.1 / 2021-12-10 +=================== + + * deps: bytes@3.1.1 + * deps: http-errors@1.8.1 + - deps: inherits@2.0.4 + - deps: toidentifier@1.0.1 + - deps: setprototypeof@1.2.0 + * deps: qs@6.9.6 + * deps: raw-body@2.4.2 + - deps: bytes@3.1.1 + - deps: http-errors@1.8.1 + * deps: safe-buffer@5.2.1 + * deps: type-is@~1.6.18 + +1.19.0 / 2019-04-25 +=================== + + * deps: bytes@3.1.0 + - Add petabyte (`pb`) support + * deps: http-errors@1.7.2 + - Set constructor name when possible + - deps: setprototypeof@1.1.1 + - deps: statuses@'>= 1.5.0 < 2' + * deps: iconv-lite@0.4.24 + - Added encoding MIK + * deps: qs@6.7.0 + - Fix parsing array brackets after index + * deps: raw-body@2.4.0 + - deps: bytes@3.1.0 + - deps: http-errors@1.7.2 + - deps: iconv-lite@0.4.24 + * deps: type-is@~1.6.17 + - deps: mime-types@~2.1.24 + - perf: prevent internal `throw` on invalid type + +1.18.3 / 2018-05-14 +=================== + + * Fix stack trace for strict json parse error + * deps: depd@~1.1.2 + - perf: remove argument reassignment + * deps: http-errors@~1.6.3 + - deps: depd@~1.1.2 + - deps: setprototypeof@1.1.0 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.23 + - Fix loading encoding with year appended + - Fix deprecation warnings on Node.js 10+ + * deps: qs@6.5.2 + * deps: raw-body@2.3.3 + - deps: http-errors@1.6.3 + - deps: iconv-lite@0.4.23 + * deps: type-is@~1.6.16 + - deps: mime-types@~2.1.18 + +1.18.2 / 2017-09-22 +=================== + + * deps: debug@2.6.9 + * perf: remove argument reassignment + +1.18.1 / 2017-09-12 +=================== + + * deps: content-type@~1.0.4 + - perf: remove argument reassignment + - perf: skip parameter parsing when no parameters + * deps: iconv-lite@0.4.19 + - Fix ISO-8859-1 regression + - Update Windows-1255 + * deps: qs@6.5.1 + - Fix parsing & compacting very deep objects + * deps: raw-body@2.3.2 + - deps: iconv-lite@0.4.19 + +1.18.0 / 2017-09-08 +=================== + + * Fix JSON strict violation error to match native parse error + * Include the `body` property on verify errors + * Include the `type` property on all generated errors + * Use `http-errors` to set status code on errors + * deps: bytes@3.0.0 + * deps: debug@2.6.8 + * deps: depd@~1.1.1 + - Remove unnecessary `Buffer` loading + * deps: http-errors@~1.6.2 + - deps: depd@1.1.1 + * deps: iconv-lite@0.4.18 + - 
Add support for React Native + - Add a warning if not loaded as utf-8 + - Fix CESU-8 decoding in Node.js 8 + - Improve speed of ISO-8859-1 encoding + * deps: qs@6.5.0 + * deps: raw-body@2.3.1 + - Use `http-errors` for standard emitted errors + - deps: bytes@3.0.0 + - deps: iconv-lite@0.4.18 + - perf: skip buffer decoding on overage chunk + * perf: prevent internal `throw` when missing charset + +1.17.2 / 2017-05-17 +=================== + + * deps: debug@2.6.7 + - Fix `DEBUG_MAX_ARRAY_LENGTH` + - deps: ms@2.0.0 + * deps: type-is@~1.6.15 + - deps: mime-types@~2.1.15 + +1.17.1 / 2017-03-06 +=================== + + * deps: qs@6.4.0 + - Fix regression parsing keys starting with `[` + +1.17.0 / 2017-03-01 +=================== + + * deps: http-errors@~1.6.1 + - Make `message` property enumerable for `HttpError`s + - deps: setprototypeof@1.0.3 + * deps: qs@6.3.1 + - Fix compacting nested arrays + +1.16.1 / 2017-02-10 +=================== + + * deps: debug@2.6.1 + - Fix deprecation messages in WebStorm and other editors + - Undeprecate `DEBUG_FD` set to `1` or `2` + +1.16.0 / 2017-01-17 +=================== + + * deps: debug@2.6.0 + - Allow colors in workers + - Deprecated `DEBUG_FD` environment variable + - Fix error when running under React Native + - Use same color for same namespace + - deps: ms@0.7.2 + * deps: http-errors@~1.5.1 + - deps: inherits@2.0.3 + - deps: setprototypeof@1.0.2 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.15 + - Added encoding MS-31J + - Added encoding MS-932 + - Added encoding MS-936 + - Added encoding MS-949 + - Added encoding MS-950 + - Fix GBK/GB18030 handling of Euro character + * deps: qs@6.2.1 + - Fix array parsing from skipping empty values + * deps: raw-body@~2.2.0 + - deps: iconv-lite@0.4.15 + * deps: type-is@~1.6.14 + - deps: mime-types@~2.1.13 + +1.15.2 / 2016-06-19 +=================== + + * deps: bytes@2.4.0 + * deps: content-type@~1.0.2 + - perf: enable strict mode + * deps: http-errors@~1.5.0 + - Use `setprototypeof` module to replace `__proto__` setting + - deps: statuses@'>= 1.3.0 < 2' + - perf: enable strict mode + * deps: qs@6.2.0 + * deps: raw-body@~2.1.7 + - deps: bytes@2.4.0 + - perf: remove double-cleanup on happy path + * deps: type-is@~1.6.13 + - deps: mime-types@~2.1.11 + +1.15.1 / 2016-05-05 +=================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + * deps: raw-body@~2.1.6 + - deps: bytes@2.3.0 + * deps: type-is@~1.6.12 + - deps: mime-types@~2.1.10 + +1.15.0 / 2016-02-10 +=================== + + * deps: http-errors@~1.4.0 + - Add `HttpError` export, for `err instanceof createError.HttpError` + - deps: inherits@2.0.1 + - deps: statuses@'>= 1.2.1 < 2' + * deps: qs@6.1.0 + * deps: type-is@~1.6.11 + - deps: mime-types@~2.1.9 + +1.14.2 / 2015-12-16 +=================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + * deps: qs@5.2.0 + * deps: raw-body@~2.1.5 + - deps: bytes@2.2.0 + - deps: iconv-lite@0.4.13 + * deps: type-is@~1.6.10 + - deps: mime-types@~2.1.8 + +1.14.1 / 2015-09-27 +=================== + + * Fix issue where invalid charset results in 400 when `verify` used + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + * deps: raw-body@~2.1.4 + - Fix masking critical errors from `iconv-lite` + - deps: iconv-lite@0.4.12 + * deps: type-is@~1.6.9 + - deps: mime-types@~2.1.7 + +1.14.0 / 2015-09-16 +=================== + + * Fix JSON strict parse error to match syntax errors + * Provide static `require` analysis in `urlencoded` 
parser + * deps: depd@~1.1.0 + - Support web browser loading + * deps: qs@5.1.0 + * deps: raw-body@~2.1.3 + - Fix sync callback when attaching data listener causes sync read + * deps: type-is@~1.6.8 + - Fix type error when given invalid type to match against + - deps: mime-types@~2.1.6 + +1.13.3 / 2015-07-31 +=================== + + * deps: type-is@~1.6.6 + - deps: mime-types@~2.1.4 + +1.13.2 / 2015-07-05 +=================== + + * deps: iconv-lite@0.4.11 + * deps: qs@4.0.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix user-visible incompatibilities from 3.1.0 + - Fix various parsing edge cases + * deps: raw-body@~2.1.2 + - Fix error stack traces to skip `makeError` + - deps: iconv-lite@0.4.11 + * deps: type-is@~1.6.4 + - deps: mime-types@~2.1.2 + - perf: enable strict mode + - perf: remove argument reassignment + +1.13.1 / 2015-06-16 +=================== + + * deps: qs@2.4.2 + - Downgraded from 3.1.0 because of user-visible incompatibilities + +1.13.0 / 2015-06-14 +=================== + + * Add `statusCode` property on `Error`s, in addition to `status` + * Change `type` default to `application/json` for JSON parser + * Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser + * Provide static `require` analysis + * Use the `http-errors` module to generate errors + * deps: bytes@2.1.0 + - Slight optimizations + * deps: iconv-lite@0.4.10 + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + - Leading BOM is now removed when decoding + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * deps: qs@3.1.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix various parsing edge cases + - Parsed object now has `null` prototype + * deps: raw-body@~2.1.1 + - Use `unpipe` module for unpiping requests + - deps: iconv-lite@0.4.10 + * deps: type-is@~1.6.3 + - deps: mime-types@~2.1.1 + - perf: reduce try block size + - perf: remove bitwise operations + * perf: enable strict mode + * perf: remove argument reassignment + * perf: remove delete call + +1.12.4 / 2015-05-10 +=================== + + * deps: debug@~2.2.0 + * deps: qs@2.4.2 + - Fix allowing parameters like `constructor` + * deps: on-finished@~2.2.1 + * deps: raw-body@~2.0.1 + - Fix a false-positive when unpiping in Node.js 0.8 + - deps: bytes@2.0.1 + * deps: type-is@~1.6.2 + - deps: mime-types@~2.0.11 + +1.12.3 / 2015-04-15 +=================== + + * Slight efficiency improvement when not debugging + * deps: depd@~1.0.1 + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + * deps: raw-body@1.3.4 + - Fix hanging callback if request aborts during read + - deps: iconv-lite@0.4.8 + +1.12.2 / 2015-03-16 +=================== + + * deps: qs@2.4.1 + - Fix error when parameter `hasOwnProperty` is present + +1.12.1 / 2015-03-15 +=================== + + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + * deps: type-is@~1.6.1 + - deps: mime-types@~2.0.10 + +1.12.0 / 2015-02-13 +=================== + + * add `debug` messages + * accept a function for the `type` option + * use `content-type` to parse `Content-Type` headers + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + * deps: raw-body@1.3.3 + - deps: iconv-lite@0.4.7 + * deps: type-is@~1.6.0 + - fix argument reassignment + - fix false-positives in `hasBody` `Transfer-Encoding` check + - support wildcard for both type and subtype 
(`*/*`) + - deps: mime-types@~2.0.9 + +1.11.0 / 2015-01-30 +=================== + + * make internal `extended: true` depth limit infinity + * deps: type-is@~1.5.6 + - deps: mime-types@~2.0.8 + +1.10.2 / 2015-01-20 +=================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + * deps: raw-body@1.3.2 + - deps: iconv-lite@0.4.6 + +1.10.1 / 2015-01-01 +=================== + + * deps: on-finished@~2.2.0 + * deps: type-is@~1.5.5 + - deps: mime-types@~2.0.7 + +1.10.0 / 2014-12-02 +=================== + + * make internal `extended: true` array limit dynamic + +1.9.3 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + * deps: qs@2.3.3 + - Fix `arrayLimit` behavior + * deps: raw-body@1.3.1 + - deps: iconv-lite@0.4.5 + * deps: type-is@~1.5.3 + - deps: mime-types@~2.0.3 + +1.9.2 / 2014-10-27 +================== + + * deps: qs@2.3.2 + - Fix parsing of mixed objects and values + +1.9.1 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + * deps: qs@2.3.0 + - Fix parsing of mixed implicit and explicit arrays + * deps: type-is@~1.5.2 + - deps: mime-types@~2.0.2 + +1.9.0 / 2014-09-24 +================== + + * include the charset in "unsupported charset" error message + * include the encoding in "unsupported content encoding" error message + * deps: depd@~1.0.0 + +1.8.4 / 2014-09-23 +================== + + * fix content encoding to be case-insensitive + +1.8.3 / 2014-09-19 +================== + + * deps: qs@2.2.4 + - Fix issue with object keys starting with numbers truncated + +1.8.2 / 2014-09-15 +================== + + * deps: depd@0.4.5 + +1.8.1 / 2014-09-07 +================== + + * deps: media-typer@0.3.0 + * deps: type-is@~1.5.1 + +1.8.0 / 2014-09-05 +================== + + * make empty-body-handling consistent between chunked requests + - empty `json` produces `{}` + - empty `raw` produces `new Buffer(0)` + - empty `text` produces `''` + - empty `urlencoded` produces `{}` + * deps: qs@2.2.3 + - Fix issue where first empty value in array is discarded + * deps: type-is@~1.5.0 + - fix `hasbody` to be true for `content-length: 0` + +1.7.0 / 2014-09-01 +================== + + * add `parameterLimit` option to `urlencoded` parser + * change `urlencoded` extended array limit to 100 + * respond with 413 when over `parameterLimit` in `urlencoded` + +1.6.7 / 2014-08-29 +================== + + * deps: qs@2.2.2 + - Remove unnecessary cloning + +1.6.6 / 2014-08-27 +================== + + * deps: qs@2.2.0 + - Array parsing fix + - Performance improvements + +1.6.5 / 2014-08-16 +================== + + * deps: on-finished@2.1.0 + +1.6.4 / 2014-08-14 +================== + + * deps: qs@1.2.2 + +1.6.3 / 2014-08-10 +================== + + * deps: qs@1.2.1 + +1.6.2 / 2014-08-07 +================== + + * deps: qs@1.2.0 + - Fix parsing array of objects + +1.6.1 / 2014-08-06 +================== + + * deps: qs@1.1.0 + - Accept urlencoded square brackets + - Accept empty values in implicit array notation + +1.6.0 / 2014-08-05 +================== + + * deps: qs@1.0.2 + - Complete rewrite + - Limits array length to 20 + - Limits object depth to 5 + - Limits parameters to 1,000 + +1.5.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + +1.5.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + +1.5.0 / 2014-07-20 +================== + + * deps: depd@0.4.2 + - 
Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + * deps: raw-body@1.3.0 + - deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + - Fix `Cannot switch to old mode now` error on Node.js 0.10+ + * deps: type-is@~1.3.2 + +1.4.3 / 2014-06-19 +================== + + * deps: type-is@1.3.1 + - fix global variable leak + +1.4.2 / 2014-06-19 +================== + + * deps: type-is@1.3.0 + - improve type parsing + +1.4.1 / 2014-06-19 +================== + + * fix urlencoded extended deprecation message + +1.4.0 / 2014-06-19 +================== + + * add `text` parser + * add `raw` parser + * check accepted charset in content-type (accepts utf-8) + * check accepted encoding in content-encoding (accepts identity) + * deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed + * deprecate `urlencoded()` without provided `extended` option + * lazy-load urlencoded parsers + * parsers split into files for reduced mem usage + * support gzip and deflate bodies + - set `inflate: false` to turn off + * deps: raw-body@1.2.2 + - Support all encodings from `iconv-lite` + +1.3.1 / 2014-06-11 +================== + + * deps: type-is@1.2.1 + - Switch dependency from mime to mime-types@1.0.0 + +1.3.0 / 2014-05-31 +================== + + * add `extended` option to urlencoded parser + +1.2.2 / 2014-05-27 +================== + + * deps: raw-body@1.1.6 + - assert stream encoding on node.js 0.8 + - assert stream encoding on node.js < 0.10.6 + - deps: bytes@1 + +1.2.1 / 2014-05-26 +================== + + * invoke `next(err)` after request fully read + - prevents hung responses and socket hang ups + +1.2.0 / 2014-05-11 +================== + + * add `verify` option + * deps: type-is@1.2.0 + - support suffix matching + +1.1.2 / 2014-05-11 +================== + + * improve json parser speed + +1.1.1 / 2014-05-11 +================== + + * fix repeated limit parsing with every request + +1.1.0 / 2014-05-10 +================== + + * add `type` option + * deps: pin for safety and consistency + +1.0.2 / 2014-04-14 +================== + + * use `type-is` module + +1.0.1 / 2014-03-20 +================== + + * lower default limits to 100kb diff --git a/node_modules/body-parser/LICENSE b/node_modules/body-parser/LICENSE new file mode 100644 index 00000000..386b7b69 --- /dev/null +++ b/node_modules/body-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/body-parser/README.md b/node_modules/body-parser/README.md new file mode 100644 index 00000000..38553bf7 --- /dev/null +++ b/node_modules/body-parser/README.md @@ -0,0 +1,465 @@ +# body-parser + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Node.js body parsing middleware. + +Parse incoming request bodies in a middleware before your handlers, available +under the `req.body` property. + +**Note** As `req.body`'s shape is based on user-controlled input, all +properties and values in this object are untrusted and should be validated +before trusting. For example, `req.body.foo.toString()` may fail in multiple +ways, for example the `foo` property may not be there or may not be a string, +and `toString` may not be a function and instead a string or other user input. + +[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/). + +_This does not handle multipart bodies_, due to their complex and typically +large nature. For multipart bodies, you may be interested in the following +modules: + + * [busboy](https://www.npmjs.org/package/busboy#readme) and + [connect-busboy](https://www.npmjs.org/package/connect-busboy#readme) + * [multiparty](https://www.npmjs.org/package/multiparty#readme) and + [connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme) + * [formidable](https://www.npmjs.org/package/formidable#readme) + * [multer](https://www.npmjs.org/package/multer#readme) + +This module provides the following parsers: + + * [JSON body parser](#bodyparserjsonoptions) + * [Raw body parser](#bodyparserrawoptions) + * [Text body parser](#bodyparsertextoptions) + * [URL-encoded form body parser](#bodyparserurlencodedoptions) + +Other body parsers you might be interested in: + +- [body](https://www.npmjs.org/package/body#readme) +- [co-body](https://www.npmjs.org/package/co-body#readme) + +## Installation + +```sh +$ npm install body-parser +``` + +## API + +```js +var bodyParser = require('body-parser') +``` + +The `bodyParser` object exposes various factories to create middlewares. All +middlewares will populate the `req.body` property with the parsed body when +the `Content-Type` request header matches the `type` option, or an empty +object (`{}`) if there was no body to parse, the `Content-Type` was not matched, +or an error occurred. + +The various errors returned by this module are described in the +[errors section](#errors). + +### bodyParser.json([options]) + +Returns middleware that only parses `json` and only looks at requests where +the `Content-Type` header matches the `type` option. This parser accepts any +Unicode encoding of the body and supports automatic inflation of `gzip` and +`deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). 
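+
+As a quick sketch, assuming an Express app and a hypothetical `/echo` route,
+this parser can be mounted and the untrusted `req.body` validated before use;
+the options used here are described below.
+
+```js
+var express = require('express')
+var bodyParser = require('body-parser')
+
+var app = express()
+
+// parse JSON bodies up to 10kb and only accept objects/arrays (strict)
+app.use(bodyParser.json({ limit: '10kb', strict: true }))
+
+app.post('/echo', function (req, res) {
+  // req.body is user-controlled input, so validate it before trusting it
+  if (typeof req.body.name !== 'string') {
+    return res.status(400).send('expected a string "name" property')
+  }
+  res.json({ hello: req.body.name })
+})
+
+app.listen(3000)
+```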
+ +#### Options + +The `json` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### reviver + +The `reviver` option is passed directly to `JSON.parse` as the second +argument. You can find more information on this argument +[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter). + +##### strict + +When set to `true`, will only accept arrays and objects; when `false` will +accept anything `JSON.parse` accepts. Defaults to `true`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not a +function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `json`), a mime type (like `application/json`), or +a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a truthy +value. Defaults to `application/json`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.raw([options]) + +Returns middleware that parses all bodies as a `Buffer` and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a `Buffer` object +of the body. + +#### Options + +The `raw` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. +If not a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this +can be an extension name (like `bin`), a mime type (like +`application/octet-stream`), or a mime type with a wildcard (like `*/*` or +`application/*`). If a function, the `type` option is called as `fn(req)` +and the request is parsed if it returns a truthy value. Defaults to +`application/octet-stream`. 
+ +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.text([options]) + +Returns middleware that parses all bodies as a string and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` string containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a string of the +body. + +#### Options + +The `text` function takes an optional `options` object that may contain any of +the following keys: + +##### defaultCharset + +Specify the default character set for the text content if the charset is not +specified in the `Content-Type` header of the request. Defaults to `utf-8`. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `txt`), a mime type (like `text/plain`), or a mime +type with a wildcard (like `*/*` or `text/*`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a +truthy value. Defaults to `text/plain`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.urlencoded([options]) + +Returns middleware that only parses `urlencoded` bodies and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser accepts only UTF-8 encoding of the body and supports automatic +inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This object will contain +key-value pairs, where the value can be a string or array (when `extended` is +`false`), or any type (when `extended` is `true`). + +#### Options + +The `urlencoded` function takes an optional `options` object that may contain +any of the following keys: + +##### extended + +The `extended` option allows to choose between parsing the URL-encoded data +with the `querystring` library (when `false`) or the `qs` library (when +`true`). The "extended" syntax allows for rich objects and arrays to be +encoded into the URL-encoded format, allowing for a JSON-like experience +with URL-encoded. For more information, please +[see the qs library](https://www.npmjs.org/package/qs#readme). + +Defaults to `true`, but using the default has been deprecated. Please +research into the difference between `qs` and `querystring` and choose the +appropriate setting. 
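+
+A minimal sketch, assuming the `qs` and `querystring` modules are available,
+of the practical difference between the two settings:
+
+```js
+var qs = require('qs')
+var querystring = require('querystring')
+
+// extended: true uses qs, which expands bracket notation into nested objects
+qs.parse('user[name]=Tobi&user[age]=3')
+// => { user: { name: 'Tobi', age: '3' } }
+
+// extended: false uses querystring, which keeps bracketed keys as flat strings
+querystring.parse('user[name]=Tobi&user[age]=3')
+// => { 'user[name]': 'Tobi', 'user[age]': '3' }
+```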
+ +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### parameterLimit + +The `parameterLimit` option controls the maximum number of parameters that +are allowed in the URL-encoded data. If a request contains more parameters +than this value, a 413 will be returned to the client. Defaults to `1000`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `urlencoded`), a mime type (like +`application/x-www-form-urlencoded`), or a mime type with a wildcard (like +`*/x-www-form-urlencoded`). If a function, the `type` option is called as +`fn(req)` and the request is parsed if it returns a truthy value. Defaults +to `application/x-www-form-urlencoded`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +## Errors + +The middlewares provided by this module create errors using the +[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors +will typically have a `status`/`statusCode` property that contains the suggested +HTTP response code, an `expose` property to determine if the `message` property +should be displayed to the client, a `type` property to determine the type of +error without matching against the `message`, and a `body` property containing +the read body, if available. + +The following are the common errors created, though any error can come through +for various reasons. + +### content encoding unsupported + +This error will occur when the request had a `Content-Encoding` header that +contained an encoding but the "inflation" option was set to `false`. The +`status` property is set to `415`, the `type` property is set to +`'encoding.unsupported'`, and the `charset` property will be set to the +encoding that is unsupported. + +### entity parse failed + +This error will occur when the request contained an entity that could not be +parsed by the middleware. The `status` property is set to `400`, the `type` +property is set to `'entity.parse.failed'`, and the `body` property is set to +the entity value that failed parsing. + +### entity verify failed + +This error will occur when the request contained an entity that could not be +failed verification by the defined `verify` option. The `status` property is +set to `403`, the `type` property is set to `'entity.verify.failed'`, and the +`body` property is set to the entity value that failed verification. + +### request aborted + +This error will occur when the request is aborted by the client before reading +the body has finished. The `received` property will be set to the number of +bytes received before the request was aborted and the `expected` property is +set to the number of expected bytes. The `status` property is set to `400` +and `type` property is set to `'request.aborted'`. 
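+
+A minimal sketch, assuming an Express app with the JSON parser mounted, of how
+the `status` and `type` properties described in this section might be checked
+in a centralized error handler:
+
+```js
+var express = require('express')
+var bodyParser = require('body-parser')
+
+var app = express()
+app.use(bodyParser.json())
+
+// inspect the error's type property rather than matching on its message
+app.use(function (err, req, res, next) {
+  if (err.type === 'entity.too.large') {
+    return res.status(413).send('payload too large')
+  }
+  if (err.type === 'entity.parse.failed') {
+    return res.status(400).send('invalid JSON')
+  }
+  next(err)
+})
+```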
+ +### request entity too large + +This error will occur when the request body's size is larger than the "limit" +option. The `limit` property will be set to the byte limit and the `length` +property will be set to the request body's length. The `status` property is +set to `413` and the `type` property is set to `'entity.too.large'`. + +### request size did not match content length + +This error will occur when the request's length did not match the length from +the `Content-Length` header. This typically occurs when the request is malformed, +typically when the `Content-Length` header was calculated based on characters +instead of bytes. The `status` property is set to `400` and the `type` property +is set to `'request.size.invalid'`. + +### stream encoding should not be set + +This error will occur when something called the `req.setEncoding` method prior +to this middleware. This module operates directly on bytes only and you cannot +call `req.setEncoding` when using this module. The `status` property is set to +`500` and the `type` property is set to `'stream.encoding.set'`. + +### stream is not readable + +This error will occur when the request is no longer readable when this middleware +attempts to read it. This typically means something other than a middleware from +this module read the request body already and the middleware was also configured to +read the same request. The `status` property is set to `500` and the `type` +property is set to `'stream.not.readable'`. + +### too many parameters + +This error will occur when the content of the request exceeds the configured +`parameterLimit` for the `urlencoded` parser. The `status` property is set to +`413` and the `type` property is set to `'parameters.too.many'`. + +### unsupported charset "BOGUS" + +This error will occur when the request had a charset parameter in the +`Content-Type` header, but the `iconv-lite` module does not support it OR the +parser does not support it. The charset is contained in the message as well +as in the `charset` property. The `status` property is set to `415`, the +`type` property is set to `'charset.unsupported'`, and the `charset` property +is set to the charset that is unsupported. + +### unsupported content encoding "bogus" + +This error will occur when the request had a `Content-Encoding` header that +contained an unsupported encoding. The encoding is contained in the message +as well as in the `encoding` property. The `status` property is set to `415`, +the `type` property is set to `'encoding.unsupported'`, and the `encoding` +property is set to the encoding that is unsupported. + +## Examples + +### Express/Connect top-level generic + +This example demonstrates adding a generic JSON and URL-encoded parser as a +top-level middleware, which will parse the bodies of all incoming requests. +This is the simplest setup. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse application/x-www-form-urlencoded +app.use(bodyParser.urlencoded({ extended: false })) + +// parse application/json +app.use(bodyParser.json()) + +app.use(function (req, res) { + res.setHeader('Content-Type', 'text/plain') + res.write('you posted:\n') + res.end(JSON.stringify(req.body, null, 2)) +}) +``` + +### Express route-specific + +This example demonstrates adding body parsers specifically to the routes that +need them. In general, this is the most recommended way to use body-parser with +Express. 
+ +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// create application/json parser +var jsonParser = bodyParser.json() + +// create application/x-www-form-urlencoded parser +var urlencodedParser = bodyParser.urlencoded({ extended: false }) + +// POST /login gets urlencoded bodies +app.post('/login', urlencodedParser, function (req, res) { + res.send('welcome, ' + req.body.username) +}) + +// POST /api/users gets JSON bodies +app.post('/api/users', jsonParser, function (req, res) { + // create user in req.body +}) +``` + +### Change accepted type for parsers + +All the parsers accept a `type` option which allows you to change the +`Content-Type` that the middleware will parse. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse various different custom JSON types as JSON +app.use(bodyParser.json({ type: 'application/*+json' })) + +// parse some custom thing into a Buffer +app.use(bodyParser.raw({ type: 'application/vnd.custom-type' })) + +// parse an HTML body into a string +app.use(bodyParser.text({ type: 'text/html' })) +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci +[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master +[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master +[node-version-image]: https://badgen.net/npm/node/body-parser +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/body-parser +[npm-url]: https://npmjs.org/package/body-parser +[npm-version-image]: https://badgen.net/npm/v/body-parser diff --git a/node_modules/body-parser/SECURITY.md b/node_modules/body-parser/SECURITY.md new file mode 100644 index 00000000..9694d429 --- /dev/null +++ b/node_modules/body-parser/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The Express team and community take all security bugs seriously. Thank you +for improving the security of Express. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `body-parser`. This +information can be found in the npm registry using the command +`npm owner ls body-parser`. +If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/expressjs/body-parser/issues) +asking for the current contact information. + +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/node_modules/body-parser/index.js b/node_modules/body-parser/index.js new file mode 100644 index 00000000..bb24d739 --- /dev/null +++ b/node_modules/body-parser/index.js @@ -0,0 +1,156 @@ +/*! 
+ * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var deprecate = require('depd')('body-parser') + +/** + * Cache of loaded parsers. + * @private + */ + +var parsers = Object.create(null) + +/** + * @typedef Parsers + * @type {function} + * @property {function} json + * @property {function} raw + * @property {function} text + * @property {function} urlencoded + */ + +/** + * Module exports. + * @type {Parsers} + */ + +exports = module.exports = deprecate.function(bodyParser, + 'bodyParser: use individual json/urlencoded middlewares') + +/** + * JSON parser. + * @public + */ + +Object.defineProperty(exports, 'json', { + configurable: true, + enumerable: true, + get: createParserGetter('json') +}) + +/** + * Raw parser. + * @public + */ + +Object.defineProperty(exports, 'raw', { + configurable: true, + enumerable: true, + get: createParserGetter('raw') +}) + +/** + * Text parser. + * @public + */ + +Object.defineProperty(exports, 'text', { + configurable: true, + enumerable: true, + get: createParserGetter('text') +}) + +/** + * URL-encoded parser. + * @public + */ + +Object.defineProperty(exports, 'urlencoded', { + configurable: true, + enumerable: true, + get: createParserGetter('urlencoded') +}) + +/** + * Create a middleware to parse json and urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @deprecated + * @public + */ + +function bodyParser (options) { + // use default type for parsers + var opts = Object.create(options || null, { + type: { + configurable: true, + enumerable: true, + value: undefined, + writable: true + } + }) + + var _urlencoded = exports.urlencoded(opts) + var _json = exports.json(opts) + + return function bodyParser (req, res, next) { + _json(req, res, function (err) { + if (err) return next(err) + _urlencoded(req, res, next) + }) + } +} + +/** + * Create a getter for loading a parser. + * @private + */ + +function createParserGetter (name) { + return function get () { + return loadParser(name) + } +} + +/** + * Load a parser module. + * @private + */ + +function loadParser (parserName) { + var parser = parsers[parserName] + + if (parser !== undefined) { + return parser + } + + // this uses a switch for static require analysis + switch (parserName) { + case 'json': + parser = require('./lib/types/json') + break + case 'raw': + parser = require('./lib/types/raw') + break + case 'text': + parser = require('./lib/types/text') + break + case 'urlencoded': + parser = require('./lib/types/urlencoded') + break + } + + // store to prevent invoking require() + return (parsers[parserName] = parser) +} diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js new file mode 100644 index 00000000..fce6283f --- /dev/null +++ b/node_modules/body-parser/lib/read.js @@ -0,0 +1,205 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var createError = require('http-errors') +var destroy = require('destroy') +var getBody = require('raw-body') +var iconv = require('iconv-lite') +var onFinished = require('on-finished') +var unpipe = require('unpipe') +var zlib = require('zlib') + +/** + * Module exports. + */ + +module.exports = read + +/** + * Read a request into a buffer and parse. 
+ * + * @param {object} req + * @param {object} res + * @param {function} next + * @param {function} parse + * @param {function} debug + * @param {object} options + * @private + */ + +function read (req, res, next, parse, debug, options) { + var length + var opts = options + var stream + + // flag as parsed + req._body = true + + // read options + var encoding = opts.encoding !== null + ? opts.encoding + : null + var verify = opts.verify + + try { + // get the content stream + stream = contentstream(req, debug, opts.inflate) + length = stream.length + stream.length = undefined + } catch (err) { + return next(err) + } + + // set raw-body options + opts.length = length + opts.encoding = verify + ? null + : encoding + + // assert charset is supported + if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) { + return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + })) + } + + // read body + debug('read body') + getBody(stream, opts, function (error, body) { + if (error) { + var _error + + if (error.type === 'encoding.unsupported') { + // echo back charset + _error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + }) + } else { + // set status code on error + _error = createError(400, error) + } + + // unpipe from stream and destroy + if (stream !== req) { + unpipe(req) + destroy(stream, true) + } + + // read off entire request + dump(req, function onfinished () { + next(createError(400, _error)) + }) + return + } + + // verify + if (verify) { + try { + debug('verify body') + verify(req, res, body, encoding) + } catch (err) { + next(createError(403, err, { + body: body, + type: err.type || 'entity.verify.failed' + })) + return + } + } + + // parse + var str = body + try { + debug('parse body') + str = typeof body !== 'string' && encoding !== null + ? iconv.decode(body, encoding) + : body + req.body = parse(str) + } catch (err) { + next(createError(400, err, { + body: str, + type: err.type || 'entity.parse.failed' + })) + return + } + + next() + }) +} + +/** + * Get the content stream of the request. + * + * @param {object} req + * @param {function} debug + * @param {boolean} [inflate=true] + * @return {object} + * @api private + */ + +function contentstream (req, debug, inflate) { + var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase() + var length = req.headers['content-length'] + var stream + + debug('content-encoding "%s"', encoding) + + if (inflate === false && encoding !== 'identity') { + throw createError(415, 'content encoding unsupported', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + switch (encoding) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') + req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') + req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length + break + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + return stream +} + +/** + * Dump the contents of a request. 
+ * + * @param {object} req + * @param {function} callback + * @api private + */ + +function dump (req, callback) { + if (onFinished.isFinished(req)) { + callback(null) + } else { + onFinished(req, callback) + req.resume() + } +} diff --git a/node_modules/body-parser/lib/types/json.js b/node_modules/body-parser/lib/types/json.js new file mode 100644 index 00000000..59f3f7e2 --- /dev/null +++ b/node_modules/body-parser/lib/types/json.js @@ -0,0 +1,247 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:json') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = json + +/** + * RegExp to match the first non-space in a string. + * + * Allowed whitespace is defined in RFC 7159: + * + * ws = *( + * %x20 / ; Space + * %x09 / ; Horizontal tab + * %x0A / ; Line feed or New line + * %x0D ) ; Carriage return + */ + +var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex + +var JSON_SYNTAX_CHAR = '#' +var JSON_SYNTAX_REGEXP = /#+/g + +/** + * Create a middleware to parse JSON bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function json (options) { + var opts = options || {} + + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var inflate = opts.inflate !== false + var reviver = opts.reviver + var strict = opts.strict !== false + var type = opts.type || 'application/json' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (body) { + if (body.length === 0) { + // special-case empty json body, as it's a common client-side mistake + // TODO: maybe make this configurable or part of "strict" option + return {} + } + + if (strict) { + var first = firstchar(body) + + if (first !== '{' && first !== '[') { + debug('strict violation') + throw createStrictSyntaxError(body, first) + } + } + + try { + debug('parse json') + return JSON.parse(body, reviver) + } catch (e) { + throw normalizeJsonSyntaxError(e, { + message: e.message, + stack: e.stack + }) + } + } + + return function jsonParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset per RFC 7159 sec 8.1 + var charset = getCharset(req) || 'utf-8' + if (charset.slice(0, 4) !== 'utf-') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Create strict violation syntax error matching native error. + * + * @param {string} str + * @param {string} char + * @return {Error} + * @private + */ + +function createStrictSyntaxError (str, char) { + var index = str.indexOf(char) + var partial = '' + + if (index !== -1) { + partial = str.substring(0, index) + JSON_SYNTAX_CHAR + + for (var i = index + 1; i < str.length; i++) { + partial += JSON_SYNTAX_CHAR + } + } + + try { + JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation') + } catch (e) { + return normalizeJsonSyntaxError(e, { + message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) { + return str.substring(index, index + placeholder.length) + }), + stack: e.stack + }) + } +} + +/** + * Get the first non-whitespace character in a string. + * + * @param {string} str + * @return {function} + * @private + */ + +function firstchar (str) { + var match = FIRST_CHAR_REGEXP.exec(str) + + return match + ? match[1] + : undefined +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Normalize a SyntaxError for JSON.parse. + * + * @param {SyntaxError} error + * @param {object} obj + * @return {SyntaxError} + */ + +function normalizeJsonSyntaxError (error, obj) { + var keys = Object.getOwnPropertyNames(error) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + if (key !== 'stack' && key !== 'message') { + delete error[key] + } + } + + // replace stack before message for Node.js 0.10 and below + error.stack = obj.stack.replace(error.message, obj.message) + error.message = obj.message + + return error +} + +/** + * Get the simple type checker. 
+ * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/raw.js b/node_modules/body-parser/lib/types/raw.js new file mode 100644 index 00000000..f5d1b674 --- /dev/null +++ b/node_modules/body-parser/lib/types/raw.js @@ -0,0 +1,101 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var debug = require('debug')('body-parser:raw') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = raw + +/** + * Create a middleware to parse raw bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function raw (options) { + var opts = options || {} + + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/octet-stream' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function rawParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // read + read(req, res, next, parse, debug, { + encoding: null, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/text.js b/node_modules/body-parser/lib/types/text.js new file mode 100644 index 00000000..083a0090 --- /dev/null +++ b/node_modules/body-parser/lib/types/text.js @@ -0,0 +1,121 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var debug = require('debug')('body-parser:text') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = text + +/** + * Create a middleware to parse text bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function text (options) { + var opts = options || {} + + var defaultCharset = opts.defaultCharset || 'utf-8' + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'text/plain' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function textParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // get charset + var charset = getCharset(req) || defaultCharset + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/lib/types/urlencoded.js b/node_modules/body-parser/lib/types/urlencoded.js new file mode 100644 index 00000000..b2ca8f16 --- /dev/null +++ b/node_modules/body-parser/lib/types/urlencoded.js @@ -0,0 +1,284 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:urlencoded') +var deprecate = require('depd')('body-parser') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = urlencoded + +/** + * Cache of parser modules. + */ + +var parsers = Object.create(null) + +/** + * Create a middleware to parse urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function urlencoded (options) { + var opts = options || {} + + // notice because option default will flip in next major + if (opts.extended === undefined) { + deprecate('undefined extended: provide extended option') + } + + var extended = opts.extended !== false + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/x-www-form-urlencoded' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate query parser + var queryparse = extended + ? extendedparser(opts) + : simpleparser(opts) + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + return body.length + ? 
queryparse(body) + : {} + } + + return function urlencodedParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset + var charset = getCharset(req) || 'utf-8' + if (charset !== 'utf-8') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + debug: debug, + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the extended query parser. + * + * @param {object} options + */ + +function extendedparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + var parse = parser('qs') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + var arrayLimit = Math.max(100, paramCount) + + debug('parse extended urlencoding') + return parse(body, { + allowPrototypes: true, + arrayLimit: arrayLimit, + depth: Infinity, + parameterLimit: parameterLimit + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Count the number of parameters, stopping once limit reached + * + * @param {string} body + * @param {number} limit + * @api private + */ + +function parameterCount (body, limit) { + var count = 0 + var index = 0 + + while ((index = body.indexOf('&', index)) !== -1) { + count++ + index++ + + if (count === limit) { + return undefined + } + } + + return count +} + +/** + * Get parser for module name dynamically. + * + * @param {string} name + * @return {function} + * @api private + */ + +function parser (name) { + var mod = parsers[name] + + if (mod !== undefined) { + return mod.parse + } + + // this uses a switch for static require analysis + switch (name) { + case 'qs': + mod = require('qs') + break + case 'querystring': + mod = require('querystring') + break + } + + // store to prevent invoking require() + parsers[name] = mod + + return mod.parse +} + +/** + * Get the simple query parser. + * + * @param {object} options + */ + +function simpleparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? 
options.parameterLimit + : 1000 + var parse = parser('querystring') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + debug('parse urlencoding') + return parse(body, undefined, undefined, { maxKeys: parameterLimit }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/node_modules/body-parser/node_modules/debug/.coveralls.yml b/node_modules/body-parser/node_modules/debug/.coveralls.yml new file mode 100644 index 00000000..20a70685 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/.coveralls.yml @@ -0,0 +1 @@ +repo_token: SIAeZjKYlHK74rbcFvNHMUzjRiMpflxve diff --git a/node_modules/body-parser/node_modules/debug/.eslintrc b/node_modules/body-parser/node_modules/debug/.eslintrc new file mode 100644 index 00000000..8a37ae2c --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/.eslintrc @@ -0,0 +1,11 @@ +{ + "env": { + "browser": true, + "node": true + }, + "rules": { + "no-console": 0, + "no-empty": [1, { "allowEmptyCatch": true }] + }, + "extends": "eslint:recommended" +} diff --git a/node_modules/body-parser/node_modules/debug/.npmignore b/node_modules/body-parser/node_modules/debug/.npmignore new file mode 100644 index 00000000..5f60eecc --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/.npmignore @@ -0,0 +1,9 @@ +support +test +examples +example +*.sock +dist +yarn.lock +coverage +bower.json diff --git a/node_modules/body-parser/node_modules/debug/.travis.yml b/node_modules/body-parser/node_modules/debug/.travis.yml new file mode 100644 index 00000000..6c6090c3 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/.travis.yml @@ -0,0 +1,14 @@ + +language: node_js +node_js: + - "6" + - "5" + - "4" + +install: + - make node_modules + +script: + - make lint + - make test + - make coveralls diff --git a/node_modules/body-parser/node_modules/debug/CHANGELOG.md b/node_modules/body-parser/node_modules/debug/CHANGELOG.md new file mode 100644 index 00000000..eadaa189 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/CHANGELOG.md @@ -0,0 +1,362 @@ + +2.6.9 / 2017-09-22 +================== + + * remove ReDoS regexp in %o formatter (#504) + +2.6.8 / 2017-05-18 +================== + + * Fix: Check for undefined on browser globals (#462, @marbemac) + +2.6.7 / 2017-05-16 +================== + + * Fix: Update ms to 2.0.0 to fix regular expression denial of service vulnerability (#458, @hubdotcom) + * Fix: Inline extend function in node implementation (#452, @dougwilson) + * Docs: Fix typo (#455, @msasad) + +2.6.5 / 2017-04-27 +================== + + * Fix: null reference check on window.documentElement.style.WebkitAppearance (#447, @thebigredgeek) + * Misc: clean up browser reference checks (#447, @thebigredgeek) + * Misc: add npm-debug.log to .gitignore (@thebigredgeek) + + +2.6.4 / 2017-04-20 +================== + + * Fix: bug that would occure if process.env.DEBUG is a non-string value. (#444, @LucianBuzzo) + * Chore: ignore bower.json in npm installations. 
(#437, @joaovieira) + * Misc: update "ms" to v0.7.3 (@tootallnate) + +2.6.3 / 2017-03-13 +================== + + * Fix: Electron reference to `process.env.DEBUG` (#431, @paulcbetts) + * Docs: Changelog fix (@thebigredgeek) + +2.6.2 / 2017-03-10 +================== + + * Fix: DEBUG_MAX_ARRAY_LENGTH (#420, @slavaGanzin) + * Docs: Add backers and sponsors from Open Collective (#422, @piamancini) + * Docs: Add Slackin invite badge (@tootallnate) + +2.6.1 / 2017-02-10 +================== + + * Fix: Module's `export default` syntax fix for IE8 `Expected identifier` error + * Fix: Whitelist DEBUG_FD for values 1 and 2 only (#415, @pi0) + * Fix: IE8 "Expected identifier" error (#414, @vgoma) + * Fix: Namespaces would not disable once enabled (#409, @musikov) + +2.6.0 / 2016-12-28 +================== + + * Fix: added better null pointer checks for browser useColors (@thebigredgeek) + * Improvement: removed explicit `window.debug` export (#404, @tootallnate) + * Improvement: deprecated `DEBUG_FD` environment variable (#405, @tootallnate) + +2.5.2 / 2016-12-25 +================== + + * Fix: reference error on window within webworkers (#393, @KlausTrainer) + * Docs: fixed README typo (#391, @lurch) + * Docs: added notice about v3 api discussion (@thebigredgeek) + +2.5.1 / 2016-12-20 +================== + + * Fix: babel-core compatibility + +2.5.0 / 2016-12-20 +================== + + * Fix: wrong reference in bower file (@thebigredgeek) + * Fix: webworker compatibility (@thebigredgeek) + * Fix: output formatting issue (#388, @kribblo) + * Fix: babel-loader compatibility (#383, @escwald) + * Misc: removed built asset from repo and publications (@thebigredgeek) + * Misc: moved source files to /src (#378, @yamikuronue) + * Test: added karma integration and replaced babel with browserify for browser tests (#378, @yamikuronue) + * Test: coveralls integration (#378, @yamikuronue) + * Docs: simplified language in the opening paragraph (#373, @yamikuronue) + +2.4.5 / 2016-12-17 +================== + + * Fix: `navigator` undefined in Rhino (#376, @jochenberger) + * Fix: custom log function (#379, @hsiliev) + * Improvement: bit of cleanup + linting fixes (@thebigredgeek) + * Improvement: rm non-maintainted `dist/` dir (#375, @freewil) + * Docs: simplified language in the opening paragraph. (#373, @yamikuronue) + +2.4.4 / 2016-12-14 +================== + + * Fix: work around debug being loaded in preload scripts for electron (#368, @paulcbetts) + +2.4.3 / 2016-12-14 +================== + + * Fix: navigation.userAgent error for react native (#364, @escwald) + +2.4.2 / 2016-12-14 +================== + + * Fix: browser colors (#367, @tootallnate) + * Misc: travis ci integration (@thebigredgeek) + * Misc: added linting and testing boilerplate with sanity check (@thebigredgeek) + +2.4.1 / 2016-12-13 +================== + + * Fix: typo that broke the package (#356) + +2.4.0 / 2016-12-13 +================== + + * Fix: bower.json references unbuilt src entry point (#342, @justmatt) + * Fix: revert "handle regex special characters" (@tootallnate) + * Feature: configurable util.inspect()`options for NodeJS (#327, @tootallnate) + * Feature: %O`(big O) pretty-prints objects (#322, @tootallnate) + * Improvement: allow colors in workers (#335, @botverse) + * Improvement: use same color for same namespace. 
(#338, @lchenay) + +2.3.3 / 2016-11-09 +================== + + * Fix: Catch `JSON.stringify()` errors (#195, Jovan Alleyne) + * Fix: Returning `localStorage` saved values (#331, Levi Thomason) + * Improvement: Don't create an empty object when no `process` (Nathan Rajlich) + +2.3.2 / 2016-11-09 +================== + + * Fix: be super-safe in index.js as well (@TooTallNate) + * Fix: should check whether process exists (Tom Newby) + +2.3.1 / 2016-11-09 +================== + + * Fix: Added electron compatibility (#324, @paulcbetts) + * Improvement: Added performance optimizations (@tootallnate) + * Readme: Corrected PowerShell environment variable example (#252, @gimre) + * Misc: Removed yarn lock file from source control (#321, @fengmk2) + +2.3.0 / 2016-11-07 +================== + + * Fix: Consistent placement of ms diff at end of output (#215, @gorangajic) + * Fix: Escaping of regex special characters in namespace strings (#250, @zacronos) + * Fix: Fixed bug causing crash on react-native (#282, @vkarpov15) + * Feature: Enabled ES6+ compatible import via default export (#212 @bucaran) + * Feature: Added %O formatter to reflect Chrome's console.log capability (#279, @oncletom) + * Package: Update "ms" to 0.7.2 (#315, @DevSide) + * Package: removed superfluous version property from bower.json (#207 @kkirsche) + * Readme: fix USE_COLORS to DEBUG_COLORS + * Readme: Doc fixes for format string sugar (#269, @mlucool) + * Readme: Updated docs for DEBUG_FD and DEBUG_COLORS environment variables (#232, @mattlyons0) + * Readme: doc fixes for PowerShell (#271 #243, @exoticknight @unreadable) + * Readme: better docs for browser support (#224, @matthewmueller) + * Tooling: Added yarn integration for development (#317, @thebigredgeek) + * Misc: Renamed History.md to CHANGELOG.md (@thebigredgeek) + * Misc: Added license file (#226 #274, @CantemoInternal @sdaitzman) + * Misc: Updated contributors (@thebigredgeek) + +2.2.0 / 2015-05-09 +================== + + * package: update "ms" to v0.7.1 (#202, @dougwilson) + * README: add logging to file example (#193, @DanielOchoa) + * README: fixed a typo (#191, @amir-s) + * browser: expose `storage` (#190, @stephenmathieson) + * Makefile: add a `distclean` target (#189, @stephenmathieson) + +2.1.3 / 2015-03-13 +================== + + * Updated stdout/stderr example (#186) + * Updated example/stdout.js to match debug current behaviour + * Renamed example/stderr.js to stdout.js + * Update Readme.md (#184) + * replace high intensity foreground color for bold (#182, #183) + +2.1.2 / 2015-03-01 +================== + + * dist: recompile + * update "ms" to v0.7.0 + * package: update "browserify" to v9.0.3 + * component: fix "ms.js" repo location + * changed bower package name + * updated documentation about using debug in a browser + * fix: security error on safari (#167, #168, @yields) + +2.1.1 / 2014-12-29 +================== + + * browser: use `typeof` to check for `console` existence + * browser: check for `console.log` truthiness (fix IE 8/9) + * browser: add support for Chrome apps + * Readme: added Windows usage remarks + * Add `bower.json` to properly support bower install + +2.1.0 / 2014-10-15 +================== + + * node: implement `DEBUG_FD` env variable support + * package: update "browserify" to v6.1.0 + * package: add "license" field to package.json (#135, @panuhorsmalahti) + +2.0.0 / 2014-09-01 +================== + + * package: update "browserify" to v5.11.0 + * node: use stderr rather than stdout for logging (#29, @stephenmathieson) + +1.0.4 / 2014-07-15 
+================== + + * dist: recompile + * example: remove `console.info()` log usage + * example: add "Content-Type" UTF-8 header to browser example + * browser: place %c marker after the space character + * browser: reset the "content" color via `color: inherit` + * browser: add colors support for Firefox >= v31 + * debug: prefer an instance `log()` function over the global one (#119) + * Readme: update documentation about styled console logs for FF v31 (#116, @wryk) + +1.0.3 / 2014-07-09 +================== + + * Add support for multiple wildcards in namespaces (#122, @seegno) + * browser: fix lint + +1.0.2 / 2014-06-10 +================== + + * browser: update color palette (#113, @gscottolson) + * common: make console logging function configurable (#108, @timoxley) + * node: fix %o colors on old node <= 0.8.x + * Makefile: find node path using shell/which (#109, @timoxley) + +1.0.1 / 2014-06-06 +================== + + * browser: use `removeItem()` to clear localStorage + * browser, node: don't set DEBUG if namespaces is undefined (#107, @leedm777) + * package: add "contributors" section + * node: fix comment typo + * README: list authors + +1.0.0 / 2014-06-04 +================== + + * make ms diff be global, not be scope + * debug: ignore empty strings in enable() + * node: make DEBUG_COLORS able to disable coloring + * *: export the `colors` array + * npmignore: don't publish the `dist` dir + * Makefile: refactor to use browserify + * package: add "browserify" as a dev dependency + * Readme: add Web Inspector Colors section + * node: reset terminal color for the debug content + * node: map "%o" to `util.inspect()` + * browser: map "%j" to `JSON.stringify()` + * debug: add custom "formatters" + * debug: use "ms" module for humanizing the diff + * Readme: add "bash" syntax highlighting + * browser: add Firebug color support + * browser: add colors for WebKit browsers + * node: apply log to `console` + * rewrite: abstract common logic for Node & browsers + * add .jshintrc file + +0.8.1 / 2014-04-14 +================== + + * package: re-add the "component" section + +0.8.0 / 2014-03-30 +================== + + * add `enable()` method for nodejs. Closes #27 + * change from stderr to stdout + * remove unnecessary index.js file + +0.7.4 / 2013-11-13 +================== + + * remove "browserify" key from package.json (fixes something in browserify) + +0.7.3 / 2013-10-30 +================== + + * fix: catch localStorage security error when cookies are blocked (Chrome) + * add debug(err) support. Closes #46 + * add .browser prop to package.json. Closes #42 + +0.7.2 / 2013-02-06 +================== + + * fix package.json + * fix: Mobile Safari (private mode) is broken with debug + * fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript + +0.7.1 / 2013-02-05 +================== + + * add repository URL to package.json + * add DEBUG_COLORED to force colored output + * add browserify support + * fix component. Closes #24 + +0.7.0 / 2012-05-04 +================== + + * Added .component to package.json + * Added debug.component.js build + +0.6.0 / 2012-03-16 +================== + + * Added support for "-" prefix in DEBUG [Vinay Pulim] + * Added `.enabled` flag to the node version [TooTallNate] + +0.5.0 / 2012-02-02 +================== + + * Added: humanize diffs. Closes #8 + * Added `debug.disable()` to the CS variant + * Removed padding. Closes #10 + * Fixed: persist client-side variant again. 
Closes #9 + +0.4.0 / 2012-02-01 +================== + + * Added browser variant support for older browsers [TooTallNate] + * Added `debug.enable('project:*')` to browser variant [TooTallNate] + * Added padding to diff (moved it to the right) + +0.3.0 / 2012-01-26 +================== + + * Added millisecond diff when isatty, otherwise UTC string + +0.2.0 / 2012-01-22 +================== + + * Added wildcard support + +0.1.0 / 2011-12-02 +================== + + * Added: remove colors unless stderr isatty [TooTallNate] + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/node_modules/body-parser/node_modules/debug/LICENSE b/node_modules/body-parser/node_modules/debug/LICENSE new file mode 100644 index 00000000..658c933d --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/LICENSE @@ -0,0 +1,19 @@ +(The MIT License) + +Copyright (c) 2014 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/body-parser/node_modules/debug/Makefile b/node_modules/body-parser/node_modules/debug/Makefile new file mode 100644 index 00000000..584da8bf --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/Makefile @@ -0,0 +1,50 @@ +# get Makefile directory name: http://stackoverflow.com/a/5982798/376773 +THIS_MAKEFILE_PATH:=$(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)) +THIS_DIR:=$(shell cd $(dir $(THIS_MAKEFILE_PATH));pwd) + +# BIN directory +BIN := $(THIS_DIR)/node_modules/.bin + +# Path +PATH := node_modules/.bin:$(PATH) +SHELL := /bin/bash + +# applications +NODE ?= $(shell which node) +YARN ?= $(shell which yarn) +PKG ?= $(if $(YARN),$(YARN),$(NODE) $(shell which npm)) +BROWSERIFY ?= $(NODE) $(BIN)/browserify + +.FORCE: + +install: node_modules + +node_modules: package.json + @NODE_ENV= $(PKG) install + @touch node_modules + +lint: .FORCE + eslint browser.js debug.js index.js node.js + +test-node: .FORCE + istanbul cover node_modules/mocha/bin/_mocha -- test/**.js + +test-browser: .FORCE + mkdir -p dist + + @$(BROWSERIFY) \ + --standalone debug \ + . 
> dist/debug.js + + karma start --single-run + rimraf dist + +test: .FORCE + concurrently \ + "make test-node" \ + "make test-browser" + +coveralls: + cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js + +.PHONY: all install clean distclean diff --git a/node_modules/body-parser/node_modules/debug/README.md b/node_modules/body-parser/node_modules/debug/README.md new file mode 100644 index 00000000..f67be6b3 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/README.md @@ -0,0 +1,312 @@ +# debug +[![Build Status](https://travis-ci.org/visionmedia/debug.svg?branch=master)](https://travis-ci.org/visionmedia/debug) [![Coverage Status](https://coveralls.io/repos/github/visionmedia/debug/badge.svg?branch=master)](https://coveralls.io/github/visionmedia/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny node.js debugging utility modelled after node core's debugging technique. + +**Discussion around the V3 API is under way [here](https://github.com/visionmedia/debug/issues/370)** + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example _app.js_: + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %s', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example _worker.js_: + +```js +var debug = require('debug')('worker'); + +setInterval(function(){ + debug('doing some work'); +}, 1000); +``` + + The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples: + + ![debug http and worker](http://f.cl.ly/items/18471z1H402O24072r1J/Screenshot.png) + + ![debug worker](http://f.cl.ly/items/1X413v1a3M0d3C2c1E0i/Screenshot.png) + +#### Windows note + + On Windows the environment variable is set using the `set` command. + + ```cmd + set DEBUG=*,-not_this + ``` + + Note that PowerShell uses different syntax to set environment variables. + + ```cmd + $env:DEBUG = "*,-not_this" + ``` + +Then, run the program to be debugged as usual. + +## Millisecond diff + + When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png) + + When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below: + + ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png) + +## Conventions + + If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. 
If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". + +## Wildcards + + The `*` character may be used as a wildcard. Suppose for example your library has debuggers named "connect:bodyParser", "connect:compress", "connect:session", instead of listing all three with `DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + + You can also exclude specific debuggers by prefixing them with a "-" character. For example, `DEBUG=*,-connect:*` would include all debuggers except those starting with "connect:". + +## Environment Variables + + When running through Node.js, you can set a few environment variables that will + change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + + __Note:__ The environment variables beginning with `DEBUG_` end up being + converted into an Options object that gets used with `%o`/`%O` formatters. + See the Node.js documentation for + [`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) + for the complete list. + +## Formatters + + + Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + +### Custom formatters + + You can add custom formatters by extending the `debug.formatters` object. For example, if you wanted to add support for rendering a Buffer as hex with `%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + +## Browser support + You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), + or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), + if you don't want to build it yourself. + + Debug's enable state is currently persisted by `localStorage`. + Consider the situation shown below where you have `worker:a` and `worker:b`, + and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +#### Web Inspector Colors + + Colors are also enabled on "Web Inspectors" that understand the `%c` formatting + option. 
These are WebKit web inspectors, Firefox ([since version + 31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) + and the Firebug plugin for Firefox (any version). + + Colored output looks something like: + + ![](https://cloud.githubusercontent.com/assets/71256/3139768/b98c5fd8-e8ef-11e3-862a-f7253b6f47c6.png) + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example _stdout.js_: + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2016 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
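To complement the wildcard and exclusion rules described in the `debug` README above (`DEBUG=connect:*`, `-connect:session`), here is a small sketch of enabling the same namespace patterns programmatically through the exported `enable()` function; the namespace names are hypothetical and the snippet assumes the debug 2.6.9 version vendored here:

```js
var createDebug = require('debug');

// Enable everything under "connect:" except the session namespace,
// exactly as DEBUG=connect:*,-connect:session would.
createDebug.enable('connect:*,-connect:session');

var bodyParserLog = createDebug('connect:bodyParser');
var sessionLog = createDebug('connect:session');

bodyParserLog('matched by the wildcard, so this prints');
sessionLog('excluded by the "-" prefix, so this stays silent');
```

Note that in this version `enabled` is evaluated when each debugger instance is created, so `enable()` has to run before the instances are constructed.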
diff --git a/node_modules/body-parser/node_modules/debug/component.json b/node_modules/body-parser/node_modules/debug/component.json new file mode 100644 index 00000000..9de26410 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/component.json @@ -0,0 +1,19 @@ +{ + "name": "debug", + "repo": "visionmedia/debug", + "description": "small debugging utility", + "version": "2.6.9", + "keywords": [ + "debug", + "log", + "debugger" + ], + "main": "src/browser.js", + "scripts": [ + "src/browser.js", + "src/debug.js" + ], + "dependencies": { + "rauchg/ms.js": "0.7.1" + } +} diff --git a/node_modules/body-parser/node_modules/debug/karma.conf.js b/node_modules/body-parser/node_modules/debug/karma.conf.js new file mode 100644 index 00000000..103a82d1 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/karma.conf.js @@ -0,0 +1,70 @@ +// Karma configuration +// Generated on Fri Dec 16 2016 13:09:51 GMT+0000 (UTC) + +module.exports = function(config) { + config.set({ + + // base path that will be used to resolve all patterns (eg. files, exclude) + basePath: '', + + + // frameworks to use + // available frameworks: https://npmjs.org/browse/keyword/karma-adapter + frameworks: ['mocha', 'chai', 'sinon'], + + + // list of files / patterns to load in the browser + files: [ + 'dist/debug.js', + 'test/*spec.js' + ], + + + // list of files to exclude + exclude: [ + 'src/node.js' + ], + + + // preprocess matching files before serving them to the browser + // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor + preprocessors: { + }, + + // test results reporter to use + // possible values: 'dots', 'progress' + // available reporters: https://npmjs.org/browse/keyword/karma-reporter + reporters: ['progress'], + + + // web server port + port: 9876, + + + // enable / disable colors in the output (reporters and logs) + colors: true, + + + // level of logging + // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG + logLevel: config.LOG_INFO, + + + // enable / disable watching file and executing tests whenever any file changes + autoWatch: true, + + + // start these browsers + // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher + browsers: ['PhantomJS'], + + + // Continuous Integration mode + // if true, Karma captures browsers, runs the tests and exits + singleRun: false, + + // Concurrency level + // how many browser should be started simultaneous + concurrency: Infinity + }) +} diff --git a/node_modules/body-parser/node_modules/debug/node.js b/node_modules/body-parser/node_modules/debug/node.js new file mode 100644 index 00000000..7fc36fe6 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/node.js @@ -0,0 +1 @@ +module.exports = require('./src/node'); diff --git a/node_modules/body-parser/node_modules/debug/package.json b/node_modules/body-parser/node_modules/debug/package.json new file mode 100644 index 00000000..dc787ba7 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/package.json @@ -0,0 +1,49 @@ +{ + "name": "debug", + "version": "2.6.9", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/debug.git" + }, + "description": "small debugging utility", + "keywords": [ + "debug", + "log", + "debugger" + ], + "author": "TJ Holowaychuk ", + "contributors": [ + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + }, + "devDependencies": { + "browserify": "9.0.3", + 
"chai": "^3.5.0", + "concurrently": "^3.1.0", + "coveralls": "^2.11.15", + "eslint": "^3.12.1", + "istanbul": "^0.4.5", + "karma": "^1.3.0", + "karma-chai": "^0.1.0", + "karma-mocha": "^1.3.0", + "karma-phantomjs-launcher": "^1.0.2", + "karma-sinon": "^1.0.5", + "mocha": "^3.2.0", + "mocha-lcov-reporter": "^1.2.0", + "rimraf": "^2.5.4", + "sinon": "^1.17.6", + "sinon-chai": "^2.8.0" + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "component": { + "scripts": { + "debug/index.js": "browser.js", + "debug/debug.js": "debug.js" + } + } +} diff --git a/node_modules/body-parser/node_modules/debug/src/browser.js b/node_modules/body-parser/node_modules/debug/src/browser.js new file mode 100644 index 00000000..71069249 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/src/browser.js @@ -0,0 +1,185 @@ +/** + * This is the web browser implementation of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = require('./debug'); +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = 'undefined' != typeof chrome + && 'undefined' != typeof chrome.storage + ? chrome.storage.local + : localstorage(); + +/** + * Colors. + */ + +exports.colors = [ + 'lightseagreen', + 'forestgreen', + 'goldenrod', + 'dodgerblue', + 'darkorchid', + 'crimson' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') { + return true; + } + + // is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +exports.formatters.j = function(v) { + try { + return JSON.stringify(v); + } catch (err) { + return '[UnexpectedJSONParseError]: ' + err.message; + } +}; + + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + var useColors = this.useColors; + + args[0] = (useColors ? '%c' : '') + + this.namespace + + (useColors ? ' %c' : ' ') + + args[0] + + (useColors ? 
'%c ' : ' ') + + '+' + exports.humanize(this.diff); + + if (!useColors) return; + + var c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit') + + // the final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + var index = 0; + var lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, function(match) { + if ('%%' === match) return; + index++; + if ('%c' === match) { + // we only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.log()` when available. + * No-op when `console.log` is not a "function". + * + * @api public + */ + +function log() { + // this hackery is required for IE8/9, where + // the `console.log` function doesn't have 'apply' + return 'object' === typeof console + && console.log + && Function.prototype.apply.call(console.log, console, arguments); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + +function save(namespaces) { + try { + if (null == namespaces) { + exports.storage.removeItem('debug'); + } else { + exports.storage.debug = namespaces; + } + } catch(e) {} +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + var r; + try { + r = exports.storage.debug; + } catch(e) {} + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Enable namespaces listed in `localStorage.debug` initially. + */ + +exports.enable(load()); + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + return window.localStorage; + } catch (e) {} +} diff --git a/node_modules/body-parser/node_modules/debug/src/debug.js b/node_modules/body-parser/node_modules/debug/src/debug.js new file mode 100644 index 00000000..6a5e3fc9 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/src/debug.js @@ -0,0 +1,202 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = createDebug.debug = createDebug['default'] = createDebug; +exports.coerce = coerce; +exports.disable = disable; +exports.enable = enable; +exports.enabled = enabled; +exports.humanize = require('ms'); + +/** + * The currently active debug mode names, and names to skip. + */ + +exports.names = []; +exports.skips = []; + +/** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + +exports.formatters = {}; + +/** + * Previous log timestamp. + */ + +var prevTime; + +/** + * Select a color. + * @param {String} namespace + * @return {Number} + * @api private + */ + +function selectColor(namespace) { + var hash = 0, i; + + for (i in namespace) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return exports.colors[Math.abs(hash) % exports.colors.length]; +} + +/** + * Create a debugger with the given `namespace`. 
+ * + * @param {String} namespace + * @return {Function} + * @api public + */ + +function createDebug(namespace) { + + function debug() { + // disabled? + if (!debug.enabled) return; + + var self = debug; + + // set `diff` timestamp + var curr = +new Date(); + var ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + // turn the `arguments` into a proper Array + var args = new Array(arguments.length); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i]; + } + + args[0] = exports.coerce(args[0]); + + if ('string' !== typeof args[0]) { + // anything else let's inspect with %O + args.unshift('%O'); + } + + // apply any `formatters` transformations + var index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) { + // if we encounter an escaped % then don't increase the array index + if (match === '%%') return match; + index++; + var formatter = exports.formatters[format]; + if ('function' === typeof formatter) { + var val = args[index]; + match = formatter.call(self, val); + + // now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // apply env-specific formatting (colors, etc.) + exports.formatArgs.call(self, args); + + var logFn = debug.log || exports.log || console.log.bind(console); + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.enabled = exports.enabled(namespace); + debug.useColors = exports.useColors(); + debug.color = selectColor(namespace); + + // env-specific initialization logic for debug instances + if ('function' === typeof exports.init) { + exports.init(debug); + } + + return debug; +} + +/** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + +function enable(namespaces) { + exports.save(namespaces); + + exports.names = []; + exports.skips = []; + + var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + var len = split.length; + + for (var i = 0; i < len; i++) { + if (!split[i]) continue; // ignore empty strings + namespaces = split[i].replace(/\*/g, '.*?'); + if (namespaces[0] === '-') { + exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); + } else { + exports.names.push(new RegExp('^' + namespaces + '$')); + } + } +} + +/** + * Disable debug output. + * + * @api public + */ + +function disable() { + exports.enable(''); +} + +/** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + +function enabled(name) { + var i, len; + for (i = 0, len = exports.skips.length; i < len; i++) { + if (exports.skips[i].test(name)) { + return false; + } + } + for (i = 0, len = exports.names.length; i < len; i++) { + if (exports.names[i].test(name)) { + return true; + } + } + return false; +} + +/** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + +function coerce(val) { + if (val instanceof Error) return val.stack || val.message; + return val; +} diff --git a/node_modules/body-parser/node_modules/debug/src/index.js b/node_modules/body-parser/node_modules/debug/src/index.js new file mode 100644 index 00000000..e12cf4d5 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer process, which is node, but we should + * treat as a browser. 
+ */ + +if (typeof process !== 'undefined' && process.type === 'renderer') { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/body-parser/node_modules/debug/src/inspector-log.js b/node_modules/body-parser/node_modules/debug/src/inspector-log.js new file mode 100644 index 00000000..60ea6c04 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/src/inspector-log.js @@ -0,0 +1,15 @@ +module.exports = inspectorLog; + +// black hole +const nullStream = new (require('stream').Writable)(); +nullStream._write = () => {}; + +/** + * Outputs a `console.log()` to the Node.js Inspector console *only*. + */ +function inspectorLog() { + const stdout = console._stdout; + console._stdout = nullStream; + console.log.apply(console, arguments); + console._stdout = stdout; +} diff --git a/node_modules/body-parser/node_modules/debug/src/node.js b/node_modules/body-parser/node_modules/debug/src/node.js new file mode 100644 index 00000000..b15109c9 --- /dev/null +++ b/node_modules/body-parser/node_modules/debug/src/node.js @@ -0,0 +1,248 @@ +/** + * Module dependencies. + */ + +var tty = require('tty'); +var util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = require('./debug'); +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(function (key) { + return /^debug_/i.test(key); +}).reduce(function (obj, key) { + // camel-case + var prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() }); + + // coerce string value into JS value + var val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) val = true; + else if (/^(no|off|false|disabled)$/i.test(val)) val = false; + else if (val === 'null') val = null; + else val = Number(val); + + obj[prop] = val; + return obj; +}, {}); + +/** + * The file descriptor to write the `debug()` calls to. + * Set the `DEBUG_FD` env variable to override with another value. i.e.: + * + * $ DEBUG_FD=3 node script.js 3>debug.log + */ + +var fd = parseInt(process.env.DEBUG_FD, 10) || 2; + +if (1 !== fd && 2 !== fd) { + util.deprecate(function(){}, 'except for stderr(2) and stdout(1), any other usage of DEBUG_FD is deprecated. Override debug.log if you want to use a different log function (https://git.io/debug_fd)')() +} + +var stream = 1 === fd ? process.stdout : + 2 === fd ? process.stderr : + createWritableStdioStream(fd); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts + ? Boolean(exports.inspectOpts.colors) + : tty.isatty(fd); +} + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +exports.formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n').map(function(str) { + return str.trim() + }).join(' '); +}; + +/** + * Map %o to `util.inspect()`, allowing multiple lines if needed. 
+ */ + +exports.formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + var name = this.namespace; + var useColors = this.useColors; + + if (useColors) { + var c = this.color; + var prefix = ' \u001b[3' + c + ';1m' + name + ' ' + '\u001b[0m'; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push('\u001b[3' + c + 'm+' + exports.humanize(this.diff) + '\u001b[0m'); + } else { + args[0] = new Date().toUTCString() + + ' ' + name + ' ' + args[0]; + } +} + +/** + * Invokes `util.format()` with the specified arguments and writes to `stream`. + */ + +function log() { + return stream.write(util.format.apply(util, arguments) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + +function save(namespaces) { + if (null == namespaces) { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } else { + process.env.DEBUG = namespaces; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Copied from `node/src/node.js`. + * + * XXX: It's lame that node doesn't expose this API out-of-the-box. It also + * relies on the undocumented `tty_wrap.guessHandleType()` which is also lame. + */ + +function createWritableStdioStream (fd) { + var stream; + var tty_wrap = process.binding('tty_wrap'); + + // Note stream._type is used for test-module-load-list.js + + switch (tty_wrap.guessHandleType(fd)) { + case 'TTY': + stream = new tty.WriteStream(fd); + stream._type = 'tty'; + + // Hack to have stream not keep the event loop alive. + // See https://github.com/joyent/node/issues/1726 + if (stream._handle && stream._handle.unref) { + stream._handle.unref(); + } + break; + + case 'FILE': + var fs = require('fs'); + stream = new fs.SyncWriteStream(fd, { autoClose: false }); + stream._type = 'fs'; + break; + + case 'PIPE': + case 'TCP': + var net = require('net'); + stream = new net.Socket({ + fd: fd, + readable: false, + writable: true + }); + + // FIXME Should probably have an option in net.Socket to create a + // stream from an existing fd which is writable only. But for now + // we'll just add this hack and set the `readable` member to false. + // Test: ./node test/fixtures/echo.js < /etc/passwd + stream.readable = false; + stream.read = null; + stream._type = 'pipe'; + + // FIXME Hack to have stream not keep the event loop alive. + // See https://github.com/joyent/node/issues/1726 + if (stream._handle && stream._handle.unref) { + stream._handle.unref(); + } + break; + + default: + // Probably an error on in uv_guess_handle() + throw new Error('Implement me. Unknown stream file type!'); + } + + // For supporting legacy API we put the FD here. + stream.fd = fd; + + stream._isStdio = true; + + return stream; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ + +function init (debug) { + debug.inspectOpts = {}; + + var keys = Object.keys(exports.inspectOpts); + for (var i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +/** + * Enable namespaces listed in `process.env.DEBUG` initially. + */ + +exports.enable(load()); diff --git a/node_modules/body-parser/node_modules/ms/index.js b/node_modules/body-parser/node_modules/ms/index.js new file mode 100644 index 00000000..6a522b16 --- /dev/null +++ b/node_modules/body-parser/node_modules/ms/index.js @@ -0,0 +1,152 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isNaN(val) === false) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + if (ms >= d) { + return Math.round(ms / d) + 'd'; + } + if (ms >= h) { + return Math.round(ms / h) + 'h'; + } + if (ms >= m) { + return Math.round(ms / m) + 'm'; + } + if (ms >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + return plural(ms, d, 'day') || + plural(ms, h, 'hour') || + plural(ms, m, 'minute') || + plural(ms, s, 'second') || + ms + ' ms'; +} + +/** + * Pluralization helper. 
+ */ + +function plural(ms, n, name) { + if (ms < n) { + return; + } + if (ms < n * 1.5) { + return Math.floor(ms / n) + ' ' + name; + } + return Math.ceil(ms / n) + ' ' + name + 's'; +} diff --git a/node_modules/body-parser/node_modules/ms/license.md b/node_modules/body-parser/node_modules/ms/license.md new file mode 100644 index 00000000..69b61253 --- /dev/null +++ b/node_modules/body-parser/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/body-parser/node_modules/ms/package.json b/node_modules/body-parser/node_modules/ms/package.json new file mode 100644 index 00000000..6a31c81f --- /dev/null +++ b/node_modules/body-parser/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.0.0", + "description": "Tiny milisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "3.19.0", + "expect.js": "0.3.1", + "husky": "0.13.3", + "lint-staged": "3.4.1", + "mocha": "3.4.1" + } +} diff --git a/node_modules/body-parser/node_modules/ms/readme.md b/node_modules/body-parser/node_modules/ms/readme.md new file mode 100644 index 00000000..84a9974c --- /dev/null +++ b/node_modules/body-parser/node_modules/ms/readme.md @@ -0,0 +1,51 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Slack Channel](http://zeit-slackin.now.sh/badge.svg)](https://zeit.chat/) + +Use this package to easily convert various time formats to milliseconds. 
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +``` + +### Convert from milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(ms('10 hours')) // "10h" +``` + +### Time format written-out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [node](https://nodejs.org) and in the browser. +- If a number is supplied to `ms`, a string with a unit is returned. +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`). +- If you pass a string with a number and a valid unit, the number of equivalent ms is returned. + +## Caught a bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, node will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/node_modules/body-parser/package.json b/node_modules/body-parser/package.json new file mode 100644 index 00000000..46373043 --- /dev/null +++ b/node_modules/body-parser/package.json @@ -0,0 +1,56 @@ +{ + "name": "body-parser", + "description": "Node.js body parsing middleware", + "version": "1.20.2", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "expressjs/body-parser", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "devDependencies": { + "eslint": "8.34.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.27.5", + "eslint-plugin-markdown": "3.0.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "6.1.1", + "eslint-plugin-standard": "4.1.0", + "methods": "1.1.2", + "mocha": "10.2.0", + "nyc": "15.1.0", + "safe-buffer": "5.2.1", + "supertest": "6.3.3" + }, + "files": [ + "lib/", + "LICENSE", + "HISTORY.md", + "SECURITY.md", + "index.js" + ], + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 00000000..de322667 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 00000000..6b4e0e16 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. + +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. 
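As a quick illustration of that compatibility rule (a minimal sketch; the literal strings are made up for the example), a pattern whose opening brace is preceded by `$` is returned untouched, while an ordinary brace set still expands:

```js
var expand = require('brace-expansion');

// "${" is never treated as a brace set, so the input comes back unchanged.
console.log(expand('${a,b}'));
//=> ['${a,b}']

// A normal brace set still expands as usual.
console.log(expand('x{a,b}'));
//=> ['xa', 'xb']
```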
+ +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 00000000..0478be81 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 00000000..a18faa8f --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/braces/CHANGELOG.md b/node_modules/braces/CHANGELOG.md new file mode 100644 index 00000000..36f798b0 --- /dev/null +++ b/node_modules/braces/CHANGELOG.md @@ -0,0 +1,184 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each versions is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [3.0.0] - 2018-04-08 + +v3.0 is a complete refactor, resulting in a faster, smaller codebase, with fewer deps, and a more accurate parser and compiler. + +**Breaking Changes** + +- The undocumented `.makeRe` method was removed + +**Non-breaking changes** + +- Caching was removed + +## [2.3.2] - 2018-04-08 + +- start refactoring +- cover sets +- better range handling + +## [2.3.1] - 2018-02-17 + +- Remove unnecessary escape in Regex. (#14) + +## [2.3.0] - 2017-10-19 + +- minor code reorganization +- optimize regex +- expose `maxLength` option + +## [2.2.1] - 2017-05-30 + +- don't condense when braces contain extglobs + +## [2.2.0] - 2017-05-28 + +- ensure word boundaries are preserved +- fixes edge case where extglob characters precede a brace pattern + +## [2.1.1] - 2017-04-27 + +- use snapdragon-node +- handle edge case +- optimizations, lint + +## [2.0.4] - 2017-04-11 + +- pass opts to compiler +- minor optimization in create method +- re-write parser handlers to remove negation regex + +## [2.0.3] - 2016-12-10 + +- use split-string +- clear queue at the end +- adds sequences example +- add unit tests + +## [2.0.2] - 2016-10-21 + +- fix comma handling in nested extglobs + +## [2.0.1] - 2016-10-20 + +- add comments +- more tests, ensure quotes are stripped + +## [2.0.0] - 2016-10-19 + +- don't expand braces inside character classes +- add quantifier pattern + +## [1.8.5] - 2016-05-21 + +- Refactor (#10) + +## [1.8.4] - 2016-04-20 + +- fixes https://github.com/jonschlinkert/micromatch/issues/66 + +## [1.8.0] - 2015-03-18 + +- adds exponent examples, tests +- fixes the first example in https://github.com/jonschlinkert/micromatch/issues/38 + +## [1.6.0] - 2015-01-30 + +- optimizations, `bash` mode: +- improve path escaping + +## [1.5.0] - 2015-01-28 + +- Merge pull request #5 from eush77/lib-files + +## [1.4.0] - 2015-01-24 + +- add extglob tests +- externalize exponent function +- better whitespace handling + +## [1.3.0] - 2015-01-24 + +- make regex patterns explicity + +## [1.1.0] - 2015-01-11 + +- don't create a match group with `makeRe` + +## [1.0.0] - 2014-12-23 + +- Merge commit '97b05f5544f8348736a8efaecf5c32bbe3e2ad6e' +- support empty brace syntax +- better bash coverage +- better support for regex strings + +## [0.1.4] - 2014-11-14 + +- improve recognition of bad args, recognize mismatched argument types +- support escaping +- remove pathname-expansion +- support whitespace in patterns + +## [0.1.0] + +- first commit + +[2.3.2]: https://github.com/micromatch/braces/compare/2.3.1...2.3.2 +[2.3.1]: https://github.com/micromatch/braces/compare/2.3.0...2.3.1 +[2.3.0]: https://github.com/micromatch/braces/compare/2.2.1...2.3.0 +[2.2.1]: https://github.com/micromatch/braces/compare/2.2.0...2.2.1 +[2.2.0]: https://github.com/micromatch/braces/compare/2.1.1...2.2.0 +[2.1.1]: https://github.com/micromatch/braces/compare/2.1.0...2.1.1 +[2.1.0]: https://github.com/micromatch/braces/compare/2.0.4...2.1.0 +[2.0.4]: https://github.com/micromatch/braces/compare/2.0.3...2.0.4 +[2.0.3]: https://github.com/micromatch/braces/compare/2.0.2...2.0.3 +[2.0.2]: https://github.com/micromatch/braces/compare/2.0.1...2.0.2 +[2.0.1]: https://github.com/micromatch/braces/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/micromatch/braces/compare/1.8.5...2.0.0 +[1.8.5]: https://github.com/micromatch/braces/compare/1.8.4...1.8.5 +[1.8.4]: https://github.com/micromatch/braces/compare/1.8.0...1.8.4 +[1.8.0]: https://github.com/micromatch/braces/compare/1.6.0...1.8.0 +[1.6.0]: 
https://github.com/micromatch/braces/compare/1.5.0...1.6.0 +[1.5.0]: https://github.com/micromatch/braces/compare/1.4.0...1.5.0 +[1.4.0]: https://github.com/micromatch/braces/compare/1.3.0...1.4.0 +[1.3.0]: https://github.com/micromatch/braces/compare/1.2.0...1.3.0 +[1.2.0]: https://github.com/micromatch/braces/compare/1.1.0...1.2.0 +[1.1.0]: https://github.com/micromatch/braces/compare/1.0.0...1.1.0 +[1.0.0]: https://github.com/micromatch/braces/compare/0.1.4...1.0.0 +[0.1.4]: https://github.com/micromatch/braces/compare/0.1.0...0.1.4 + +[Unreleased]: https://github.com/micromatch/braces/compare/0.1.0...HEAD +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog \ No newline at end of file diff --git a/node_modules/braces/LICENSE b/node_modules/braces/LICENSE new file mode 100644 index 00000000..d32ab442 --- /dev/null +++ b/node_modules/braces/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/braces/README.md b/node_modules/braces/README.md new file mode 100644 index 00000000..cba2f600 --- /dev/null +++ b/node_modules/braces/README.md @@ -0,0 +1,593 @@ +# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) + +> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save braces +``` + +## v3.0.0 Released!! + +See the [changelog](CHANGELOG.md) for details. + +## Why use braces? + +Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. 
+ +* **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) +* **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. +* **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. +* **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). +* **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). +* [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` +* [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` +* [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` +* [Supports escaping](#escaping) - To prevent evaluation of special characters. + +## Usage + +The main export is a function that takes one or more brace `patterns` and `options`. + +```js +const braces = require('braces'); +// braces(patterns[, options]); + +console.log(braces(['{01..05}', '{a..e}'])); +//=> ['(0[1-5])', '([a-e])'] + +console.log(braces(['{01..05}', '{a..e}'], { expand: true })); +//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] +``` + +### Brace Expansion vs. Compilation + +By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. + +**Compiled** + +```js +console.log(braces('a/{x,y,z}/b')); +//=> ['a/(x|y|z)/b'] +console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); +//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] +``` + +**Expanded** + +Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): + +```js +console.log(braces('a/{x,y,z}/b', { expand: true })); +//=> ['a/x/b', 'a/y/b', 'a/z/b'] + +console.log(braces.expand('{01..10}')); +//=> ['01','02','03','04','05','06','07','08','09','10'] +``` + +### Lists + +Expand lists (like Bash "sets"): + +```js +console.log(braces('a/{foo,bar,baz}/*.js')); +//=> ['a/(foo|bar|baz)/*.js'] + +console.log(braces.expand('a/{foo,bar,baz}/*.js')); +//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] +``` + +### Sequences + +Expand ranges of characters (like Bash "sequences"): + +```js +console.log(braces.expand('{1..3}')); // ['1', '2', '3'] +console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] +console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] +console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] + +// supports zero-padded ranges +console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] +console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] +``` + +See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. 
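+
+Two further sequence forms, shown as a minimal sketch (the expected values simply mirror the brace-expansion table later in this readme):
+
+```js
+const braces = require('braces');
+
+// Descending sequences count down from the start value to the end value.
+console.log(braces.expand('{j..a}'));
+//=> ['j', 'i', 'h', 'g', 'f', 'e', 'd', 'c', 'b', 'a']
+
+// Alphabetical sequences may also take a step (increment).
+console.log(braces.expand('{a..z..3}'));
+//=> ['a', 'd', 'g', 'j', 'm', 'p', 's', 'v', 'y']
+```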
+
+### Stepped ranges
+
+Steps, or increments, may be used with ranges:
+
+```js
+console.log(braces.expand('{2..10..2}'));
+//=> ['2', '4', '6', '8', '10']
+
+console.log(braces('{2..10..2}'));
+//=> ['(2|4|6|8|10)']
+```
+
+When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion.
+
+### Nesting
+
+Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved.
+
+**"Expanded" braces**
+
+```js
+console.log(braces.expand('a{b,c,/{x,y}}/e'));
+//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e']
+
+console.log(braces.expand('a/{x,{1..5},y}/c'));
+//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c']
+```
+
+**"Optimized" braces**
+
+```js
+console.log(braces('a{b,c,/{x,y}}/e'));
+//=> ['a(b|c|/(x|y))/e']
+
+console.log(braces('a/{x,{1..5},y}/c'));
+//=> ['a/(x|([1-5])|y)/c']
+```
+
+### Escaping
+
+**Escaping braces**
+
+A brace pattern will not be expanded or evaluated if _either the opening or closing brace is escaped_:
+
+```js
+console.log(braces.expand('a\\{d,c,b}e'));
+//=> ['a{d,c,b}e']
+
+console.log(braces.expand('a{d,c,b\\}e'));
+//=> ['a{d,c,b}e']
+```
+
+**Escaping commas**
+
+Commas inside braces may also be escaped:
+
+```js
+console.log(braces.expand('a{b\\,c}d'));
+//=> ['a{b,c}d']
+
+console.log(braces.expand('a{d\\,c,b}e'));
+//=> ['ad,ce', 'abe']
+```
+
+**Single items**
+
+Following bash conventions, a brace pattern is also not expanded when it contains a single character:
+
+```js
+console.log(braces.expand('a{b}c'));
+//=> ['a{b}c']
+```
+
+## Options
+
+### options.maxLength
+
+**Type**: `Number`
+
+**Default**: `65,536`
+
+**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera.
+
+```js
+console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error
+```
+
+### options.expand
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing).
+
+```js
+console.log(braces('a/{b,c}/d', { expand: true }));
+//=> [ 'a/b/d', 'a/c/d' ]
+```
+
+### options.nodupes
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+**Description**: Remove duplicates from the returned array.
+
+### options.rangeLimit
+
+**Type**: `Number`
+
+**Default**: `1000`
+
+**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`.
+
+You can customize `options.rangeLimit` or set it to `Infinity` to disable this altogether.
+ +**Examples** + +```js +// pattern exceeds the "rangeLimit", so it's optimized automatically +console.log(braces.expand('{1..1000}')); +//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] + +// pattern does not exceed "rangeLimit", so it's NOT optimized +console.log(braces.expand('{1..100}')); +//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] +``` + +### options.transform + +**Type**: `Function` + +**Default**: `undefined` + +**Description**: Customize range expansion. + +**Example: Transforming non-numeric values** + +```js +const alpha = braces.expand('x/{a..e}/y', { + transform(value, index) { + // When non-numeric values are passed, "value" is a character code. + return 'foo/' + String.fromCharCode(value) + '-' + index; + } +}); +console.log(alpha); +//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] +``` + +**Example: Transforming numeric values** + +```js +const numeric = braces.expand('{1..5}', { + transform(value) { + // when numeric values are passed, "value" is a number + return 'foo/' + value * 2; + } +}); +console.log(numeric); +//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] +``` + +### options.quantifiers + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. + +Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) + +The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. + +**Examples** + +```js +const braces = require('braces'); +console.log(braces('a/b{1,3}/{x,y,z}')); +//=> [ 'a/b(1|3)/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true})); +//=> [ 'a/b{1,3}/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true, expand: true})); +//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] +``` + +### options.unescape + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Strip backslashes that were used for escaping from the result. + +## What is "brace expansion"? + +Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). + +In addition to "expansion", braces are also used for matching. In other words: + +* [brace expansion](#brace-expansion) is for generating new lists +* [brace matching](#brace-matching) is for filtering existing lists + +
+More about brace expansion (click to expand)
+
+There are two main types of brace expansion:
+
+1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}`
+2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges".
+
+Here are some example brace patterns to illustrate how they work:
+
+**Sets**
+
+```
+{a,b,c}      => a b c
+{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2
+```
+
+**Sequences**
+
+```
+{1..9}     => 1 2 3 4 5 6 7 8 9
+{4..-4}    => 4 3 2 1 0 -1 -2 -3 -4
+{1..20..3} => 1 4 7 10 13 16 19
+{a..j}     => a b c d e f g h i j
+{j..a}     => j i h g f e d c b a
+{a..z..3}  => a d g j m p s v y
+```
+
+**Combination**
+
+Sets and sequences can be mixed together or used along with any other strings.
+
+```
+{a,b,c}{1..3}   => a1 a2 a3 b1 b2 b3 c1 c2 c3
+foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar
+```
+
+The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases.
+
+## Brace matching
+
+In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching.
+
+For example, the pattern `foo/{1..3}/bar` would match any of the following strings:
+
+```
+foo/1/bar
+foo/2/bar
+foo/3/bar
+```
+
+But not:
+
+```
+baz/1/qux
+baz/2/qux
+baz/3/qux
+```
+
+Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of the following strings:
+
+```
+foo/1/bar
+foo/2/bar
+foo/3/bar
+baz/1/qux
+baz/2/qux
+baz/3/qux
+```
+
+## Brace matching pitfalls
+
+Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of.
+
+### tldr
+
+**"brace bombs"**
+
+* brace expansion can eat up a huge amount of processing resources
+* as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially
+* users can accidentally (or intentionally) exhaust your system's resources, resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!)
+
+For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section.
+
+### The solution
+
+Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries.
+
+### Geometric complexity
+
+At minimum, brace patterns with sets limited to two elements have quadratic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`.
+
+For example, the following sets demonstrate quadratic (`O(n^2)`) complexity:
+
+```
+{1,2}{3,4}      => (2X2)   => 13 14 23 24
+{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246
+```
+
+But add an element to a set, and we get an n-fold Cartesian product with `O(n^c)` complexity:
+
+```
+{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248
+                                    249 257 258 259 267 268 269 347 348 349 357
+                                    358 359 367 368 369
+```
+
+Now, imagine how this complexity grows given that each element is an n-tuple:
+
+```
+{1..100}{1..100}         => (100X100)     => 10,000 elements (38.4 kB)
+{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB)
+```
+
+Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. A short runnable sketch of this growth follows at the end of this section.
+
+**More information**
+
+Interested in learning more about brace expansion?
+
+* [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion)
+* [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion)
+* [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product)
+
+
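+
+To make the growth above concrete, here is a minimal sketch that counts results rather than printing them (the counts follow from the cross-product behavior described in this readme):
+
+```js
+const braces = require('braces');
+
+// Three ten-element sequences: an 18-character pattern already expands
+// to 10 x 10 x 10 = 1,000 strings.
+console.log(braces.expand('{0..9}{0..9}{0..9}').length);
+//=> 1000
+
+// The compiled (non-expanded) form stays tiny no matter how large the ranges are.
+console.log(braces('{0..9}{0..9}{0..9}'));
+//=> ['([0-9])([0-9])([0-9])']
+```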
+
+## Performance
+
+Braces is not only screaming fast, it's also more accurate than other brace expansion libraries.
+
+### Better algorithms
+
+Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_.
+
+Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently.
+
+**The proof is in the numbers**
+
+Minimatch gets exponentially slower as patterns increase in complexity; braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively.
+
+| **Pattern** | **braces** | **[minimatch][]** |
+| --- | --- | --- |
+| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) |
+| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) |
+| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) |
+| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) |
+| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) |
+| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) |
+| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) |
+| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) |
+| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) |
+| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) |
+| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) |
+| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) |
+| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) |
+| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) |
+| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) |
+| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) |
+| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) |
+
+### Faster algorithms
+
+When you need expansion, braces is still much faster.
+
+_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_
+
+| **Pattern** | **braces** | **[minimatch][]** |
+| --- | --- | --- |
+| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) |
+| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) |
+| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) |
+| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) |
+| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) |
+| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) |
+| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) |
+| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) |
+
+If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js).
+
+## Benchmarks
+
+### Running benchmarks
+
+Install dev dependencies:
+
+```bash
+npm i -d && npm run benchmark
+```
+
+### Latest results
+
+Braces is more accurate, without sacrificing performance.
+ +```bash +# range (expanded) + braces x 29,040 ops/sec ±3.69% (91 runs sampled)) + minimatch x 4,735 ops/sec ±1.28% (90 runs sampled) + +# range (optimized for regex) + braces x 382,878 ops/sec ±0.56% (94 runs sampled) + minimatch x 1,040 ops/sec ±0.44% (93 runs sampled) + +# nested ranges (expanded) + braces x 19,744 ops/sec ±2.27% (92 runs sampled)) + minimatch x 4,579 ops/sec ±0.50% (93 runs sampled) + +# nested ranges (optimized for regex) + braces x 246,019 ops/sec ±2.02% (93 runs sampled) + minimatch x 1,028 ops/sec ±0.39% (94 runs sampled) + +# set (expanded) + braces x 138,641 ops/sec ±0.53% (95 runs sampled) + minimatch x 219,582 ops/sec ±0.98% (94 runs sampled) + +# set (optimized for regex) + braces x 388,408 ops/sec ±0.41% (95 runs sampled) + minimatch x 44,724 ops/sec ±0.91% (89 runs sampled) + +# nested sets (expanded) + braces x 84,966 ops/sec ±0.48% (94 runs sampled) + minimatch x 140,720 ops/sec ±0.37% (95 runs sampled) + +# nested sets (optimized for regex) + braces x 263,340 ops/sec ±2.06% (92 runs sampled) + minimatch x 28,714 ops/sec ±0.40% (90 runs sampled) +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 197 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [es128](https://github.com/es128) | +| 1 | [eush77](https://github.com/eush77) | +| 1 | [hemanth](https://github.com/hemanth) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/braces/index.js b/node_modules/braces/index.js new file mode 100644 index 00000000..0eee0f56 --- /dev/null +++ b/node_modules/braces/index.js @@ -0,0 +1,170 @@ +'use strict'; + +const stringify = require('./lib/stringify'); +const compile = require('./lib/compile'); +const expand = require('./lib/expand'); +const parse = require('./lib/parse'); + +/** + * Expand the given pattern or create a regex-compatible string. + * + * ```js + * const braces = require('braces'); + * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] + * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {String} + * @api public + */ + +const braces = (input, options = {}) => { + let output = []; + + if (Array.isArray(input)) { + for (let pattern of input) { + let result = braces.create(pattern, options); + if (Array.isArray(result)) { + output.push(...result); + } else { + output.push(result); + } + } + } else { + output = [].concat(braces.create(input, options)); + } + + if (options && options.expand === true && options.nodupes === true) { + output = [...new Set(output)]; + } + return output; +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * // braces.parse(pattern, [, options]); + * const ast = braces.parse('a/{b,c}/d'); + * console.log(ast); + * ``` + * @param {String} pattern Brace pattern to parse + * @param {Object} options + * @return {Object} Returns an AST + * @api public + */ + +braces.parse = (input, options = {}) => parse(input, options); + +/** + * Creates a braces string from an AST, or an AST node. + * + * ```js + * const braces = require('braces'); + * let ast = braces.parse('foo/{a,b}/bar'); + * console.log(stringify(ast.nodes[2])); //=> '{a,b}' + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.stringify = (input, options = {}) => { + if (typeof input === 'string') { + return stringify(braces.parse(input, options), options); + } + return stringify(input, options); +}; + +/** + * Compiles a brace pattern into a regex-compatible, optimized string. + * This method is called by the main [braces](#braces) function by default. + * + * ```js + * const braces = require('braces'); + * console.log(braces.compile('a/{b,c}/d')); + * //=> ['a/(b|c)/d'] + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. 
+ * @api public + */ + +braces.compile = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + return compile(input, options); +}; + +/** + * Expands a brace pattern into an array. This method is called by the + * main [braces](#braces) function when `options.expand` is true. Before + * using this method it's recommended that you read the [performance notes](#performance)) + * and advantages of using [.compile](#compile) instead. + * + * ```js + * const braces = require('braces'); + * console.log(braces.expand('a/{b,c}/d')); + * //=> ['a/b/d', 'a/c/d']; + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.expand = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + + let result = expand(input, options); + + // filter out empty strings if specified + if (options.noempty === true) { + result = result.filter(Boolean); + } + + // filter out duplicates if specified + if (options.nodupes === true) { + result = [...new Set(result)]; + } + + return result; +}; + +/** + * Processes a brace pattern and returns either an expanded array + * (if `options.expand` is true), a highly optimized regex-compatible string. + * This method is called by the main [braces](#braces) function. + * + * ```js + * const braces = require('braces'); + * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) + * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.create = (input, options = {}) => { + if (input === '' || input.length < 3) { + return [input]; + } + + return options.expand !== true + ? braces.compile(input, options) + : braces.expand(input, options); +}; + +/** + * Expose "braces" + */ + +module.exports = braces; diff --git a/node_modules/braces/lib/compile.js b/node_modules/braces/lib/compile.js new file mode 100644 index 00000000..3e984a4b --- /dev/null +++ b/node_modules/braces/lib/compile.js @@ -0,0 +1,57 @@ +'use strict'; + +const fill = require('fill-range'); +const utils = require('./utils'); + +const compile = (ast, options = {}) => { + let walk = (node, parent = {}) => { + let invalidBlock = utils.isInvalidBrace(parent); + let invalidNode = node.invalid === true && options.escapeInvalid === true; + let invalid = invalidBlock === true || invalidNode === true; + let prefix = options.escapeInvalid === true ? '\\' : ''; + let output = ''; + + if (node.isOpen === true) { + return prefix + node.value; + } + if (node.isClose === true) { + return prefix + node.value; + } + + if (node.type === 'open') { + return invalid ? (prefix + node.value) : '('; + } + + if (node.type === 'close') { + return invalid ? (prefix + node.value) : ')'; + } + + if (node.type === 'comma') { + return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|'); + } + + if (node.value) { + return node.value; + } + + if (node.nodes && node.ranges > 0) { + let args = utils.reduce(node.nodes); + let range = fill(...args, { ...options, wrap: false, toRegex: true }); + + if (range.length !== 0) { + return args.length > 1 && range.length > 1 ? 
`(${range})` : range; + } + } + + if (node.nodes) { + for (let child of node.nodes) { + output += walk(child, node); + } + } + return output; + }; + + return walk(ast); +}; + +module.exports = compile; diff --git a/node_modules/braces/lib/constants.js b/node_modules/braces/lib/constants.js new file mode 100644 index 00000000..a9379436 --- /dev/null +++ b/node_modules/braces/lib/constants.js @@ -0,0 +1,57 @@ +'use strict'; + +module.exports = { + MAX_LENGTH: 1024 * 64, + + // Digits + CHAR_0: '0', /* 0 */ + CHAR_9: '9', /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 'A', /* A */ + CHAR_LOWERCASE_A: 'a', /* a */ + CHAR_UPPERCASE_Z: 'Z', /* Z */ + CHAR_LOWERCASE_Z: 'z', /* z */ + + CHAR_LEFT_PARENTHESES: '(', /* ( */ + CHAR_RIGHT_PARENTHESES: ')', /* ) */ + + CHAR_ASTERISK: '*', /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: '&', /* & */ + CHAR_AT: '@', /* @ */ + CHAR_BACKSLASH: '\\', /* \ */ + CHAR_BACKTICK: '`', /* ` */ + CHAR_CARRIAGE_RETURN: '\r', /* \r */ + CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ + CHAR_COLON: ':', /* : */ + CHAR_COMMA: ',', /* , */ + CHAR_DOLLAR: '$', /* . */ + CHAR_DOT: '.', /* . */ + CHAR_DOUBLE_QUOTE: '"', /* " */ + CHAR_EQUAL: '=', /* = */ + CHAR_EXCLAMATION_MARK: '!', /* ! */ + CHAR_FORM_FEED: '\f', /* \f */ + CHAR_FORWARD_SLASH: '/', /* / */ + CHAR_HASH: '#', /* # */ + CHAR_HYPHEN_MINUS: '-', /* - */ + CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ + CHAR_LEFT_CURLY_BRACE: '{', /* { */ + CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ + CHAR_LINE_FEED: '\n', /* \n */ + CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ + CHAR_PERCENT: '%', /* % */ + CHAR_PLUS: '+', /* + */ + CHAR_QUESTION_MARK: '?', /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ + CHAR_RIGHT_CURLY_BRACE: '}', /* } */ + CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ + CHAR_SEMICOLON: ';', /* ; */ + CHAR_SINGLE_QUOTE: '\'', /* ' */ + CHAR_SPACE: ' ', /* */ + CHAR_TAB: '\t', /* \t */ + CHAR_UNDERSCORE: '_', /* _ */ + CHAR_VERTICAL_LINE: '|', /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ +}; diff --git a/node_modules/braces/lib/expand.js b/node_modules/braces/lib/expand.js new file mode 100644 index 00000000..376c748a --- /dev/null +++ b/node_modules/braces/lib/expand.js @@ -0,0 +1,113 @@ +'use strict'; + +const fill = require('fill-range'); +const stringify = require('./stringify'); +const utils = require('./utils'); + +const append = (queue = '', stash = '', enclose = false) => { + let result = []; + + queue = [].concat(queue); + stash = [].concat(stash); + + if (!stash.length) return queue; + if (!queue.length) { + return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; + } + + for (let item of queue) { + if (Array.isArray(item)) { + for (let value of item) { + result.push(append(value, stash, enclose)); + } + } else { + for (let ele of stash) { + if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; + result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele)); + } + } + } + return utils.flatten(result); +}; + +const expand = (ast, options = {}) => { + let rangeLimit = options.rangeLimit === void 0 ? 
1000 : options.rangeLimit; + + let walk = (node, parent = {}) => { + node.queue = []; + + let p = parent; + let q = parent.queue; + + while (p.type !== 'brace' && p.type !== 'root' && p.parent) { + p = p.parent; + q = p.queue; + } + + if (node.invalid || node.dollar) { + q.push(append(q.pop(), stringify(node, options))); + return; + } + + if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { + q.push(append(q.pop(), ['{}'])); + return; + } + + if (node.nodes && node.ranges > 0) { + let args = utils.reduce(node.nodes); + + if (utils.exceedsLimit(...args, options.step, rangeLimit)) { + throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); + } + + let range = fill(...args, options); + if (range.length === 0) { + range = stringify(node, options); + } + + q.push(append(q.pop(), range)); + node.nodes = []; + return; + } + + let enclose = utils.encloseBrace(node); + let queue = node.queue; + let block = node; + + while (block.type !== 'brace' && block.type !== 'root' && block.parent) { + block = block.parent; + queue = block.queue; + } + + for (let i = 0; i < node.nodes.length; i++) { + let child = node.nodes[i]; + + if (child.type === 'comma' && node.type === 'brace') { + if (i === 1) queue.push(''); + queue.push(''); + continue; + } + + if (child.type === 'close') { + q.push(append(q.pop(), queue, enclose)); + continue; + } + + if (child.value && child.type !== 'open') { + queue.push(append(queue.pop(), child.value)); + continue; + } + + if (child.nodes) { + walk(child, node); + } + } + + return queue; + }; + + return utils.flatten(walk(ast)); +}; + +module.exports = expand; diff --git a/node_modules/braces/lib/parse.js b/node_modules/braces/lib/parse.js new file mode 100644 index 00000000..145ea264 --- /dev/null +++ b/node_modules/braces/lib/parse.js @@ -0,0 +1,333 @@ +'use strict'; + +const stringify = require('./stringify'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + CHAR_BACKSLASH, /* \ */ + CHAR_BACKTICK, /* ` */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_RIGHT_SQUARE_BRACKET, /* ] */ + CHAR_DOUBLE_QUOTE, /* " */ + CHAR_SINGLE_QUOTE, /* ' */ + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE +} = require('./constants'); + +/** + * parse + */ + +const parse = (input, options = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + let opts = options || {}; + let max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (input.length > max) { + throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); + } + + let ast = { type: 'root', input, nodes: [] }; + let stack = [ast]; + let block = ast; + let prev = ast; + let brackets = 0; + let length = input.length; + let index = 0; + let depth = 0; + let value; + let memo = {}; + + /** + * Helpers + */ + + const advance = () => input[index++]; + const push = node => { + if (node.type === 'text' && prev.type === 'dot') { + prev.type = 'text'; + } + + if (prev && prev.type === 'text' && node.type === 'text') { + prev.value += node.value; + return; + } + + block.nodes.push(node); + node.parent = block; + node.prev = prev; + prev = node; + return node; + }; + + push({ type: 'bos' }); + + while (index < length) { + block = stack[stack.length - 1]; + value = advance(); + + /** + * Invalid chars + */ + + if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { + continue; + } + + /** + * Escaped chars + */ + + if (value === CHAR_BACKSLASH) { + push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); + continue; + } + + /** + * Right square bracket (literal): ']' + */ + + if (value === CHAR_RIGHT_SQUARE_BRACKET) { + push({ type: 'text', value: '\\' + value }); + continue; + } + + /** + * Left square bracket: '[' + */ + + if (value === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + + let closed = true; + let next; + + while (index < length && (next = advance())) { + value += next; + + if (next === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + continue; + } + + if (next === CHAR_BACKSLASH) { + value += advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + brackets--; + + if (brackets === 0) { + break; + } + } + } + + push({ type: 'text', value }); + continue; + } + + /** + * Parentheses + */ + + if (value === CHAR_LEFT_PARENTHESES) { + block = push({ type: 'paren', nodes: [] }); + stack.push(block); + push({ type: 'text', value }); + continue; + } + + if (value === CHAR_RIGHT_PARENTHESES) { + if (block.type !== 'paren') { + push({ type: 'text', value }); + continue; + } + block = stack.pop(); + push({ type: 'text', value }); + block = stack[stack.length - 1]; + continue; + } + + /** + * Quotes: '|"|` + */ + + if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { + let open = value; + let next; + + if (options.keepQuotes !== true) { + value = ''; + } + + while (index < length && (next = advance())) { + if (next === CHAR_BACKSLASH) { + value += next + advance(); + continue; + } + + if (next === open) { + if (options.keepQuotes === true) value += next; + break; + } + + value += next; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Left curly brace: '{' + */ + + if (value === CHAR_LEFT_CURLY_BRACE) { + depth++; + + let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; + let brace = { + type: 'brace', + open: true, + close: false, + dollar, + depth, + commas: 0, + ranges: 0, + nodes: [] + }; + + block = push(brace); + stack.push(block); + push({ type: 'open', value }); + continue; + } + + /** + * Right curly brace: '}' + */ + + if (value === CHAR_RIGHT_CURLY_BRACE) { + if (block.type !== 'brace') { + push({ type: 'text', value }); + continue; + } + + let type = 'close'; + block = stack.pop(); + block.close = true; + + push({ type, value }); + depth--; + + block = stack[stack.length - 1]; + continue; + } + + /** + * Comma: ',' + */ + + if (value 
=== CHAR_COMMA && depth > 0) { + if (block.ranges > 0) { + block.ranges = 0; + let open = block.nodes.shift(); + block.nodes = [open, { type: 'text', value: stringify(block) }]; + } + + push({ type: 'comma', value }); + block.commas++; + continue; + } + + /** + * Dot: '.' + */ + + if (value === CHAR_DOT && depth > 0 && block.commas === 0) { + let siblings = block.nodes; + + if (depth === 0 || siblings.length === 0) { + push({ type: 'text', value }); + continue; + } + + if (prev.type === 'dot') { + block.range = []; + prev.value += value; + prev.type = 'range'; + + if (block.nodes.length !== 3 && block.nodes.length !== 5) { + block.invalid = true; + block.ranges = 0; + prev.type = 'text'; + continue; + } + + block.ranges++; + block.args = []; + continue; + } + + if (prev.type === 'range') { + siblings.pop(); + + let before = siblings[siblings.length - 1]; + before.value += prev.value + value; + prev = before; + block.ranges--; + continue; + } + + push({ type: 'dot', value }); + continue; + } + + /** + * Text + */ + + push({ type: 'text', value }); + } + + // Mark imbalanced braces and brackets as invalid + do { + block = stack.pop(); + + if (block.type !== 'root') { + block.nodes.forEach(node => { + if (!node.nodes) { + if (node.type === 'open') node.isOpen = true; + if (node.type === 'close') node.isClose = true; + if (!node.nodes) node.type = 'text'; + node.invalid = true; + } + }); + + // get the location of the block on parent.nodes (block's siblings) + let parent = stack[stack.length - 1]; + let index = parent.nodes.indexOf(block); + // replace the (invalid) block with it's nodes + parent.nodes.splice(index, 1, ...block.nodes); + } + } while (stack.length > 0); + + push({ type: 'eos' }); + return ast; +}; + +module.exports = parse; diff --git a/node_modules/braces/lib/stringify.js b/node_modules/braces/lib/stringify.js new file mode 100644 index 00000000..414b7bcc --- /dev/null +++ b/node_modules/braces/lib/stringify.js @@ -0,0 +1,32 @@ +'use strict'; + +const utils = require('./utils'); + +module.exports = (ast, options = {}) => { + let stringify = (node, parent = {}) => { + let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); + let invalidNode = node.invalid === true && options.escapeInvalid === true; + let output = ''; + + if (node.value) { + if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { + return '\\' + node.value; + } + return node.value; + } + + if (node.value) { + return node.value; + } + + if (node.nodes) { + for (let child of node.nodes) { + output += stringify(child); + } + } + return output; + }; + + return stringify(ast); +}; + diff --git a/node_modules/braces/lib/utils.js b/node_modules/braces/lib/utils.js new file mode 100644 index 00000000..e3551a67 --- /dev/null +++ b/node_modules/braces/lib/utils.js @@ -0,0 +1,112 @@ +'use strict'; + +exports.isInteger = num => { + if (typeof num === 'number') { + return Number.isInteger(num); + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isInteger(Number(num)); + } + return false; +}; + +/** + * Find a node of the given type + */ + +exports.find = (node, type) => node.nodes.find(node => node.type === type); + +/** + * Find a node of the given type + */ + +exports.exceedsLimit = (min, max, step = 1, limit) => { + if (limit === false) return false; + if (!exports.isInteger(min) || !exports.isInteger(max)) return false; + return ((Number(max) - Number(min)) / Number(step)) >= limit; +}; + +/** + * Escape the given node with '\\' before node.value + */ + 
+exports.escapeNode = (block, n = 0, type) => { + let node = block.nodes[n]; + if (!node) return; + + if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { + if (node.escaped !== true) { + node.value = '\\' + node.value; + node.escaped = true; + } + } +}; + +/** + * Returns true if the given brace node should be enclosed in literal braces + */ + +exports.encloseBrace = node => { + if (node.type !== 'brace') return false; + if ((node.commas >> 0 + node.ranges >> 0) === 0) { + node.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a brace node is invalid. + */ + +exports.isInvalidBrace = block => { + if (block.type !== 'brace') return false; + if (block.invalid === true || block.dollar) return true; + if ((block.commas >> 0 + block.ranges >> 0) === 0) { + block.invalid = true; + return true; + } + if (block.open !== true || block.close !== true) { + block.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a node is an open or close node + */ + +exports.isOpenOrClose = node => { + if (node.type === 'open' || node.type === 'close') { + return true; + } + return node.open === true || node.close === true; +}; + +/** + * Reduce an array of text nodes. + */ + +exports.reduce = nodes => nodes.reduce((acc, node) => { + if (node.type === 'text') acc.push(node.value); + if (node.type === 'range') node.type = 'text'; + return acc; +}, []); + +/** + * Flatten an array + */ + +exports.flatten = (...args) => { + const result = []; + const flat = arr => { + for (let i = 0; i < arr.length; i++) { + let ele = arr[i]; + Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele); + } + return result; + }; + flat(args); + return result; +}; diff --git a/node_modules/braces/package.json b/node_modules/braces/package.json new file mode 100644 index 00000000..3f52e346 --- /dev/null +++ b/node_modules/braces/package.json @@ -0,0 +1,77 @@ +{ + "name": "braces", + "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", + "version": "3.0.2", + "homepage": "https://github.com/micromatch/braces", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Elan Shanker (https://github.com/es128)", + "Eugene Sharygin (https://github.com/eush77)", + "hemanth.hm (http://h3manth.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/braces", + "bugs": { + "url": "https://github.com/micromatch/braces/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "mocha", + "benchmark": "node benchmark" + }, + "dependencies": { + "fill-range": "^7.0.1" + }, + "devDependencies": { + "ansi-colors": "^3.2.4", + "bash-path": "^2.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "braces", + "expand", + "expansion", + "filepath", + "fill", + "fs", + "glob", + "globbing", + "letter", + "match", + "matches", + "matching", + "number", + "numerical", + "path", + "range", + "ranges", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + }, + "plugins": [ + "gulp-format-md" + ] + } +} diff --git a/node_modules/buffer-alloc-unsafe/index.js b/node_modules/buffer-alloc-unsafe/index.js new file mode 100644 index 00000000..0bd335ff --- /dev/null +++ b/node_modules/buffer-alloc-unsafe/index.js @@ -0,0 +1,17 @@ +function allocUnsafe (size) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be a number') + } + + if (size < 0) { + throw new RangeError('"size" argument must not be negative') + } + + if (Buffer.allocUnsafe) { + return Buffer.allocUnsafe(size) + } else { + return new Buffer(size) + } +} + +module.exports = allocUnsafe diff --git a/node_modules/buffer-alloc-unsafe/package.json b/node_modules/buffer-alloc-unsafe/package.json new file mode 100644 index 00000000..c2ab9042 --- /dev/null +++ b/node_modules/buffer-alloc-unsafe/package.json @@ -0,0 +1,24 @@ +{ + "name": "buffer-alloc-unsafe", + "version": "1.1.0", + "license": "MIT", + "repository": "LinusU/buffer-alloc-unsafe", + "files": [ + "index.js" + ], + "scripts": { + "test": "standard && node test" + }, + "devDependencies": { + "standard": "^7.1.2" + }, + "keywords": [ + "allocUnsafe", + "allocate", + "buffer allocUnsafe", + "buffer unsafe allocate", + "buffer", + "ponyfill", + "unsafe allocate" + ] +} diff --git a/node_modules/buffer-alloc-unsafe/readme.md b/node_modules/buffer-alloc-unsafe/readme.md new file mode 100644 index 00000000..8725ecf6 --- /dev/null +++ b/node_modules/buffer-alloc-unsafe/readme.md @@ -0,0 +1,46 @@ +# Buffer Alloc Unsafe + +A [ponyfill](https://ponyfill.com) for `Buffer.allocUnsafe`. + +Works as Node.js: `v7.0.0`
+Works on Node.js: `v0.10.0` + +## Installation + +```sh +npm install --save buffer-alloc-unsafe +``` + +## Usage + +```js +const allocUnsafe = require('buffer-alloc-unsafe') + +console.log(allocUnsafe(10)) +//=> + +console.log(allocUnsafe(10)) +//=> + +console.log(allocUnsafe(10)) +//=> + +allocUnsafe(-10) +//=> RangeError: "size" argument must not be negative +``` + +## API + +### allocUnsafe(size) + +- `size` <Integer> The desired length of the new `Buffer` + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must be +less than or equal to the value of `buffer.kMaxLength` and greater than or equal +to zero. Otherwise, a `RangeError` is thrown. + +## See also + +- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc` +- [buffer-fill](https://github.com/LinusU/buffer-fill) A ponyfill for `Buffer.fill` +- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from` diff --git a/node_modules/buffer-alloc/index.js b/node_modules/buffer-alloc/index.js new file mode 100644 index 00000000..fe658606 --- /dev/null +++ b/node_modules/buffer-alloc/index.js @@ -0,0 +1,32 @@ +var bufferFill = require('buffer-fill') +var allocUnsafe = require('buffer-alloc-unsafe') + +module.exports = function alloc (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be a number') + } + + if (size < 0) { + throw new RangeError('"size" argument must not be negative') + } + + if (Buffer.alloc) { + return Buffer.alloc(size, fill, encoding) + } + + var buffer = allocUnsafe(size) + + if (size === 0) { + return buffer + } + + if (fill === undefined) { + return bufferFill(buffer, 0) + } + + if (typeof encoding !== 'string') { + encoding = undefined + } + + return bufferFill(buffer, fill, encoding) +} diff --git a/node_modules/buffer-alloc/package.json b/node_modules/buffer-alloc/package.json new file mode 100644 index 00000000..a8a3c3ac --- /dev/null +++ b/node_modules/buffer-alloc/package.json @@ -0,0 +1,26 @@ +{ + "name": "buffer-alloc", + "version": "1.2.0", + "license": "MIT", + "repository": "LinusU/buffer-alloc", + "files": [ + "index.js" + ], + "scripts": { + "test": "standard && node test" + }, + "dependencies": { + "buffer-alloc-unsafe": "^1.1.0", + "buffer-fill": "^1.0.0" + }, + "devDependencies": { + "standard": "^7.1.2" + }, + "keywords": [ + "alloc", + "allocate", + "buffer alloc", + "buffer allocate", + "buffer" + ] +} diff --git a/node_modules/buffer-alloc/readme.md b/node_modules/buffer-alloc/readme.md new file mode 100644 index 00000000..80c7d7bb --- /dev/null +++ b/node_modules/buffer-alloc/readme.md @@ -0,0 +1,43 @@ +# Buffer Alloc + +A [ponyfill](https://ponyfill.com) for `Buffer.alloc`. + +Works as Node.js: `v7.0.0`
+Works on Node.js: `v0.10.0` + +## Installation + +```sh +npm install --save buffer-alloc +``` + +## Usage + +```js +const alloc = require('buffer-alloc') + +console.log(alloc(4)) +//=> + +console.log(alloc(6, 0x41)) +//=> + +console.log(alloc(10, 'linus', 'utf8')) +//=> +``` + +## API + +### alloc(size[, fill[, encoding]]) + +- `size` <Integer> The desired length of the new `Buffer` +- `fill` <String> | <Buffer> | <Integer> A value to pre-fill the new `Buffer` with. **Default:** `0` +- `encoding` <String> If `fill` is a string, this is its encoding. **Default:** `'utf8'` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the `Buffer` will be zero-filled. + +## See also + +- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe` +- [buffer-fill](https://github.com/LinusU/buffer-fill) A ponyfill for `Buffer.fill` +- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from` diff --git a/node_modules/buffer-crc32/LICENSE b/node_modules/buffer-crc32/LICENSE new file mode 100644 index 00000000..4cef10eb --- /dev/null +++ b/node_modules/buffer-crc32/LICENSE @@ -0,0 +1,19 @@ +The MIT License + +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/buffer-crc32/README.md b/node_modules/buffer-crc32/README.md new file mode 100644 index 00000000..0d9d8b83 --- /dev/null +++ b/node_modules/buffer-crc32/README.md @@ -0,0 +1,47 @@ +# buffer-crc32 + +[![Build Status](https://secure.travis-ci.org/brianloveswords/buffer-crc32.png?branch=master)](http://travis-ci.org/brianloveswords/buffer-crc32) + +crc32 that works with binary data and fancy character sets, outputs +buffer, signed or unsigned data and has tests. 
+ +Derived from the sample CRC implementation in the PNG specification: http://www.w3.org/TR/PNG/#D-CRCAppendix + +# install +``` +npm install buffer-crc32 +``` + +# example +```js +var crc32 = require('buffer-crc32'); +// works with buffers +var buf = Buffer([0x00, 0x73, 0x75, 0x70, 0x20, 0x62, 0x72, 0x6f, 0x00]) +crc32(buf) // -> + +// has convenience methods for getting signed or unsigned ints +crc32.signed(buf) // -> -1805997238 +crc32.unsigned(buf) // -> 2488970058 + +// will cast to buffer if given a string, so you can +// directly use foreign characters safely +crc32('自動販売機') // -> + +// and works in append mode too +var partialCrc = crc32('hey'); +var partialCrc = crc32(' ', partialCrc); +var partialCrc = crc32('sup', partialCrc); +var partialCrc = crc32(' ', partialCrc); +var finalCrc = crc32('bros', partialCrc); // -> +``` + +# tests +This was tested against the output of zlib's crc32 method. You can run +the tests with`npm test` (requires tap) + +# see also +https://github.com/alexgorbatchev/node-crc, `crc.buffer.crc32` also +supports buffer inputs and return unsigned ints (thanks @tjholowaychuk). + +# license +MIT/X11 diff --git a/node_modules/buffer-crc32/index.js b/node_modules/buffer-crc32/index.js new file mode 100644 index 00000000..6727dd39 --- /dev/null +++ b/node_modules/buffer-crc32/index.js @@ -0,0 +1,111 @@ +var Buffer = require('buffer').Buffer; + +var CRC_TABLE = [ + 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, + 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, + 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, + 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, + 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, + 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, + 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, + 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, + 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, + 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a, + 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, + 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, + 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, + 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, + 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, + 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, + 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, + 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, + 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, + 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, + 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, + 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, + 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, + 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, + 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, + 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, + 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615, + 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, + 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, + 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, + 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, + 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, + 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, + 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c, + 0x36034af6, 0x41047a60, 
0xdf60efc3, 0xa867df55, 0x316e8eef, + 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, + 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, + 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, + 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, + 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, + 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, + 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, + 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, + 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, + 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, + 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, + 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, + 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, + 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, + 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, + 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, + 0x2d02ef8d +]; + +if (typeof Int32Array !== 'undefined') { + CRC_TABLE = new Int32Array(CRC_TABLE); +} + +function ensureBuffer(input) { + if (Buffer.isBuffer(input)) { + return input; + } + + var hasNewBufferAPI = + typeof Buffer.alloc === "function" && + typeof Buffer.from === "function"; + + if (typeof input === "number") { + return hasNewBufferAPI ? Buffer.alloc(input) : new Buffer(input); + } + else if (typeof input === "string") { + return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input); + } + else { + throw new Error("input must be buffer, number, or string, received " + + typeof input); + } +} + +function bufferizeInt(num) { + var tmp = ensureBuffer(4); + tmp.writeInt32BE(num, 0); + return tmp; +} + +function _crc32(buf, previous) { + buf = ensureBuffer(buf); + if (Buffer.isBuffer(previous)) { + previous = previous.readUInt32BE(0); + } + var crc = ~~previous ^ -1; + for (var n = 0; n < buf.length; n++) { + crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8); + } + return (crc ^ -1); +} + +function crc32() { + return bufferizeInt(_crc32.apply(null, arguments)); +} +crc32.signed = function () { + return _crc32.apply(null, arguments); +}; +crc32.unsigned = function () { + return _crc32.apply(null, arguments) >>> 0; +}; + +module.exports = crc32; diff --git a/node_modules/buffer-crc32/package.json b/node_modules/buffer-crc32/package.json new file mode 100644 index 00000000..e896bec5 --- /dev/null +++ b/node_modules/buffer-crc32/package.json @@ -0,0 +1,39 @@ +{ + "author": "Brian J. 
Brennan ", + "name": "buffer-crc32", + "description": "A pure javascript CRC32 algorithm that plays nice with binary data", + "version": "0.2.13", + "licenses": [ + { + "type": "MIT", + "url": "https://github.com/brianloveswords/buffer-crc32/raw/master/LICENSE" + } + ], + "contributors": [ + { + "name": "Vladimir Kuznetsov", + "github": "mistakster" + } + ], + "homepage": "https://github.com/brianloveswords/buffer-crc32", + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/buffer-crc32.git" + }, + "main": "index.js", + "scripts": { + "test": "./node_modules/.bin/tap tests/*.test.js" + }, + "dependencies": {}, + "devDependencies": { + "tap": "~0.2.5" + }, + "optionalDependencies": {}, + "engines": { + "node": "*" + }, + "license": "MIT", + "files": [ + "index.js" + ] +} diff --git a/node_modules/buffer-fill/index.js b/node_modules/buffer-fill/index.js new file mode 100644 index 00000000..428a9e1f --- /dev/null +++ b/node_modules/buffer-fill/index.js @@ -0,0 +1,113 @@ +/* Node.js 6.4.0 and up has full support */ +var hasFullSupport = (function () { + try { + if (!Buffer.isEncoding('latin1')) { + return false + } + + var buf = Buffer.alloc ? Buffer.alloc(4) : new Buffer(4) + + buf.fill('ab', 'ucs2') + + return (buf.toString('hex') === '61006200') + } catch (_) { + return false + } +}()) + +function isSingleByte (val) { + return (val.length === 1 && val.charCodeAt(0) < 256) +} + +function fillWithNumber (buffer, val, start, end) { + if (start < 0 || end > buffer.length) { + throw new RangeError('Out of range index') + } + + start = start >>> 0 + end = end === undefined ? buffer.length : end >>> 0 + + if (end > start) { + buffer.fill(val, start, end) + } + + return buffer +} + +function fillWithBuffer (buffer, val, start, end) { + if (start < 0 || end > buffer.length) { + throw new RangeError('Out of range index') + } + + if (end <= start) { + return buffer + } + + start = start >>> 0 + end = end === undefined ? buffer.length : end >>> 0 + + var pos = start + var len = val.length + while (pos <= (end - len)) { + val.copy(buffer, pos) + pos += len + } + + if (pos !== end) { + val.copy(buffer, pos, 0, end - pos) + } + + return buffer +} + +function fill (buffer, val, start, end, encoding) { + if (hasFullSupport) { + return buffer.fill(val, start, end, encoding) + } + + if (typeof val === 'number') { + return fillWithNumber(buffer, val, start, end) + } + + if (typeof val === 'string') { + if (typeof start === 'string') { + encoding = start + start = 0 + end = buffer.length + } else if (typeof end === 'string') { + encoding = end + end = buffer.length + } + + if (encoding !== undefined && typeof encoding !== 'string') { + throw new TypeError('encoding must be a string') + } + + if (encoding === 'latin1') { + encoding = 'binary' + } + + if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + + if (val === '') { + return fillWithNumber(buffer, 0, start, end) + } + + if (isSingleByte(val)) { + return fillWithNumber(buffer, val.charCodeAt(0), start, end) + } + + val = new Buffer(val, encoding) + } + + if (Buffer.isBuffer(val)) { + return fillWithBuffer(buffer, val, start, end) + } + + // Other values (e.g. 
undefined, boolean, object) results in zero-fill + return fillWithNumber(buffer, 0, start, end) +} + +module.exports = fill diff --git a/node_modules/buffer-fill/package.json b/node_modules/buffer-fill/package.json new file mode 100644 index 00000000..b8f67c58 --- /dev/null +++ b/node_modules/buffer-fill/package.json @@ -0,0 +1,16 @@ +{ + "name": "buffer-fill", + "version": "1.0.0", + "license": "MIT", + "repository": "LinusU/buffer-fill", + "files": [ + "index.js" + ], + "scripts": { + "test": "standard && node test" + }, + "devDependencies": { + "buffer-alloc-unsafe": "^1.1.0", + "standard": "^7.1.2" + } +} diff --git a/node_modules/buffer-fill/readme.md b/node_modules/buffer-fill/readme.md new file mode 100644 index 00000000..ac307383 --- /dev/null +++ b/node_modules/buffer-fill/readme.md @@ -0,0 +1,54 @@ +# Buffer Fill + +A [ponyfill](https://ponyfill.com) for `Buffer.fill`. + +Works as Node.js: `v6.4.0`
+Works on Node.js: `v0.10.0` + +## Installation + +```sh +npm install --save buffer-fill +``` + +## Usage + +```js +const fill = require('buffer-fill') +const buf = Buffer.allocUnsafe(5) + +console.log(buf.fill(8)) +//=> + +console.log(buf.fill(9, 2, 4)) +//=> + +console.log(buf.fill('linus', 'latin1')) +//=> + +console.log(buf.fill('\u0222')) +//=> +``` + +## API + +### fill(buf, value[, offset[, end]][, encoding]) + +- `value` <String> | <Buffer> | <Integer> The value to fill `buf` with +- `offset` <Integer> Where to start filling `buf`. **Default:** `0` +- `end` <Integer> Where to stop filling `buf` (not inclusive). **Default:** `buf.length` +- `encoding` <String> If `value` is a string, this is its encoding. **Default:** `'utf8'` +- Return: <Buffer> A reference to `buf` + +Fills `buf` with the specified `value`. If the `offset` and `end` are not given, +the entire `buf` will be filled. This is meant to be a small simplification to +allow the creation and filling of a `Buffer` to be done on a single line. + +If the final write of a `fill()` operation falls on a multi-byte character, then +only the first bytes of that character that fit into `buf` are written. + +## See also + +- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe` +- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc` +- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from` diff --git a/node_modules/buffer/AUTHORS.md b/node_modules/buffer/AUTHORS.md new file mode 100644 index 00000000..22eb1712 --- /dev/null +++ b/node_modules/buffer/AUTHORS.md @@ -0,0 +1,70 @@ +# Authors + +#### Ordered by first contribution. + +- Romain Beauxis (toots@rastageeks.org) +- Tobias Koppers (tobias.koppers@googlemail.com) +- Janus (ysangkok@gmail.com) +- Rainer Dreyer (rdrey1@gmail.com) +- Tõnis Tiigi (tonistiigi@gmail.com) +- James Halliday (mail@substack.net) +- Michael Williamson (mike@zwobble.org) +- elliottcable (github@elliottcable.name) +- rafael (rvalle@livelens.net) +- Andrew Kelley (superjoe30@gmail.com) +- Andreas Madsen (amwebdk@gmail.com) +- Mike Brevoort (mike.brevoort@pearson.com) +- Brian White (mscdex@mscdex.net) +- Feross Aboukhadijeh (feross@feross.org) +- Ruben Verborgh (ruben@verborgh.org) +- eliang (eliang.cs@gmail.com) +- Jesse Tane (jesse.tane@gmail.com) +- Alfonso Boza (alfonso@cloud.com) +- Mathias Buus (mathiasbuus@gmail.com) +- Devon Govett (devongovett@gmail.com) +- Daniel Cousens (github@dcousens.com) +- Joseph Dykstra (josephdykstra@gmail.com) +- Parsha Pourkhomami (parshap+git@gmail.com) +- Damjan Košir (damjan.kosir@gmail.com) +- daverayment (dave.rayment@gmail.com) +- kawanet (u-suke@kawa.net) +- Linus Unnebäck (linus@folkdatorn.se) +- Nolan Lawson (nolan.lawson@gmail.com) +- Calvin Metcalf (calvin.metcalf@gmail.com) +- Koki Takahashi (hakatasiloving@gmail.com) +- Guy Bedford (guybedford@gmail.com) +- Jan Schär (jscissr@gmail.com) +- RaulTsc (tomescu.raul@gmail.com) +- Matthieu Monsch (monsch@alum.mit.edu) +- Dan Ehrenberg (littledan@chromium.org) +- Kirill Fomichev (fanatid@ya.ru) +- Yusuke Kawasaki (u-suke@kawa.net) +- DC (dcposch@dcpos.ch) +- John-David Dalton (john.david.dalton@gmail.com) +- adventure-yunfei (adventure030@gmail.com) +- Emil Bay (github@tixz.dk) +- Sam Sudar (sudar.sam@gmail.com) +- Volker Mische (volker.mische@gmail.com) +- David Walton (support@geekstocks.com) +- Сковорода Никита Андреевич (chalkerx@gmail.com) +- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com) 
+- ukstv (sergey.ukustov@machinomy.com) +- Renée Kooi (renee@kooi.me) +- ranbochen (ranbochen@qq.com) +- Vladimir Borovik (bobahbdb@gmail.com) +- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com) +- kumavis (aaron@kumavis.me) +- Sergey Ukustov (sergey.ukustov@machinomy.com) +- Fei Liu (liu.feiwood@gmail.com) +- Blaine Bublitz (blaine.bublitz@gmail.com) +- clement (clement@seald.io) +- Koushik Dutta (koushd@gmail.com) +- Jordan Harband (ljharb@gmail.com) +- Niklas Mischkulnig (mischnic@users.noreply.github.com) +- Nikolai Vavilov (vvnicholas@gmail.com) +- Fedor Nezhivoi (gyzerok@users.noreply.github.com) +- Peter Newman (peternewman@users.noreply.github.com) +- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com) +- jkkang (jkkang@smartauth.kr) + +#### Generated by bin/update-authors.sh. diff --git a/node_modules/buffer/LICENSE b/node_modules/buffer/LICENSE new file mode 100644 index 00000000..d6bf75dc --- /dev/null +++ b/node_modules/buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh, and other contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/buffer/README.md b/node_modules/buffer/README.md new file mode 100644 index 00000000..9a23d7cf --- /dev/null +++ b/node_modules/buffer/README.md @@ -0,0 +1,410 @@ +# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg +[travis-url]: https://travis-ci.org/feross/buffer +[npm-image]: https://img.shields.io/npm/v/buffer.svg +[npm-url]: https://npmjs.org/package/buffer +[downloads-image]: https://img.shields.io/npm/dm/buffer.svg +[downloads-url]: https://npmjs.org/package/buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### The buffer module from [node.js](https://nodejs.org/), for the browser. + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg +[saucelabs-url]: https://saucelabs.com/u/buffer + +With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module. + +The goal is to provide an API that is 100% identical to +[node's Buffer API](https://nodejs.org/api/buffer.html). 
Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +## features + +- Manipulate binary data like a boss, in all browsers! +- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`) +- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments) +- Excellent browser support (Chrome, Firefox, Edge, Safari 9+, IE 11, iOS 9+, Android, etc.) +- Preserves Node API exactly, with one minor difference (see below) +- Square-bracket `buf[4]` notation works! +- Does not modify any browser prototypes or put anything on `window` +- Comprehensive test suite (including all buffer tests from node.js core) + +## install + +To use this module directly (without browserify), install it: + +```bash +npm install buffer +``` + +This module was previously called **native-buffer-browserify**, but please use **buffer** +from now on. + +If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer). + +## usage + +The module's API is identical to node's `Buffer` API. Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +As mentioned above, `require('buffer')` or use the `Buffer` global with +[browserify](http://browserify.org) and this module will automatically be included +in your bundle. Almost any npm module will work in the browser, even if it assumes that +the node `Buffer` API will be available. + +To depend on this module explicitly (without browserify), require it like this: + +```js +var Buffer = require('buffer/').Buffer // note: the trailing slash is important! +``` + +To require this module explicitly, use `require('buffer/')` which tells the node.js module +lookup algorithm (also used by browserify) to use the **npm module** named `buffer` +instead of the **node.js core** module named `buffer`! + + +## how does it work? + +The Buffer constructor returns instances of `Uint8Array` that have their prototype +changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`, +so the returned instances will have all the node `Buffer` methods and the +`Uint8Array` methods. Square bracket notation works as expected -- it returns a +single octet. + +The `Uint8Array` prototype remains unmodified. + + +## tracking the latest node api + +This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer +API is considered **stable** in the +[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index), +so it is unlikely that there will ever be breaking changes. +Nonetheless, when/if the Buffer API changes in node, this module's API will change +accordingly. + +## related packages + +- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer +- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer +- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package + +## conversion packages + +### convert typed array to buffer + +Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast. + +### convert buffer to typed array + +`Buffer` is a subclass of `Uint8Array` (which is a typed array). 
So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`. + +### convert blob to buffer + +Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`. + +### convert buffer to blob + +To convert a `Buffer` to a `Blob`, use the `Blob` constructor: + +```js +var blob = new Blob([ buffer ]) +``` + +Optionally, specify a mimetype: + +```js +var blob = new Blob([ buffer ], { type: 'text/html' }) +``` + +### convert arraybuffer to buffer + +To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast. + +```js +var buffer = Buffer.from(arrayBuffer) +``` + +### convert buffer to arraybuffer + +To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects): + +```js +var arrayBuffer = buffer.buffer.slice( + buffer.byteOffset, buffer.byteOffset + buffer.byteLength +) +``` + +Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module. + +## performance + +See perf tests in `/perf`. + +`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a +sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will +always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module, +which is included to compare against. + +NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README. + +### Chrome 38 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ | +| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | | +| | | | | +| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | | +| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | | +| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | | +| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | | +| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | | +| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ | +| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | | +| | | | | +| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ | +| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | | +| | | | | +| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ | +| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | | +| | | | | +| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | | +| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | | +| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ | + + +### Firefox 33 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | | +| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ | +| | | | | +| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | | +| Uint8Array#concat | 1,255,674 
ops/sec | ±8.65% | 52 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | | +| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | | +| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | | +| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | | +| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | | +| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | | +| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ | +| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | | +| | | | | +| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | | +| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | | +| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ | + +### Safari 8 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ | +| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | | +| | | | | +| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | | +| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | | +| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | | +| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | | +| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | | +| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | | +| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | | +| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | | +| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ | +| | | | | +| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | | +| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | | +| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ | + + +### Node 0.11.14 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | | +| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ | +| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | | +| | | | | +| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | | +| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ | +| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | | +| | | | | +| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | | +| Uint8Array#copy(16000) 
| 1,348,517 ops/sec | ±0.84% | 89 | ✓ | +| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | | +| | | | | +| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | | +| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ | +| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | | +| | | | | +| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | | +| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | | +| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | | +| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ | +| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | | +| | | | | +| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ | +| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | | +| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | | +| | | | | +| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ | +| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | | +| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | | +| | | | | +| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | | +| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | | +| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ | +| | | | | +| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | | +| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ | +| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | | +| | | | | +| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | | +| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ | +| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | | + +### iojs 1.8.1 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | | +| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | | +| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ | +| | | | | +| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | | +| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | | +| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | | +| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | | +| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | | +| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ | +| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | | +| | | | | +| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | | +| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | | +| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | | +| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ | +| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | | +| | | | | +| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ | +| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | | +| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | | +| | | | | +| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ | +| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | | +| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | | +| | | | | +| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | | +| DataView#getUint32 | 137,592 ops/sec | 
±0.64% | 90 | | +| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ | +| | | | | +| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | | +| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | | +| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | | +| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ | +| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | | +| | | | | + +## Testing the project + +First, install the project: + + npm install + +Then, to run tests in Node.js, run: + + npm run test-node + +To test locally in a browser, you can run: + + npm run test-browser-es5-local # For ES5 browsers that don't support ES6 + npm run test-browser-es6-local # For ES6 compliant browsers + +This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap). + +To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run: + + npm test + +This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files. + +## JavaScript Standard Style + +This module uses [JavaScript Standard Style](https://github.com/feross/standard). + +[![JavaScript Style Guide](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) + +To test that the code conforms to the style, `npm install` and run: + + ./node_modules/.bin/standard + +## credit + +This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify). + +## Security Policies and Procedures + +The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues. + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis. 
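The vendored `buffer` README above documents the ArrayBuffer and Uint8Array conversions in separate snippets; the sketch below is an editor-added illustration that ties them together into one runnable CommonJS example. It is not part of the vendored package or of this project's own code: it only uses APIs the README itself documents (`require('buffer/')`, `Buffer.from(arrayBuffer)`, `.byteOffset`/`.byteLength`) and assumes the package can be resolved from the `node_modules` tree shown in this diff.

```js
// Editor's illustrative sketch (not part of the vendored package): combines the
// README's "convert arraybuffer to buffer" and "convert buffer to arraybuffer"
// sections. Assumes `buffer` resolves from node_modules, as it does in this tree.
var Buffer = require('buffer/').Buffer // trailing slash picks the npm module, not Node core

// ArrayBuffer -> Buffer: Buffer.from(arrayBuffer) shares memory, no copy is made.
var arrayBuffer = new ArrayBuffer(4)
var view = new Uint8Array(arrayBuffer)
view[0] = 0xde
view[1] = 0xad
var buf = Buffer.from(arrayBuffer)
console.log(buf) // roughly: <Buffer de ad 00 00>

// Shared memory: a write through the Uint8Array is visible through the Buffer.
view[2] = 0xbe
console.log(buf[2]) // 190 (0xbe)

// Buffer -> ArrayBuffer: slice out exactly the bytes this Buffer covers.
var back = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)
console.log(back.byteLength) // 4
```

Because `Buffer.from(arrayBuffer)` shares the underlying memory, the `.slice(byteOffset, byteOffset + byteLength)` step matters when converting back: it copies only the region this Buffer actually covers, rather than the whole (possibly larger) backing ArrayBuffer.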
diff --git a/node_modules/buffer/index.d.ts b/node_modules/buffer/index.d.ts new file mode 100644 index 00000000..5d1a804e --- /dev/null +++ b/node_modules/buffer/index.d.ts @@ -0,0 +1,186 @@ +export class Buffer extends Uint8Array { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + reverse(): this; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; 
+ lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer | Uint8Array): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. 
+ * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initializing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; +} diff --git a/node_modules/buffer/index.js b/node_modules/buffer/index.js new file mode 100644 index 00000000..609cf311 --- /dev/null +++ b/node_modules/buffer/index.js @@ -0,0 +1,1817 @@ +/*! + * The buffer module from node.js, for the browser. + * + * @author Feross Aboukhadijeh + * @license MIT + */ +/* eslint-disable no-proto */ + +'use strict' + +var base64 = require('base64-js') +var ieee754 = require('ieee754') +var customInspectSymbol = + (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation + ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation + : null + +exports.Buffer = Buffer +exports.SlowBuffer = SlowBuffer +exports.INSPECT_MAX_BYTES = 50 + +var K_MAX_LENGTH = 0x7fffffff +exports.kMaxLength = K_MAX_LENGTH + +/** + * If `Buffer.TYPED_ARRAY_SUPPORT`: + * === true Use Uint8Array implementation (fastest) + * === false Print warning and recommend using `buffer` v4.x which has an Object + * implementation (most compatible, even IE6) + * + * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, + * Opera 11.6+, iOS 4.2+. + * + * We report that the browser does not support typed arrays if the are not subclassable + * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array` + * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support + * for __proto__ and has a buggy typed array implementation. + */ +Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() + +if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && + typeof console.error === 'function') { + console.error( + 'This browser lacks typed array (Uint8Array) support which is required by ' + + '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' + ) +} + +function typedArraySupport () { + // Can typed array instances can be augmented? 
+ try { + var arr = new Uint8Array(1) + var proto = { foo: function () { return 42 } } + Object.setPrototypeOf(proto, Uint8Array.prototype) + Object.setPrototypeOf(arr, proto) + return arr.foo() === 42 + } catch (e) { + return false + } +} + +Object.defineProperty(Buffer.prototype, 'parent', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.buffer + } +}) + +Object.defineProperty(Buffer.prototype, 'offset', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.byteOffset + } +}) + +function createBuffer (length) { + if (length > K_MAX_LENGTH) { + throw new RangeError('The value "' + length + '" is invalid for option "size"') + } + // Return an augmented `Uint8Array` instance + var buf = new Uint8Array(length) + Object.setPrototypeOf(buf, Buffer.prototype) + return buf +} + +/** + * The Buffer constructor returns instances of `Uint8Array` that have their + * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of + * `Uint8Array`, so the returned instances will have all the node `Buffer` methods + * and the `Uint8Array` methods. Square bracket notation works as expected -- it + * returns a single octet. + * + * The `Uint8Array` prototype remains unmodified. + */ + +function Buffer (arg, encodingOrOffset, length) { + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new TypeError( + 'The "string" argument must be of type string. Received type number' + ) + } + return allocUnsafe(arg) + } + return from(arg, encodingOrOffset, length) +} + +Buffer.poolSize = 8192 // not used by this implementation + +function from (value, encodingOrOffset, length) { + if (typeof value === 'string') { + return fromString(value, encodingOrOffset) + } + + if (ArrayBuffer.isView(value)) { + return fromArrayView(value) + } + + if (value == null) { + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. Received type ' + (typeof value) + ) + } + + if (isInstance(value, ArrayBuffer) || + (value && isInstance(value.buffer, ArrayBuffer))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof SharedArrayBuffer !== 'undefined' && + (isInstance(value, SharedArrayBuffer) || + (value && isInstance(value.buffer, SharedArrayBuffer)))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof value === 'number') { + throw new TypeError( + 'The "value" argument must not be of type number. Received type number' + ) + } + + var valueOf = value.valueOf && value.valueOf() + if (valueOf != null && valueOf !== value) { + return Buffer.from(valueOf, encodingOrOffset, length) + } + + var b = fromObject(value) + if (b) return b + + if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null && + typeof value[Symbol.toPrimitive] === 'function') { + return Buffer.from( + value[Symbol.toPrimitive]('string'), encodingOrOffset, length + ) + } + + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. Received type ' + (typeof value) + ) +} + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. 
+ * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + **/ +Buffer.from = function (value, encodingOrOffset, length) { + return from(value, encodingOrOffset, length) +} + +// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug: +// https://github.com/feross/buffer/pull/148 +Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype) +Object.setPrototypeOf(Buffer, Uint8Array) + +function assertSize (size) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be of type number') + } else if (size < 0) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } +} + +function alloc (size, fill, encoding) { + assertSize(size) + if (size <= 0) { + return createBuffer(size) + } + if (fill !== undefined) { + // Only pay attention to encoding if it's a string. This + // prevents accidentally sending in a number that would + // be interpreted as a start offset. + return typeof encoding === 'string' + ? createBuffer(size).fill(fill, encoding) + : createBuffer(size).fill(fill) + } + return createBuffer(size) +} + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + **/ +Buffer.alloc = function (size, fill, encoding) { + return alloc(size, fill, encoding) +} + +function allocUnsafe (size) { + assertSize(size) + return createBuffer(size < 0 ? 0 : checked(size) | 0) +} + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. + * */ +Buffer.allocUnsafe = function (size) { + return allocUnsafe(size) +} +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. + */ +Buffer.allocUnsafeSlow = function (size) { + return allocUnsafe(size) +} + +function fromString (string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + + var length = byteLength(string, encoding) | 0 + var buf = createBuffer(length) + + var actual = buf.write(string, encoding) + + if (actual !== length) { + // Writing a hex string, for example, that contains invalid characters will + // cause everything after the first invalid character to be ignored. (e.g. + // 'abxxcd' will be treated as 'ab') + buf = buf.slice(0, actual) + } + + return buf +} + +function fromArrayLike (array) { + var length = array.length < 0 ? 
0 : checked(array.length) | 0 + var buf = createBuffer(length) + for (var i = 0; i < length; i += 1) { + buf[i] = array[i] & 255 + } + return buf +} + +function fromArrayView (arrayView) { + if (isInstance(arrayView, Uint8Array)) { + var copy = new Uint8Array(arrayView) + return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength) + } + return fromArrayLike(arrayView) +} + +function fromArrayBuffer (array, byteOffset, length) { + if (byteOffset < 0 || array.byteLength < byteOffset) { + throw new RangeError('"offset" is outside of buffer bounds') + } + + if (array.byteLength < byteOffset + (length || 0)) { + throw new RangeError('"length" is outside of buffer bounds') + } + + var buf + if (byteOffset === undefined && length === undefined) { + buf = new Uint8Array(array) + } else if (length === undefined) { + buf = new Uint8Array(array, byteOffset) + } else { + buf = new Uint8Array(array, byteOffset, length) + } + + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(buf, Buffer.prototype) + + return buf +} + +function fromObject (obj) { + if (Buffer.isBuffer(obj)) { + var len = checked(obj.length) | 0 + var buf = createBuffer(len) + + if (buf.length === 0) { + return buf + } + + obj.copy(buf, 0, 0, len) + return buf + } + + if (obj.length !== undefined) { + if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { + return createBuffer(0) + } + return fromArrayLike(obj) + } + + if (obj.type === 'Buffer' && Array.isArray(obj.data)) { + return fromArrayLike(obj.data) + } +} + +function checked (length) { + // Note: cannot use `length < K_MAX_LENGTH` here because that fails when + // length is NaN (which is otherwise coerced to zero.) + if (length >= K_MAX_LENGTH) { + throw new RangeError('Attempt to allocate Buffer larger than maximum ' + + 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') + } + return length | 0 +} + +function SlowBuffer (length) { + if (+length != length) { // eslint-disable-line eqeqeq + length = 0 + } + return Buffer.alloc(+length) +} + +Buffer.isBuffer = function isBuffer (b) { + return b != null && b._isBuffer === true && + b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false +} + +Buffer.compare = function compare (a, b) { + if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength) + if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength) + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + throw new TypeError( + 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array' + ) + } + + if (a === b) return 0 + + var x = a.length + var y = b.length + + for (var i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i] + y = b[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +Buffer.isEncoding = function isEncoding (encoding) { + switch (String(encoding).toLowerCase()) { + case 'hex': + case 'utf8': + case 'utf-8': + case 'ascii': + case 'latin1': + case 'binary': + case 'base64': + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return true + default: + return false + } +} + +Buffer.concat = function concat (list, length) { + if (!Array.isArray(list)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + + if (list.length === 0) { + return Buffer.alloc(0) + } + + var i + if (length === undefined) { + length = 0 + for (i = 0; i < list.length; ++i) { + length += list[i].length + } + } + + var buffer = Buffer.allocUnsafe(length) + var pos = 0 + for (i = 0; i < list.length; 
++i) { + var buf = list[i] + if (isInstance(buf, Uint8Array)) { + if (pos + buf.length > buffer.length) { + Buffer.from(buf).copy(buffer, pos) + } else { + Uint8Array.prototype.set.call( + buffer, + buf, + pos + ) + } + } else if (!Buffer.isBuffer(buf)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } else { + buf.copy(buffer, pos) + } + pos += buf.length + } + return buffer +} + +function byteLength (string, encoding) { + if (Buffer.isBuffer(string)) { + return string.length + } + if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) { + return string.byteLength + } + if (typeof string !== 'string') { + throw new TypeError( + 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' + + 'Received type ' + typeof string + ) + } + + var len = string.length + var mustMatch = (arguments.length > 2 && arguments[2] === true) + if (!mustMatch && len === 0) return 0 + + // Use a for loop to avoid recursion + var loweredCase = false + for (;;) { + switch (encoding) { + case 'ascii': + case 'latin1': + case 'binary': + return len + case 'utf8': + case 'utf-8': + return utf8ToBytes(string).length + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return len * 2 + case 'hex': + return len >>> 1 + case 'base64': + return base64ToBytes(string).length + default: + if (loweredCase) { + return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8 + } + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} +Buffer.byteLength = byteLength + +function slowToString (encoding, start, end) { + var loweredCase = false + + // No need to verify that "this.length <= MAX_UINT32" since it's a read-only + // property of a typed array. + + // This behaves neither like String nor Uint8Array in that we set start/end + // to their upper/lower bounds if the value passed is out of range. + // undefined is handled specially as per ECMA-262 6th Edition, + // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. + if (start === undefined || start < 0) { + start = 0 + } + // Return early if start > this.length. Done here to prevent potential uint32 + // coercion fail below. + if (start > this.length) { + return '' + } + + if (end === undefined || end > this.length) { + end = this.length + } + + if (end <= 0) { + return '' + } + + // Force coercion to uint32. This will also coerce falsey/NaN values to 0. + end >>>= 0 + start >>>= 0 + + if (end <= start) { + return '' + } + + if (!encoding) encoding = 'utf8' + + while (true) { + switch (encoding) { + case 'hex': + return hexSlice(this, start, end) + + case 'utf8': + case 'utf-8': + return utf8Slice(this, start, end) + + case 'ascii': + return asciiSlice(this, start, end) + + case 'latin1': + case 'binary': + return latin1Slice(this, start, end) + + case 'base64': + return base64Slice(this, start, end) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return utf16leSlice(this, start, end) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = (encoding + '').toLowerCase() + loweredCase = true + } + } +} + +// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package) +// to detect a Buffer instance. It's not possible to use `instanceof Buffer` +// reliably in a browserify context because there could be multiple different +// copies of the 'buffer' package in use. This method works even for Buffer +// instances that were created from another copy of the `buffer` package. 
+// See: https://github.com/feross/buffer/issues/154 +Buffer.prototype._isBuffer = true + +function swap (b, n, m) { + var i = b[n] + b[n] = b[m] + b[m] = i +} + +Buffer.prototype.swap16 = function swap16 () { + var len = this.length + if (len % 2 !== 0) { + throw new RangeError('Buffer size must be a multiple of 16-bits') + } + for (var i = 0; i < len; i += 2) { + swap(this, i, i + 1) + } + return this +} + +Buffer.prototype.swap32 = function swap32 () { + var len = this.length + if (len % 4 !== 0) { + throw new RangeError('Buffer size must be a multiple of 32-bits') + } + for (var i = 0; i < len; i += 4) { + swap(this, i, i + 3) + swap(this, i + 1, i + 2) + } + return this +} + +Buffer.prototype.swap64 = function swap64 () { + var len = this.length + if (len % 8 !== 0) { + throw new RangeError('Buffer size must be a multiple of 64-bits') + } + for (var i = 0; i < len; i += 8) { + swap(this, i, i + 7) + swap(this, i + 1, i + 6) + swap(this, i + 2, i + 5) + swap(this, i + 3, i + 4) + } + return this +} + +Buffer.prototype.toString = function toString () { + var length = this.length + if (length === 0) return '' + if (arguments.length === 0) return utf8Slice(this, 0, length) + return slowToString.apply(this, arguments) +} + +Buffer.prototype.toLocaleString = Buffer.prototype.toString + +Buffer.prototype.equals = function equals (b) { + if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') + if (this === b) return true + return Buffer.compare(this, b) === 0 +} + +Buffer.prototype.inspect = function inspect () { + var str = '' + var max = exports.INSPECT_MAX_BYTES + str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim() + if (this.length > max) str += ' ... ' + return '' +} +if (customInspectSymbol) { + Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect +} + +Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { + if (isInstance(target, Uint8Array)) { + target = Buffer.from(target, target.offset, target.byteLength) + } + if (!Buffer.isBuffer(target)) { + throw new TypeError( + 'The "target" argument must be one of type Buffer or Uint8Array. ' + + 'Received type ' + (typeof target) + ) + } + + if (start === undefined) { + start = 0 + } + if (end === undefined) { + end = target ? target.length : 0 + } + if (thisStart === undefined) { + thisStart = 0 + } + if (thisEnd === undefined) { + thisEnd = this.length + } + + if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { + throw new RangeError('out of range index') + } + + if (thisStart >= thisEnd && start >= end) { + return 0 + } + if (thisStart >= thisEnd) { + return -1 + } + if (start >= end) { + return 1 + } + + start >>>= 0 + end >>>= 0 + thisStart >>>= 0 + thisEnd >>>= 0 + + if (this === target) return 0 + + var x = thisEnd - thisStart + var y = end - start + var len = Math.min(x, y) + + var thisCopy = this.slice(thisStart, thisEnd) + var targetCopy = target.slice(start, end) + + for (var i = 0; i < len; ++i) { + if (thisCopy[i] !== targetCopy[i]) { + x = thisCopy[i] + y = targetCopy[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. 
+// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant is val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { + // Empty buffer means no match + if (buffer.length === 0) return -1 + + // Normalize byteOffset + if (typeof byteOffset === 'string') { + encoding = byteOffset + byteOffset = 0 + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000 + } + byteOffset = +byteOffset // Coerce to Number. + if (numberIsNaN(byteOffset)) { + // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer + byteOffset = dir ? 0 : (buffer.length - 1) + } + + // Normalize byteOffset: negative offsets start from the end of the buffer + if (byteOffset < 0) byteOffset = buffer.length + byteOffset + if (byteOffset >= buffer.length) { + if (dir) return -1 + else byteOffset = buffer.length - 1 + } else if (byteOffset < 0) { + if (dir) byteOffset = 0 + else return -1 + } + + // Normalize val + if (typeof val === 'string') { + val = Buffer.from(val, encoding) + } + + // Finally, search either indexOf (if dir is true) or lastIndexOf + if (Buffer.isBuffer(val)) { + // Special case: looking for empty string/buffer always fails + if (val.length === 0) { + return -1 + } + return arrayIndexOf(buffer, val, byteOffset, encoding, dir) + } else if (typeof val === 'number') { + val = val & 0xFF // Search for a byte value [0-255] + if (typeof Uint8Array.prototype.indexOf === 'function') { + if (dir) { + return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) + } else { + return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) + } + } + return arrayIndexOf(buffer, [val], byteOffset, encoding, dir) + } + + throw new TypeError('val must be string, number or Buffer') +} + +function arrayIndexOf (arr, val, byteOffset, encoding, dir) { + var indexSize = 1 + var arrLength = arr.length + var valLength = val.length + + if (encoding !== undefined) { + encoding = String(encoding).toLowerCase() + if (encoding === 'ucs2' || encoding === 'ucs-2' || + encoding === 'utf16le' || encoding === 'utf-16le') { + if (arr.length < 2 || val.length < 2) { + return -1 + } + indexSize = 2 + arrLength /= 2 + valLength /= 2 + byteOffset /= 2 + } + } + + function read (buf, i) { + if (indexSize === 1) { + return buf[i] + } else { + return buf.readUInt16BE(i * indexSize) + } + } + + var i + if (dir) { + var foundIndex = -1 + for (i = byteOffset; i < arrLength; i++) { + if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { + if (foundIndex === -1) foundIndex = i + if (i - foundIndex + 1 === valLength) return foundIndex * indexSize + } else { + if (foundIndex !== -1) i -= i - foundIndex + foundIndex = -1 + } + } + } else { + if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength + for (i = byteOffset; i >= 0; i--) { + var found = true + for (var j = 0; j < valLength; j++) { + if (read(arr, i + j) !== read(val, j)) { + found = false + break + } + } + if (found) return i + } + } + + return -1 +} + +Buffer.prototype.includes = function includes (val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1 +} + +Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true) +} + +Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false) +} + +function hexWrite (buf, string, offset, length) { + offset = Number(offset) || 0 + var remaining = buf.length - offset + if (!length) { + length = remaining + } else { + length = Number(length) + if (length > remaining) { + length = remaining + } + } + + var strLen = string.length + + if (length > strLen / 2) { + length = strLen / 2 + } + for (var i = 0; i < length; ++i) { + var parsed = parseInt(string.substr(i * 2, 2), 16) + if (numberIsNaN(parsed)) return i + buf[offset + i] = parsed + } + return i +} + +function utf8Write (buf, string, offset, length) { + return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) +} + +function asciiWrite (buf, string, offset, length) { + return blitBuffer(asciiToBytes(string), buf, offset, length) +} + +function base64Write (buf, string, offset, length) { + return blitBuffer(base64ToBytes(string), buf, offset, length) +} + +function ucs2Write (buf, string, offset, length) { + return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) +} + +Buffer.prototype.write = function write (string, offset, length, encoding) { + // Buffer#write(string) + if (offset === undefined) { + encoding = 'utf8' + length = this.length + offset = 0 + // Buffer#write(string, encoding) + } else if (length === undefined && typeof offset === 'string') { + encoding = offset + length = this.length + offset = 0 + // Buffer#write(string, offset[, length][, encoding]) + } else if (isFinite(offset)) { + offset = offset >>> 0 + if (isFinite(length)) { + length = length >>> 0 + if (encoding === undefined) encoding = 'utf8' + } else { + encoding = length + length = undefined + } + } else { + throw new Error( + 'Buffer.write(string, encoding, offset[, length]) is no longer supported' + ) + } + + var remaining = this.length - offset + if (length === undefined || length > remaining) length = remaining + + if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { + throw new RangeError('Attempt to write outside buffer bounds') + } + + if (!encoding) encoding = 'utf8' + + var loweredCase = false + for (;;) { + switch (encoding) { + case 'hex': + return hexWrite(this, string, offset, length) + + case 'utf8': + case 'utf-8': + return utf8Write(this, string, offset, length) + + case 'ascii': + case 'latin1': + case 'binary': + return asciiWrite(this, string, offset, length) + + case 'base64': + // Warning: maxLength not taken into account in base64Write + return base64Write(this, string, offset, length) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + 
return ucs2Write(this, string, offset, length) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} + +Buffer.prototype.toJSON = function toJSON () { + return { + type: 'Buffer', + data: Array.prototype.slice.call(this._arr || this, 0) + } +} + +function base64Slice (buf, start, end) { + if (start === 0 && end === buf.length) { + return base64.fromByteArray(buf) + } else { + return base64.fromByteArray(buf.slice(start, end)) + } +} + +function utf8Slice (buf, start, end) { + end = Math.min(buf.length, end) + var res = [] + + var i = start + while (i < end) { + var firstByte = buf[i] + var codePoint = null + var bytesPerSequence = (firstByte > 0xEF) + ? 4 + : (firstByte > 0xDF) + ? 3 + : (firstByte > 0xBF) + ? 2 + : 1 + + if (i + bytesPerSequence <= end) { + var secondByte, thirdByte, fourthByte, tempCodePoint + + switch (bytesPerSequence) { + case 1: + if (firstByte < 0x80) { + codePoint = firstByte + } + break + case 2: + secondByte = buf[i + 1] + if ((secondByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) + if (tempCodePoint > 0x7F) { + codePoint = tempCodePoint + } + } + break + case 3: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) + if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { + codePoint = tempCodePoint + } + } + break + case 4: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + fourthByte = buf[i + 3] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) + if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { + codePoint = tempCodePoint + } + } + } + } + + if (codePoint === null) { + // we did not generate a valid codePoint so insert a + // replacement char (U+FFFD) and advance only 1 byte + codePoint = 0xFFFD + bytesPerSequence = 1 + } else if (codePoint > 0xFFFF) { + // encode to utf16 (surrogate pair dance) + codePoint -= 0x10000 + res.push(codePoint >>> 10 & 0x3FF | 0xD800) + codePoint = 0xDC00 | codePoint & 0x3FF + } + + res.push(codePoint) + i += bytesPerSequence + } + + return decodeCodePointsArray(res) +} + +// Based on http://stackoverflow.com/a/22747272/680742, the browser with +// the lowest limit is Chrome, with 0x10000 args. +// We go 1 magnitude less, for safety +var MAX_ARGUMENTS_LENGTH = 0x1000 + +function decodeCodePointsArray (codePoints) { + var len = codePoints.length + if (len <= MAX_ARGUMENTS_LENGTH) { + return String.fromCharCode.apply(String, codePoints) // avoid extra slice() + } + + // Decode in chunks to avoid "call stack size exceeded". 
+ var res = '' + var i = 0 + while (i < len) { + res += String.fromCharCode.apply( + String, + codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) + ) + } + return res +} + +function asciiSlice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i] & 0x7F) + } + return ret +} + +function latin1Slice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i]) + } + return ret +} + +function hexSlice (buf, start, end) { + var len = buf.length + + if (!start || start < 0) start = 0 + if (!end || end < 0 || end > len) end = len + + var out = '' + for (var i = start; i < end; ++i) { + out += hexSliceLookupTable[buf[i]] + } + return out +} + +function utf16leSlice (buf, start, end) { + var bytes = buf.slice(start, end) + var res = '' + // If bytes.length is odd, the last 8 bits must be ignored (same as node.js) + for (var i = 0; i < bytes.length - 1; i += 2) { + res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) + } + return res +} + +Buffer.prototype.slice = function slice (start, end) { + var len = this.length + start = ~~start + end = end === undefined ? len : ~~end + + if (start < 0) { + start += len + if (start < 0) start = 0 + } else if (start > len) { + start = len + } + + if (end < 0) { + end += len + if (end < 0) end = 0 + } else if (end > len) { + end = len + } + + if (end < start) end = start + + var newBuf = this.subarray(start, end) + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(newBuf, Buffer.prototype) + + return newBuf +} + +/* + * Need to make sure that buffer isn't trying to write out of bounds. + */ +function checkOffset (offset, ext, length) { + if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') + if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') +} + +Buffer.prototype.readUintLE = +Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + + return val +} + +Buffer.prototype.readUintBE = +Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + checkOffset(offset, byteLength, this.length) + } + + var val = this[offset + --byteLength] + var mul = 1 + while (byteLength > 0 && (mul *= 0x100)) { + val += this[offset + --byteLength] * mul + } + + return val +} + +Buffer.prototype.readUint8 = +Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + return this[offset] +} + +Buffer.prototype.readUint16LE = +Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return this[offset] | (this[offset + 1] << 8) +} + +Buffer.prototype.readUint16BE = +Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return (this[offset] << 8) | this[offset + 1] +} + +Buffer.prototype.readUint32LE = +Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) 
{ + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return ((this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16)) + + (this[offset + 3] * 0x1000000) +} + +Buffer.prototype.readUint32BE = +Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] * 0x1000000) + + ((this[offset + 1] << 16) | + (this[offset + 2] << 8) | + this[offset + 3]) +} + +Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var i = byteLength + var mul = 1 + var val = this[offset + --i] + while (i > 0 && (mul *= 0x100)) { + val += this[offset + --i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + if (!(this[offset] & 0x80)) return (this[offset]) + return ((0xff - this[offset] + 1) * -1) +} + +Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset] | (this[offset + 1] << 8) + return (val & 0x8000) ? val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset + 1] | (this[offset] << 8) + return (val & 0x8000) ? 
val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16) | + (this[offset + 3] << 24) +} + +Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] << 24) | + (this[offset + 1] << 16) | + (this[offset + 2] << 8) | + (this[offset + 3]) +} + +Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, true, 23, 4) +} + +Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, false, 23, 4) +} + +Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, true, 52, 8) +} + +Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, false, 52, 8) +} + +function checkInt (buf, value, offset, ext, max, min) { + if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') + if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') + if (offset + ext > buf.length) throw new RangeError('Index out of range') +} + +Buffer.prototype.writeUintLE = +Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var mul = 1 + var i = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUintBE = +Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var i = byteLength - 1 + var mul = 1 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUint8 = +Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeUint16LE = +Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeUint16BE = +Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) 
+ this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeUint32LE = +Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset + 3] = (value >>> 24) + this[offset + 2] = (value >>> 16) + this[offset + 1] = (value >>> 8) + this[offset] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeUint32BE = +Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = 0 + var mul = 1 + var sub = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = byteLength - 1 + var mul = 1 + var sub = 0 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) + if (value < 0) value = 0xff + value + 1 + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + this[offset + 2] = (value >>> 16) + this[offset + 3] = (value >>> 24) + return offset + 4 +} + +Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (value < 0) value = 0xffffffff + value + 1 + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) 
+ this[offset + 3] = (value & 0xff) + return offset + 4 +} + +function checkIEEE754 (buf, value, offset, ext, max, min) { + if (offset + ext > buf.length) throw new RangeError('Index out of range') + if (offset < 0) throw new RangeError('Index out of range') +} + +function writeFloat (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) + } + ieee754.write(buf, value, offset, littleEndian, 23, 4) + return offset + 4 +} + +Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { + return writeFloat(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { + return writeFloat(this, value, offset, false, noAssert) +} + +function writeDouble (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) + } + ieee754.write(buf, value, offset, littleEndian, 52, 8) + return offset + 8 +} + +Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { + return writeDouble(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { + return writeDouble(this, value, offset, false, noAssert) +} + +// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) +Buffer.prototype.copy = function copy (target, targetStart, start, end) { + if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer') + if (!start) start = 0 + if (!end && end !== 0) end = this.length + if (targetStart >= target.length) targetStart = target.length + if (!targetStart) targetStart = 0 + if (end > 0 && end < start) end = start + + // Copy 0 bytes; we're done + if (end === start) return 0 + if (target.length === 0 || this.length === 0) return 0 + + // Fatal error conditions + if (targetStart < 0) { + throw new RangeError('targetStart out of bounds') + } + if (start < 0 || start >= this.length) throw new RangeError('Index out of range') + if (end < 0) throw new RangeError('sourceEnd out of bounds') + + // Are we oob? 
+ if (end > this.length) end = this.length + if (target.length - targetStart < end - start) { + end = target.length - targetStart + start + } + + var len = end - start + + if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') { + // Use built-in when available, missing from IE11 + this.copyWithin(targetStart, start, end) + } else { + Uint8Array.prototype.set.call( + target, + this.subarray(start, end), + targetStart + ) + } + + return len +} + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill (val, start, end, encoding) { + // Handle string cases: + if (typeof val === 'string') { + if (typeof start === 'string') { + encoding = start + start = 0 + end = this.length + } else if (typeof end === 'string') { + encoding = end + end = this.length + } + if (encoding !== undefined && typeof encoding !== 'string') { + throw new TypeError('encoding must be a string') + } + if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + if (val.length === 1) { + var code = val.charCodeAt(0) + if ((encoding === 'utf8' && code < 128) || + encoding === 'latin1') { + // Fast path: If `val` fits into a single byte, use that numeric value. + val = code + } + } + } else if (typeof val === 'number') { + val = val & 255 + } else if (typeof val === 'boolean') { + val = Number(val) + } + + // Invalid ranges are not set to a default, so can range check early. + if (start < 0 || this.length < start || this.length < end) { + throw new RangeError('Out of range index') + } + + if (end <= start) { + return this + } + + start = start >>> 0 + end = end === undefined ? this.length : end >>> 0 + + if (!val) val = 0 + + var i + if (typeof val === 'number') { + for (i = start; i < end; ++i) { + this[i] = val + } + } else { + var bytes = Buffer.isBuffer(val) + ? 
val + : Buffer.from(val, encoding) + var len = bytes.length + if (len === 0) { + throw new TypeError('The value "' + val + + '" is invalid for argument "value"') + } + for (i = 0; i < end - start; ++i) { + this[i + start] = bytes[i % len] + } + } + + return this +} + +// HELPER FUNCTIONS +// ================ + +var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g + +function base64clean (str) { + // Node takes equal signs as end of the Base64 encoding + str = str.split('=')[0] + // Node strips out invalid characters like \n and \t from the string, base64-js does not + str = str.trim().replace(INVALID_BASE64_RE, '') + // Node converts strings with length < 2 to '' + if (str.length < 2) return '' + // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not + while (str.length % 4 !== 0) { + str = str + '=' + } + return str +} + +function utf8ToBytes (string, units) { + units = units || Infinity + var codePoint + var length = string.length + var leadSurrogate = null + var bytes = [] + + for (var i = 0; i < length; ++i) { + codePoint = string.charCodeAt(i) + + // is surrogate component + if (codePoint > 0xD7FF && codePoint < 0xE000) { + // last char was a lead + if (!leadSurrogate) { + // no lead yet + if (codePoint > 0xDBFF) { + // unexpected trail + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } else if (i + 1 === length) { + // unpaired lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } + + // valid lead + leadSurrogate = codePoint + + continue + } + + // 2 leads in a row + if (codePoint < 0xDC00) { + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + leadSurrogate = codePoint + continue + } + + // valid surrogate pair + codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 + } else if (leadSurrogate) { + // valid bmp char, but last char was a lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + } + + leadSurrogate = null + + // encode utf8 + if (codePoint < 0x80) { + if ((units -= 1) < 0) break + bytes.push(codePoint) + } else if (codePoint < 0x800) { + if ((units -= 2) < 0) break + bytes.push( + codePoint >> 0x6 | 0xC0, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x10000) { + if ((units -= 3) < 0) break + bytes.push( + codePoint >> 0xC | 0xE0, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x110000) { + if ((units -= 4) < 0) break + bytes.push( + codePoint >> 0x12 | 0xF0, + codePoint >> 0xC & 0x3F | 0x80, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else { + throw new Error('Invalid code point') + } + } + + return bytes +} + +function asciiToBytes (str) { + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + // Node's code seems to be doing this and not & 0x7F.. + byteArray.push(str.charCodeAt(i) & 0xFF) + } + return byteArray +} + +function utf16leToBytes (str, units) { + var c, hi, lo + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + if ((units -= 2) < 0) break + + c = str.charCodeAt(i) + hi = c >> 8 + lo = c % 256 + byteArray.push(lo) + byteArray.push(hi) + } + + return byteArray +} + +function base64ToBytes (str) { + return base64.toByteArray(base64clean(str)) +} + +function blitBuffer (src, dst, offset, length) { + for (var i = 0; i < length; ++i) { + if ((i + offset >= dst.length) || (i >= src.length)) break + dst[i + offset] = src[i] + } + return i +} + +// ArrayBuffer or Uint8Array objects from other contexts (i.e. 
iframes) do not pass +// the `instanceof` check but they should be treated as of that type. +// See: https://github.com/feross/buffer/issues/166 +function isInstance (obj, type) { + return obj instanceof type || + (obj != null && obj.constructor != null && obj.constructor.name != null && + obj.constructor.name === type.name) +} +function numberIsNaN (obj) { + // For IE11 support + return obj !== obj // eslint-disable-line no-self-compare +} + +// Create lookup table for `toString('hex')` +// See: https://github.com/feross/buffer/issues/219 +var hexSliceLookupTable = (function () { + var alphabet = '0123456789abcdef' + var table = new Array(256) + for (var i = 0; i < 16; ++i) { + var i16 = i * 16 + for (var j = 0; j < 16; ++j) { + table[i16 + j] = alphabet[i] + alphabet[j] + } + } + return table +})() diff --git a/node_modules/buffer/package.json b/node_modules/buffer/package.json new file mode 100644 index 00000000..3b1b4986 --- /dev/null +++ b/node_modules/buffer/package.json @@ -0,0 +1,96 @@ +{ + "name": "buffer", + "description": "Node.js Buffer API, for the browser", + "version": "5.7.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/buffer/issues" + }, + "contributors": [ + "Romain Beauxis ", + "James Halliday " + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + }, + "devDependencies": { + "airtap": "^3.0.0", + "benchmark": "^2.1.4", + "browserify": "^17.0.0", + "concat-stream": "^2.0.0", + "hyperquest": "^2.1.3", + "is-buffer": "^2.0.4", + "is-nan": "^1.3.0", + "split": "^1.0.1", + "standard": "*", + "tape": "^5.0.1", + "through2": "^4.0.2", + "uglify-js": "^3.11.3" + }, + "homepage": "https://github.com/feross/buffer", + "jspm": { + "map": { + "./index.js": { + "node": "@node/buffer" + } + } + }, + "keywords": [ + "arraybuffer", + "browser", + "browserify", + "buffer", + "compatible", + "dataview", + "uint8array" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/buffer.git" + }, + "scripts": { + "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html", + "perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js", + "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c", + "test": "standard && node ./bin/test.js", + "test-browser-es5": "airtap -- test/*.js", + "test-browser-es5-local": "airtap --local -- test/*.js", + "test-browser-es6": "airtap -- test/*.js test/node/*.js", + "test-browser-es6-local": "airtap --local -- test/*.js test/node/*.js", + "test-node": "tape test/*.js test/node/*.js", + "update-authors": "./bin/update-authors.sh" + }, + "standard": { + "ignore": [ + "test/node/**/*.js", + "test/common.js", + "test/_polyfill.js", + "perf/**/*.js" + ], + "globals": [ + "SharedArrayBuffer" + ] + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/bytes/History.md b/node_modules/bytes/History.md new file mode 100644 index 00000000..d60ce0e6 --- /dev/null +++ 
b/node_modules/bytes/History.md @@ -0,0 +1,97 @@ +3.1.2 / 2022-01-27 +================== + + * Fix return value for un-parsable strings + +3.1.1 / 2021-11-15 +================== + + * Fix "thousandsSeparator" incorrecting formatting fractional part + +3.1.0 / 2019-01-22 +================== + + * Add petabyte (`pb`) support + +3.0.0 / 2017-08-31 +================== + + * Change "kB" to "KB" in format output + * Remove support for Node.js 0.6 + * Remove support for ComponentJS + +2.5.0 / 2017-03-24 +================== + + * Add option "unit" + +2.4.0 / 2016-06-01 +================== + + * Add option "unitSeparator" + +2.3.0 / 2016-02-15 +================== + + * Drop partial bytes on all parsed units + * Fix non-finite numbers to `.format` to return `null` + * Fix parsing byte string that looks like hex + * perf: hoist regular expressions + +2.2.0 / 2015-11-13 +================== + + * add option "decimalPlaces" + * add option "fixedDecimals" + +2.1.0 / 2015-05-21 +================== + + * add `.format` export + * add `.parse` export + +2.0.2 / 2015-05-20 +================== + + * remove map recreation + * remove unnecessary object construction + +2.0.1 / 2015-05-07 +================== + + * fix browserify require + * remove node.extend dependency + +2.0.0 / 2015-04-12 +================== + + * add option "case" + * add option "thousandsSeparator" + * return "null" on invalid parse input + * support proper round-trip: bytes(bytes(num)) === num + * units no longer case sensitive when parsing + +1.0.0 / 2014-05-05 +================== + + * add negative support. fixes #6 + +0.3.0 / 2014-03-19 +================== + + * added terabyte support + +0.2.1 / 2013-04-01 +================== + + * add .component + +0.2.0 / 2012-10-28 +================== + + * bytes(200).should.eql('200b') + +0.1.0 / 2012-07-04 +================== + + * add bytes to string conversion [yields] diff --git a/node_modules/bytes/LICENSE b/node_modules/bytes/LICENSE new file mode 100644 index 00000000..63e95a96 --- /dev/null +++ b/node_modules/bytes/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015 Jed Watson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/bytes/Readme.md b/node_modules/bytes/Readme.md new file mode 100644 index 00000000..5790e23e --- /dev/null +++ b/node_modules/bytes/Readme.md @@ -0,0 +1,152 @@ +# Bytes utility + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Utility to parse a string bytes (ex: `1TB`) to bytes (`1099511627776`) and vice-versa. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install bytes +``` + +## Usage + +```js +var bytes = require('bytes'); +``` + +#### bytes(number|string value, [options]): number|string|null + +Default export function. Delegates to either `bytes.format` or `bytes.parse` based on the type of `value`. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number`|`string` | Number value to format or string value to parse | +| options | `Object` | Conversion options for `format` | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`number`|`null` | Return null upon error. Numeric value in bytes, or string value otherwise. | + +**Example** + +```js +bytes(1024); +// output: '1KB' + +bytes('1KB'); +// output: 1024 +``` + +#### bytes.format(number value, [options]): string|null + +Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is + rounded. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number` | Value in bytes | +| options | `Object` | Conversion options | + +**Options** + +| Property | Type | Description | +|-------------------|--------|-----------------------------------------------------------------------------------------| +| decimalPlaces | `number`|`null` | Maximum number of decimal places to include in output. Default value to `2`. | +| fixedDecimals | `boolean`|`null` | Whether to always display the maximum number of decimal places. Default value to `false` | +| thousandsSeparator | `string`|`null` | Example of values: `' '`, `','` and `'.'`... Default value to `''`. | +| unit | `string`|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Default value to `''` (which means auto detect). | +| unitSeparator | `string`|`null` | Separator to use between number and unit. Default value to `''`. | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`null` | Return null upon error. String value otherwise. | + +**Example** + +```js +bytes.format(1024); +// output: '1KB' + +bytes.format(1000); +// output: '1000B' + +bytes.format(1000, {thousandsSeparator: ' '}); +// output: '1 000B' + +bytes.format(1024 * 1.7, {decimalPlaces: 0}); +// output: '2KB' + +bytes.format(1024, {unitSeparator: ' '}); +// output: '1 KB' +``` + +#### bytes.parse(string|number value): number|null + +Parse the string value into an integer in bytes. If no unit is given, or `value` +is a number, it is assumed the value is in bytes. 
+ +Supported units and abbreviations are as follows and are case-insensitive: + + * `b` for bytes + * `kb` for kilobytes + * `mb` for megabytes + * `gb` for gigabytes + * `tb` for terabytes + * `pb` for petabytes + +The units are in powers of two, not ten. This means 1kb = 1024b according to this parser. + +**Arguments** + +| Name | Type | Description | +|---------------|--------|--------------------| +| value | `string`|`number` | String to parse, or number in bytes. | + +**Returns** + +| Name | Type | Description | +|---------|-------------|-------------------------| +| results | `number`|`null` | Return null upon error. Value in bytes otherwise. | + +**Example** + +```js +bytes.parse('1KB'); +// output: 1024 + +bytes.parse('1024'); +// output: 1024 + +bytes.parse(1024); +// output: 1024 +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/visionmedia/bytes.js/master?label=ci +[ci-url]: https://github.com/visionmedia/bytes.js/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master +[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master +[downloads-image]: https://badgen.net/npm/dm/bytes +[downloads-url]: https://npmjs.org/package/bytes +[npm-image]: https://badgen.net/npm/v/bytes +[npm-url]: https://npmjs.org/package/bytes diff --git a/node_modules/bytes/index.js b/node_modules/bytes/index.js new file mode 100644 index 00000000..6f2d0f89 --- /dev/null +++ b/node_modules/bytes/index.js @@ -0,0 +1,170 @@ +/*! + * bytes + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015 Jed Watson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = bytes; +module.exports.format = format; +module.exports.parse = parse; + +/** + * Module variables. + * @private + */ + +var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g; + +var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/; + +var map = { + b: 1, + kb: 1 << 10, + mb: 1 << 20, + gb: 1 << 30, + tb: Math.pow(1024, 4), + pb: Math.pow(1024, 5), +}; + +var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i; + +/** + * Convert the given value in bytes into a string or parse to string to an integer in bytes. + * + * @param {string|number} value + * @param {{ + * case: [string], + * decimalPlaces: [number] + * fixedDecimals: [boolean] + * thousandsSeparator: [string] + * unitSeparator: [string] + * }} [options] bytes options. + * + * @returns {string|number|null} + */ + +function bytes(value, options) { + if (typeof value === 'string') { + return parse(value); + } + + if (typeof value === 'number') { + return format(value, options); + } + + return null; +} + +/** + * Format the given value in bytes into a string. + * + * If the value is negative, it is kept as such. If it is a float, + * it is rounded. + * + * @param {number} value + * @param {object} [options] + * @param {number} [options.decimalPlaces=2] + * @param {number} [options.fixedDecimals=false] + * @param {string} [options.thousandsSeparator=] + * @param {string} [options.unit=] + * @param {string} [options.unitSeparator=] + * + * @returns {string|null} + * @public + */ + +function format(value, options) { + if (!Number.isFinite(value)) { + return null; + } + + var mag = Math.abs(value); + var thousandsSeparator = (options && options.thousandsSeparator) || ''; + var unitSeparator = (options && options.unitSeparator) || ''; + var decimalPlaces = (options && options.decimalPlaces !== undefined) ? 
options.decimalPlaces : 2; + var fixedDecimals = Boolean(options && options.fixedDecimals); + var unit = (options && options.unit) || ''; + + if (!unit || !map[unit.toLowerCase()]) { + if (mag >= map.pb) { + unit = 'PB'; + } else if (mag >= map.tb) { + unit = 'TB'; + } else if (mag >= map.gb) { + unit = 'GB'; + } else if (mag >= map.mb) { + unit = 'MB'; + } else if (mag >= map.kb) { + unit = 'KB'; + } else { + unit = 'B'; + } + } + + var val = value / map[unit.toLowerCase()]; + var str = val.toFixed(decimalPlaces); + + if (!fixedDecimals) { + str = str.replace(formatDecimalsRegExp, '$1'); + } + + if (thousandsSeparator) { + str = str.split('.').map(function (s, i) { + return i === 0 + ? s.replace(formatThousandsRegExp, thousandsSeparator) + : s + }).join('.'); + } + + return str + unitSeparator + unit; +} + +/** + * Parse the string value into an integer in bytes. + * + * If no unit is given, it is assumed the value is in bytes. + * + * @param {number|string} val + * + * @returns {number|null} + * @public + */ + +function parse(val) { + if (typeof val === 'number' && !isNaN(val)) { + return val; + } + + if (typeof val !== 'string') { + return null; + } + + // Test if the string passed is valid + var results = parseRegExp.exec(val); + var floatValue; + var unit = 'b'; + + if (!results) { + // Nothing could be extracted from the given string + floatValue = parseInt(val, 10); + unit = 'b' + } else { + // Retrieve the value and the unit + floatValue = parseFloat(results[1]); + unit = results[4].toLowerCase(); + } + + if (isNaN(floatValue)) { + return null; + } + + return Math.floor(map[unit] * floatValue); +} diff --git a/node_modules/bytes/package.json b/node_modules/bytes/package.json new file mode 100644 index 00000000..f2b6a8b0 --- /dev/null +++ b/node_modules/bytes/package.json @@ -0,0 +1,42 @@ +{ + "name": "bytes", + "description": "Utility to parse a string bytes to bytes and vice-versa", + "version": "3.1.2", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "contributors": [ + "Jed Watson ", + "Théo FIDRY " + ], + "license": "MIT", + "keywords": [ + "byte", + "bytes", + "utility", + "parse", + "parser", + "convert", + "converter" + ], + "repository": "visionmedia/bytes.js", + "devDependencies": { + "eslint": "7.32.0", + "eslint-plugin-markdown": "2.2.1", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "History.md", + "LICENSE", + "Readme.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --check-leaks --reporter spec", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/cacheable-request/LICENSE b/node_modules/cacheable-request/LICENSE new file mode 100644 index 00000000..f27ee9b4 --- /dev/null +++ b/node_modules/cacheable-request/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Luke Childs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/cacheable-request/README.md b/node_modules/cacheable-request/README.md new file mode 100644 index 00000000..84a55682 --- /dev/null +++ b/node_modules/cacheable-request/README.md @@ -0,0 +1,190 @@ +# cacheable-request + +> Wrap native HTTP requests with RFC compliant cache support + +[![Build Status](https://travis-ci.org/lukechilds/cacheable-request.svg?branch=master)](https://travis-ci.org/lukechilds/cacheable-request) +[![Coverage Status](https://coveralls.io/repos/github/lukechilds/cacheable-request/badge.svg?branch=master)](https://coveralls.io/github/lukechilds/cacheable-request?branch=master) +[![npm](https://img.shields.io/npm/dm/cacheable-request.svg)](https://www.npmjs.com/package/cacheable-request) +[![npm](https://img.shields.io/npm/v/cacheable-request.svg)](https://www.npmjs.com/package/cacheable-request) + +[RFC 7234](http://httpwg.org/specs/rfc7234.html) compliant HTTP caching for native Node.js HTTP/HTTPS requests. Caching works out of the box in memory or is easily pluggable with a wide range of storage adapters. + +**Note:** This is a low level wrapper around the core HTTP modules, it's not a high level request library. + +## Features + +- Only stores cacheable responses as defined by RFC 7234 +- Fresh cache entries are served directly from cache +- Stale cache entries are revalidated with `If-None-Match`/`If-Modified-Since` headers +- 304 responses from revalidation requests use cached body +- Updates `Age` header on cached responses +- Can completely bypass cache on a per request basis +- In memory cache by default +- Official support for Redis, MongoDB, SQLite, PostgreSQL and MySQL storage adapters +- Easily plug in your own or third-party storage adapters +- If DB connection fails, cache is automatically bypassed ([disabled by default](#optsautomaticfailover)) +- Adds cache support to any existing HTTP code with minimal changes +- Uses [http-cache-semantics](https://github.com/pornel/http-cache-semantics) internally for HTTP RFC 7234 compliance + +## Install + +```shell +npm install --save cacheable-request +``` + +## Usage + +```js +const http = require('http'); +const CacheableRequest = require('cacheable-request'); + +// Then instead of +const req = http.request('http://example.com', cb); +req.end(); + +// You can do +const cacheableRequest = new CacheableRequest(http.request); +const cacheReq = cacheableRequest('http://example.com', cb); +cacheReq.on('request', req => req.end()); +// Future requests to 'example.com' will be returned from cache if still valid + +// You pass in any other http.request API compatible method to be wrapped with cache support: +const cacheableRequest = new CacheableRequest(https.request); +const cacheableRequest = new CacheableRequest(electron.net); +``` + +## Storage Adapters + +`cacheable-request` uses [Keyv](https://github.com/lukechilds/keyv) to support a wide range of storage adapters. 
+ +For example, to use Redis as a cache backend, you just need to install the official Redis Keyv storage adapter: + +``` +npm install --save @keyv/redis +``` + +And then you can pass `CacheableRequest` your connection string: + +```js +const cacheableRequest = new CacheableRequest(http.request, 'redis://user:pass@localhost:6379'); +``` + +[View all official Keyv storage adapters.](https://github.com/lukechilds/keyv#official-storage-adapters) + +Keyv also supports anything that follows the Map API so it's easy to write your own storage adapter or use a third-party solution. + +e.g The following are all valid storage adapters + +```js +const storageAdapter = new Map(); +// or +const storageAdapter = require('./my-storage-adapter'); +// or +const QuickLRU = require('quick-lru'); +const storageAdapter = new QuickLRU({ maxSize: 1000 }); + +const cacheableRequest = new CacheableRequest(http.request, storageAdapter); +``` + +View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters. + +## API + +### new cacheableRequest(request, [storageAdapter]) + +Returns the provided request function wrapped with cache support. + +#### request + +Type: `function` + +Request function to wrap with cache support. Should be [`http.request`](https://nodejs.org/api/http.html#http_http_request_options_callback) or a similar API compatible request function. + +#### storageAdapter + +Type: `Keyv storage adapter`
+Default: `new Map()` + +A [Keyv](https://github.com/lukechilds/keyv) storage adapter instance, or connection string if using with an official Keyv storage adapter. + +### Instance + +#### cacheableRequest(opts, [cb]) + +Returns an event emitter. + +##### opts + +Type: `object`, `string` + +Any of the default request functions options plus: + +###### opts.cache + +Type: `boolean`
+Default: `true` + +If the cache should be used. Setting this to false will completely bypass the cache for the current request. + +###### opts.strictTtl + +Type: `boolean`
+Default: `false` + +If set to `false`, after a cached resource's TTL expires it is kept in the cache and will be revalidated on the next request with `If-None-Match`/`If-Modified-Since` headers. + +If set to `true` once a cached resource has expired it is deleted and will have to be re-requested. + +###### opts.automaticFailover + +Type: `boolean`
+Default: `false` + +When set to `true`, if the DB connection fails we will automatically fallback to a network request. DB errors will still be emitted to notify you of the problem even though the request callback may succeed. + +##### cb + +Type: `function` + +The callback function which will receive the response as an argument. + +The response can be either a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage) or a [responselike object](https://github.com/lukechilds/responselike). The response will also have a `fromCache` property set with a boolean value. + +##### .on('request', request) + +`request` event to get the request object of the request. + +**Note:** This event will only fire if an HTTP request is actually made, not when a response is retrieved from cache. However, you should always handle the `request` event to end the request and handle any potential request errors. + +##### .on('response', response) + +`response` event to get the response object from the HTTP request or cache. + +##### .on('error', error) + +`error` event emitted in case of an error with the cache. + +Errors emitted here will be an instance of `CacheableRequest.RequestError` or `CacheableRequest.CacheError`. You will only ever receive a `RequestError` if the request function throws (normally caused by invalid user input). Normal request errors should be handled inside the `request` event. + +To properly handle all error scenarios you should use the following pattern: + +```js +cacheableRequest('example.com', cb) + .on('error', err => { + if (err instanceof CacheableRequest.CacheError) { + handleCacheError(err); // Cache error + } else if (err instanceof CacheableRequest.RequestError) { + handleRequestError(err); // Request function thrown + } + }) + .on('request', req => { + req.on('error', handleRequestError); // Request error emitted + req.end(); + }); +``` + +**Note:** Database connection errors are emitted here, however `cacheable-request` will attempt to re-request the resource and bypass the cache on a connection error. Therefore a database connection error doesn't necessarily mean the request won't be fulfilled. + +## License + +MIT © Luke Childs diff --git a/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js b/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js new file mode 100644 index 00000000..ae45d3d9 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,51 @@ +'use strict'; +const PassThrough = require('stream').PassThrough; + +module.exports = opts => { + opts = Object.assign({}, opts); + + const array = opts.array; + let encoding = opts.encoding; + const buffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } + + if (buffer) { + encoding = null; + } + + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + stream.on('data', chunk => { + ret.push(chunk); + + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return ret; + } + + return buffer ? 
Buffer.concat(ret, len) : ret.join(''); + }; + + stream.getBufferedLength = () => len; + + return stream; +}; diff --git a/node_modules/cacheable-request/node_modules/get-stream/index.js b/node_modules/cacheable-request/node_modules/get-stream/index.js new file mode 100644 index 00000000..2dc5ee96 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/index.js @@ -0,0 +1,51 @@ +'use strict'; +const bufferStream = require('./buffer-stream'); + +function getStream(inputStream, opts) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + opts = Object.assign({maxBuffer: Infinity}, opts); + + const maxBuffer = opts.maxBuffer; + let stream; + let clean; + + const p = new Promise((resolve, reject) => { + const error = err => { + if (err) { // null check + err.bufferedData = stream.getBufferedValue(); + } + + reject(err); + }; + + stream = bufferStream(opts); + inputStream.once('error', error); + inputStream.pipe(stream); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + reject(new Error('maxBuffer exceeded')); + } + }); + stream.once('error', error); + stream.on('end', resolve); + + clean = () => { + // some streams doesn't implement the `stream.Readable` interface correctly + if (inputStream.unpipe) { + inputStream.unpipe(stream); + } + }; + }); + + p.then(clean, clean); + + return p.then(() => stream.getBufferedValue()); +} + +module.exports = getStream; +module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'})); +module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true})); diff --git a/node_modules/cacheable-request/node_modules/get-stream/license b/node_modules/cacheable-request/node_modules/get-stream/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
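+The `get-stream` files above (`buffer-stream.js` and `index.js`) buffer incoming chunks on a `PassThrough` stream and wrap that buffering in a promise that rejects once `maxBuffer` is exceeded. A minimal consumption sketch (the `unicorn.txt` fixture and the 1 MiB limit are illustrative only, not part of the module):
+
+```js
+const fs = require('fs');
+const getStream = require('get-stream');
+
+// Read a file as a Buffer, but give up if it grows past 1 MiB.
+const stream = fs.createReadStream('unicorn.txt'); // hypothetical fixture
+
+getStream.buffer(stream, {maxBuffer: 1024 * 1024})
+  .then(buf => console.log(buf.length))
+  .catch(err => {
+    // Rejected with 'maxBuffer exceeded' when the limit is hit,
+    // or with the stream's own error (which then carries `err.bufferedData`).
+    console.error(err.message);
+  });
+```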
diff --git a/node_modules/cacheable-request/node_modules/get-stream/package.json b/node_modules/cacheable-request/node_modules/get-stream/package.json new file mode 100644 index 00000000..2f2adf0d --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/package.json @@ -0,0 +1,48 @@ +{ + "name": "get-stream", + "version": "3.0.0", + "description": "Get a stream as a string, buffer, or array", + "license": "MIT", + "repository": "sindresorhus/get-stream", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "buffer-stream.js" + ], + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "str", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object", + "obj" + ], + "devDependencies": { + "ava": "*", + "into-stream": "^3.0.0", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/cacheable-request/node_modules/get-stream/readme.md b/node_modules/cacheable-request/node_modules/get-stream/readme.md new file mode 100644 index 00000000..73b188fb --- /dev/null +++ b/node_modules/cacheable-request/node_modules/get-stream/readme.md @@ -0,0 +1,117 @@ +# get-stream [![Build Status](https://travis-ci.org/sindresorhus/get-stream.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stream) + +> Get a stream as a string, buffer, or array + + +## Install + +``` +$ npm install --save get-stream +``` + + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); +const stream = fs.createReadStream('unicorn.txt'); + +getStream(stream).then(str => { + console.log(str); + /* + ,,))))))));, + __)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +}); +``` + + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, [options]) + +Get the `stream` as a string. + +#### options + +##### encoding + +Type: `string`
+Default: `utf8` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number`
+Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected. + +### getStream.buffer(stream, [options]) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, [options]) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +getStream(streamThatErrorsAtTheEnd('unicorn')) + .catch(err => { + console.log(err.bufferedData); + //=> 'unicorn' + }); +``` + + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. 
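+As a concrete illustration of choosing the output type explicitly, a short sketch of the object-mode case for `getStream.array` (the inline `Readable` source below exists only for this example):
+
+```js
+const {Readable} = require('stream');
+const getStream = require('get-stream');
+
+// With no `encoding` set, getStream.array collects object-mode values as-is.
+const source = new Readable({objectMode: true, read() {}});
+source.push({id: 1});
+source.push({id: 2});
+source.push(null);
+
+getStream.array(source).then(items => {
+  console.log(items);
+  //=> [ { id: 1 }, { id: 2 } ]
+});
+```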
+ + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/index.js b/node_modules/cacheable-request/node_modules/lowercase-keys/index.js new file mode 100644 index 00000000..b8d88983 --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/index.js @@ -0,0 +1,11 @@ +'use strict'; +module.exports = function (obj) { + var ret = {}; + var keys = Object.keys(Object(obj)); + + for (var i = 0; i < keys.length; i++) { + ret[keys[i].toLowerCase()] = obj[keys[i]]; + } + + return ret; +}; diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/package.json b/node_modules/cacheable-request/node_modules/lowercase-keys/package.json new file mode 100644 index 00000000..7e4e396f --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/package.json @@ -0,0 +1,35 @@ +{ + "name": "lowercase-keys", + "version": "1.0.0", + "description": "Lowercase the keys of an object", + "license": "MIT", + "repository": "sindresorhus/lowercase-keys", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "object", + "assign", + "extend", + "properties", + "lowercase", + "lower-case", + "case", + "keys", + "key" + ], + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md b/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md new file mode 100644 index 00000000..dc65770a --- /dev/null +++ b/node_modules/cacheable-request/node_modules/lowercase-keys/readme.md @@ -0,0 +1,33 @@ +# lowercase-keys [![Build Status](https://travis-ci.org/sindresorhus/lowercase-keys.svg?branch=master)](https://travis-ci.org/sindresorhus/lowercase-keys) + +> Lowercase the keys of an object + + +## Install + +``` +$ npm install --save lowercase-keys +``` + + +## Usage + +```js +var lowercaseKeys = require('lowercase-keys'); + +lowercaseKeys({FOO: true, bAr: false}); +//=> {foo: true, bar: false} +``` + + +## API + +### lowercaseKeys(object) + +Lowercases the keys and returns a new object. 
+ + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/cacheable-request/package.json b/node_modules/cacheable-request/package.json new file mode 100644 index 00000000..04483124 --- /dev/null +++ b/node_modules/cacheable-request/package.json @@ -0,0 +1,57 @@ +{ + "name": "cacheable-request", + "version": "2.1.4", + "description": "Wrap native HTTP requests with RFC compliant cache support", + "main": "src/index.js", + "scripts": { + "test": "xo && nyc ava", + "coverage": "nyc report --reporter=text-lcov | coveralls" + }, + "xo": { + "extends": "xo-lukechilds" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/lukechilds/cacheable-request.git" + }, + "keywords": [ + "HTTP", + "HTTPS", + "cache", + "caching", + "layer", + "cacheable", + "RFC 7234", + "RFC", + "7234", + "compliant" + ], + "author": "Luke Childs (http://lukechilds.co.uk)", + "license": "MIT", + "bugs": { + "url": "https://github.com/lukechilds/cacheable-request/issues" + }, + "homepage": "https://github.com/lukechilds/cacheable-request", + "dependencies": { + "clone-response": "1.0.2", + "get-stream": "3.0.0", + "http-cache-semantics": "3.8.1", + "keyv": "3.0.0", + "lowercase-keys": "1.0.0", + "normalize-url": "2.0.1", + "responselike": "1.0.2" + }, + "devDependencies": { + "@keyv/sqlite": "^1.2.6", + "ava": "^0.24.0", + "coveralls": "^3.0.0", + "create-test-server": "^2.0.0", + "delay": "^2.0.0", + "eslint-config-xo-lukechilds": "^1.0.0", + "nyc": "^11.0.2", + "pify": "^3.0.0", + "sqlite3": "^3.1.9", + "this": "^1.0.2", + "xo": "^0.19.0" + } +} diff --git a/node_modules/cacheable-request/src/index.js b/node_modules/cacheable-request/src/index.js new file mode 100644 index 00000000..1935b2db --- /dev/null +++ b/node_modules/cacheable-request/src/index.js @@ -0,0 +1,155 @@ +'use strict'; + +const EventEmitter = require('events'); +const urlLib = require('url'); +const normalizeUrl = require('normalize-url'); +const getStream = require('get-stream'); +const CachePolicy = require('http-cache-semantics'); +const Response = require('responselike'); +const lowercaseKeys = require('lowercase-keys'); +const cloneResponse = require('clone-response'); +const Keyv = require('keyv'); + +class CacheableRequest { + constructor(request, cacheAdapter) { + if (typeof request !== 'function') { + throw new TypeError('Parameter `request` must be a function'); + } + + this.cache = new Keyv({ + uri: typeof cacheAdapter === 'string' && cacheAdapter, + store: typeof cacheAdapter !== 'string' && cacheAdapter, + namespace: 'cacheable-request' + }); + + return this.createCacheableRequest(request); + } + + createCacheableRequest(request) { + return (opts, cb) => { + if (typeof opts === 'string') { + opts = urlLib.parse(opts); + } + opts = Object.assign({ + headers: {}, + method: 'GET', + cache: true, + strictTtl: false, + automaticFailover: false + }, opts); + opts.headers = lowercaseKeys(opts.headers); + + const ee = new EventEmitter(); + const url = normalizeUrl(urlLib.format(opts)); + const key = `${opts.method}:${url}`; + let revalidate = false; + let madeRequest = false; + + const makeRequest = opts => { + madeRequest = true; + const handler = response => { + if (revalidate) { + const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response); + if (!revalidatedPolicy.modified) { + const headers = revalidatedPolicy.policy.responseHeaders(); + response = new Response(response.statusCode, headers, revalidate.body, revalidate.url); + 
response.cachePolicy = revalidatedPolicy.policy; + response.fromCache = true; + } + } + + if (!response.fromCache) { + response.cachePolicy = new CachePolicy(opts, response); + response.fromCache = false; + } + + let clonedResponse; + if (opts.cache && response.cachePolicy.storable()) { + clonedResponse = cloneResponse(response); + getStream.buffer(response) + .then(body => { + const value = { + cachePolicy: response.cachePolicy.toObject(), + url: response.url, + statusCode: response.fromCache ? revalidate.statusCode : response.statusCode, + body + }; + const ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined; + return this.cache.set(key, value, ttl); + }) + .catch(err => ee.emit('error', new CacheableRequest.CacheError(err))); + } else if (opts.cache && revalidate) { + this.cache.delete(key) + .catch(err => ee.emit('error', new CacheableRequest.CacheError(err))); + } + + ee.emit('response', clonedResponse || response); + if (typeof cb === 'function') { + cb(clonedResponse || response); + } + }; + + try { + const req = request(opts, handler); + ee.emit('request', req); + } catch (err) { + ee.emit('error', new CacheableRequest.RequestError(err)); + } + }; + + const get = opts => Promise.resolve() + .then(() => opts.cache ? this.cache.get(key) : undefined) + .then(cacheEntry => { + if (typeof cacheEntry === 'undefined') { + return makeRequest(opts); + } + + const policy = CachePolicy.fromObject(cacheEntry.cachePolicy); + if (policy.satisfiesWithoutRevalidation(opts)) { + const headers = policy.responseHeaders(); + const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url); + response.cachePolicy = policy; + response.fromCache = true; + + ee.emit('response', response); + if (typeof cb === 'function') { + cb(response); + } + } else { + revalidate = cacheEntry; + opts.headers = policy.revalidationHeaders(opts); + makeRequest(opts); + } + }); + + this.cache.on('error', err => ee.emit('error', new CacheableRequest.CacheError(err))); + + get(opts).catch(err => { + if (opts.automaticFailover && !madeRequest) { + makeRequest(opts); + } + ee.emit('error', new CacheableRequest.CacheError(err)); + }); + + return ee; + }; + } +} + +CacheableRequest.RequestError = class extends Error { + constructor(err) { + super(err.message); + this.name = 'RequestError'; + Object.assign(this, err); + } +}; + +CacheableRequest.CacheError = class extends Error { + constructor(err) { + super(err.message); + this.name = 'CacheError'; + Object.assign(this, err); + } +}; + +module.exports = CacheableRequest; diff --git a/node_modules/call-bind/.eslintignore b/node_modules/call-bind/.eslintignore new file mode 100644 index 00000000..404abb22 --- /dev/null +++ b/node_modules/call-bind/.eslintignore @@ -0,0 +1 @@ +coverage/ diff --git a/node_modules/call-bind/.eslintrc b/node_modules/call-bind/.eslintrc new file mode 100644 index 00000000..e5d3c9a9 --- /dev/null +++ b/node_modules/call-bind/.eslintrc @@ -0,0 +1,17 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "func-name-matching": 0, + "id-length": 0, + "new-cap": [2, { + "capIsNewExceptions": [ + "GetIntrinsic", + ], + }], + "no-magic-numbers": 0, + "operator-linebreak": [2, "before"], + }, +} diff --git a/node_modules/call-bind/.github/FUNDING.yml b/node_modules/call-bind/.github/FUNDING.yml new file mode 100644 index 00000000..c70c2ecd --- /dev/null +++ b/node_modules/call-bind/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace 
with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/call-bind +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/call-bind/.nycrc b/node_modules/call-bind/.nycrc new file mode 100644 index 00000000..1826526e --- /dev/null +++ b/node_modules/call-bind/.nycrc @@ -0,0 +1,13 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 86, + "statements": 85.93, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/call-bind/CHANGELOG.md b/node_modules/call-bind/CHANGELOG.md new file mode 100644 index 00000000..62a37279 --- /dev/null +++ b/node_modules/call-bind/CHANGELOG.md @@ -0,0 +1,42 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.0.2](https://github.com/ljharb/call-bind/compare/v1.0.1...v1.0.2) - 2021-01-11 + +### Commits + +- [Fix] properly include the receiver in the bound length [`dbae7bc`](https://github.com/ljharb/call-bind/commit/dbae7bc676c079a0d33c0a43e9ef92cb7b01345d) + +## [v1.0.1](https://github.com/ljharb/call-bind/compare/v1.0.0...v1.0.1) - 2021-01-08 + +### Commits + +- [Tests] migrate tests to Github Actions [`b6db284`](https://github.com/ljharb/call-bind/commit/b6db284c36f8ccd195b88a6764fe84b7223a0da1) +- [meta] do not publish github action workflow files [`ec7fe46`](https://github.com/ljharb/call-bind/commit/ec7fe46e60cfa4764ee943d2755f5e5a366e578e) +- [Fix] preserve original function’s length when possible [`adbceaa`](https://github.com/ljharb/call-bind/commit/adbceaa3cac4b41ea78bb19d7ccdbaaf7e0bdadb) +- [Tests] gather coverage data on every job [`d69e23c`](https://github.com/ljharb/call-bind/commit/d69e23cc65f101ba1d4c19bb07fa8eb0ec624be8) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`2fd3586`](https://github.com/ljharb/call-bind/commit/2fd3586c5d47b335364c14293114c6b625ae1f71) +- [Deps] update `get-intrinsic` [`f23e931`](https://github.com/ljharb/call-bind/commit/f23e9318cc271c2add8bb38cfded85ee7baf8eee) +- [Deps] update `get-intrinsic` [`72d9f44`](https://github.com/ljharb/call-bind/commit/72d9f44e184465ba8dd3fb48260bbcff234985f2) +- [meta] fix FUNDING.yml [`e723573`](https://github.com/ljharb/call-bind/commit/e723573438c5a68dcec31fb5d96ea6b7e4a93be8) +- [eslint] ignore coverage output [`15e76d2`](https://github.com/ljharb/call-bind/commit/15e76d28a5f43e504696401e5b31ebb78ee1b532) +- [meta] add Automatic Rebase and Require Allow Edits workflows [`8fa4dab`](https://github.com/ljharb/call-bind/commit/8fa4dabb23ba3dd7bb92c9571c1241c08b56e4b6) + +## v1.0.0 - 2020-10-30 + +### Commits + +- Initial commit [`306cf98`](https://github.com/ljharb/call-bind/commit/306cf98c7ec9e7ef66b653ec152277ac1381eb50) +- Tests [`e10d0bb`](https://github.com/ljharb/call-bind/commit/e10d0bbdadc7a10ecedc9a1c035112d3e368b8df) +- Implementation 
[`43852ed`](https://github.com/ljharb/call-bind/commit/43852eda0f187327b7fad2423ca972149a52bd65) +- npm init [`408f860`](https://github.com/ljharb/call-bind/commit/408f860b773a2f610805fd3613d0d71bac1b6249) +- [meta] add Automatic Rebase and Require Allow Edits workflows [`fb349b2`](https://github.com/ljharb/call-bind/commit/fb349b2e48defbec8b5ec8a8395cc8f69f220b13) +- [meta] add `auto-changelog` [`c4001fc`](https://github.com/ljharb/call-bind/commit/c4001fc43031799ef908211c98d3b0fb2b60fde4) +- [meta] add "funding"; create `FUNDING.yml` [`d4d6d29`](https://github.com/ljharb/call-bind/commit/d4d6d2974a14bc2e98830468eda7fe6d6a776717) +- [Tests] add `npm run lint` [`dedfb98`](https://github.com/ljharb/call-bind/commit/dedfb98bd0ecefb08ddb9a94061bd10cde4332af) +- Only apps should have lockfiles [`54ac776`](https://github.com/ljharb/call-bind/commit/54ac77653db45a7361dc153d2f478e743f110650) +- [meta] add `safe-publish-latest` [`9ea8e43`](https://github.com/ljharb/call-bind/commit/9ea8e435b950ce9b705559cd651039f9bf40140f) diff --git a/node_modules/call-bind/LICENSE b/node_modules/call-bind/LICENSE new file mode 100644 index 00000000..48f05d01 --- /dev/null +++ b/node_modules/call-bind/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/call-bind/README.md b/node_modules/call-bind/README.md new file mode 100644 index 00000000..53649eb4 --- /dev/null +++ b/node_modules/call-bind/README.md @@ -0,0 +1,2 @@ +# call-bind +Robustly `.call.bind()` a function. 
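The vendored call-bind readme above stops at a one-line description. A minimal usage sketch, based on the package's own `index.js` and `callBound.js` shown below, where the bound function takes the receiver as its first argument:

```js
const callBind = require('call-bind');
const callBound = require('call-bind/callBound');

// call-bind a prototype method: the receiver becomes the first argument,
// so no `.call`/`this` juggling is needed at the call site
const slice = callBind(Array.prototype.slice);
console.log(slice([1, 2, 3], 1)); //=> [2, 3]

// callBound looks up an intrinsic by name and call-binds it when it is a
// `.prototype.` method
const toString = callBound('Object.prototype.toString');
console.log(toString(42)); //=> '[object Number]'
```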
diff --git a/node_modules/call-bind/callBound.js b/node_modules/call-bind/callBound.js new file mode 100644 index 00000000..8374adfd --- /dev/null +++ b/node_modules/call-bind/callBound.js @@ -0,0 +1,15 @@ +'use strict'; + +var GetIntrinsic = require('get-intrinsic'); + +var callBind = require('./'); + +var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf')); + +module.exports = function callBoundIntrinsic(name, allowMissing) { + var intrinsic = GetIntrinsic(name, !!allowMissing); + if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { + return callBind(intrinsic); + } + return intrinsic; +}; diff --git a/node_modules/call-bind/index.js b/node_modules/call-bind/index.js new file mode 100644 index 00000000..6fa3e4af --- /dev/null +++ b/node_modules/call-bind/index.js @@ -0,0 +1,47 @@ +'use strict'; + +var bind = require('function-bind'); +var GetIntrinsic = require('get-intrinsic'); + +var $apply = GetIntrinsic('%Function.prototype.apply%'); +var $call = GetIntrinsic('%Function.prototype.call%'); +var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply); + +var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true); +var $defineProperty = GetIntrinsic('%Object.defineProperty%', true); +var $max = GetIntrinsic('%Math.max%'); + +if ($defineProperty) { + try { + $defineProperty({}, 'a', { value: 1 }); + } catch (e) { + // IE 8 has a broken defineProperty + $defineProperty = null; + } +} + +module.exports = function callBind(originalFunction) { + var func = $reflectApply(bind, $call, arguments); + if ($gOPD && $defineProperty) { + var desc = $gOPD(func, 'length'); + if (desc.configurable) { + // original length, plus the receiver, minus any additional arguments (after the receiver) + $defineProperty( + func, + 'length', + { value: 1 + $max(0, originalFunction.length - (arguments.length - 1)) } + ); + } + } + return func; +}; + +var applyBind = function applyBind() { + return $reflectApply(bind, $apply, arguments); +}; + +if ($defineProperty) { + $defineProperty(module.exports, 'apply', { value: applyBind }); +} else { + module.exports.apply = applyBind; +} diff --git a/node_modules/call-bind/package.json b/node_modules/call-bind/package.json new file mode 100644 index 00000000..4360556a --- /dev/null +++ b/node_modules/call-bind/package.json @@ -0,0 +1,80 @@ +{ + "name": "call-bind", + "version": "1.0.2", + "description": "Robustly `.call.bind()` a function", + "main": "index.js", + "exports": { + ".": [ + { + "default": "./index.js" + }, + "./index.js" + ], + "./callBound": [ + { + "default": "./callBound.js" + }, + "./callBound.js" + ], + "./package.json": "./package.json" + }, + "scripts": { + "prepublish": "safe-publish-latest", + "lint": "eslint --ext=.js,.mjs .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/*'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/call-bind.git" + }, + "keywords": [ + "javascript", + "ecmascript", + "es", + "js", + "callbind", + "callbound", + "call", + "bind", + "bound", + "call-bind", + "call-bound", + "function", + "es-abstract" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + 
"bugs": { + "url": "https://github.com/ljharb/call-bind/issues" + }, + "homepage": "https://github.com/ljharb/call-bind#readme", + "devDependencies": { + "@ljharb/eslint-config": "^17.3.0", + "aud": "^1.1.3", + "auto-changelog": "^2.2.1", + "eslint": "^7.17.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^1.1.4", + "tape": "^5.1.1" + }, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + } +} diff --git a/node_modules/call-bind/test/callBound.js b/node_modules/call-bind/test/callBound.js new file mode 100644 index 00000000..209ce3cc --- /dev/null +++ b/node_modules/call-bind/test/callBound.js @@ -0,0 +1,55 @@ +'use strict'; + +var test = require('tape'); + +var callBound = require('../callBound'); + +test('callBound', function (t) { + // static primitive + t.equal(callBound('Array.length'), Array.length, 'Array.length yields itself'); + t.equal(callBound('%Array.length%'), Array.length, '%Array.length% yields itself'); + + // static non-function object + t.equal(callBound('Array.prototype'), Array.prototype, 'Array.prototype yields itself'); + t.equal(callBound('%Array.prototype%'), Array.prototype, '%Array.prototype% yields itself'); + t.equal(callBound('Array.constructor'), Array.constructor, 'Array.constructor yields itself'); + t.equal(callBound('%Array.constructor%'), Array.constructor, '%Array.constructor% yields itself'); + + // static function + t.equal(callBound('Date.parse'), Date.parse, 'Date.parse yields itself'); + t.equal(callBound('%Date.parse%'), Date.parse, '%Date.parse% yields itself'); + + // prototype primitive + t.equal(callBound('Error.prototype.message'), Error.prototype.message, 'Error.prototype.message yields itself'); + t.equal(callBound('%Error.prototype.message%'), Error.prototype.message, '%Error.prototype.message% yields itself'); + + // prototype function + t.notEqual(callBound('Object.prototype.toString'), Object.prototype.toString, 'Object.prototype.toString does not yield itself'); + t.notEqual(callBound('%Object.prototype.toString%'), Object.prototype.toString, '%Object.prototype.toString% does not yield itself'); + t.equal(callBound('Object.prototype.toString')(true), Object.prototype.toString.call(true), 'call-bound Object.prototype.toString calls into the original'); + t.equal(callBound('%Object.prototype.toString%')(true), Object.prototype.toString.call(true), 'call-bound %Object.prototype.toString% calls into the original'); + + t['throws']( + function () { callBound('does not exist'); }, + SyntaxError, + 'nonexistent intrinsic throws' + ); + t['throws']( + function () { callBound('does not exist', true); }, + SyntaxError, + 'allowMissing arg still throws for unknown intrinsic' + ); + + /* globals WeakRef: false */ + t.test('real but absent intrinsic', { skip: typeof WeakRef !== 'undefined' }, function (st) { + st['throws']( + function () { callBound('WeakRef'); }, + TypeError, + 'real but absent intrinsic throws' + ); + st.equal(callBound('WeakRef', true), undefined, 'allowMissing arg avoids exception'); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/call-bind/test/index.js b/node_modules/call-bind/test/index.js new file mode 100644 index 00000000..bf6769c7 --- /dev/null +++ b/node_modules/call-bind/test/index.js @@ -0,0 +1,66 @@ +'use strict'; + +var callBind = require('../'); +var bind = require('function-bind'); + +var test = 
require('tape'); + +/* + * older engines have length nonconfigurable + * in io.js v3, it is configurable except on bound functions, hence the .bind() + */ +var functionsHaveConfigurableLengths = !!( + Object.getOwnPropertyDescriptor + && Object.getOwnPropertyDescriptor(bind.call(function () {}), 'length').configurable +); + +test('callBind', function (t) { + var sentinel = { sentinel: true }; + var func = function (a, b) { + // eslint-disable-next-line no-invalid-this + return [this, a, b]; + }; + t.equal(func.length, 2, 'original function length is 2'); + t.deepEqual(func(), [undefined, undefined, undefined], 'unbound func with too few args'); + t.deepEqual(func(1, 2), [undefined, 1, 2], 'unbound func with right args'); + t.deepEqual(func(1, 2, 3), [undefined, 1, 2], 'unbound func with too many args'); + + var bound = callBind(func); + t.equal(bound.length, func.length + 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths }); + t.deepEqual(bound(), [undefined, undefined, undefined], 'bound func with too few args'); + t.deepEqual(bound(1, 2), [1, 2, undefined], 'bound func with right args'); + t.deepEqual(bound(1, 2, 3), [1, 2, 3], 'bound func with too many args'); + + var boundR = callBind(func, sentinel); + t.equal(boundR.length, func.length, 'function length is preserved', { skip: !functionsHaveConfigurableLengths }); + t.deepEqual(boundR(), [sentinel, undefined, undefined], 'bound func with receiver, with too few args'); + t.deepEqual(boundR(1, 2), [sentinel, 1, 2], 'bound func with receiver, with right args'); + t.deepEqual(boundR(1, 2, 3), [sentinel, 1, 2], 'bound func with receiver, with too many args'); + + var boundArg = callBind(func, sentinel, 1); + t.equal(boundArg.length, func.length - 1, 'function length is preserved', { skip: !functionsHaveConfigurableLengths }); + t.deepEqual(boundArg(), [sentinel, 1, undefined], 'bound func with receiver and arg, with too few args'); + t.deepEqual(boundArg(2), [sentinel, 1, 2], 'bound func with receiver and arg, with right arg'); + t.deepEqual(boundArg(2, 3), [sentinel, 1, 2], 'bound func with receiver and arg, with too many args'); + + t.test('callBind.apply', function (st) { + var aBound = callBind.apply(func); + st.deepEqual(aBound(sentinel), [sentinel, undefined, undefined], 'apply-bound func with no args'); + st.deepEqual(aBound(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args'); + st.deepEqual(aBound(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args'); + + var aBoundArg = callBind.apply(func); + st.deepEqual(aBoundArg(sentinel, [1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with too many args'); + st.deepEqual(aBoundArg(sentinel, [1, 2], 4), [sentinel, 1, 2], 'apply-bound func with right args'); + st.deepEqual(aBoundArg(sentinel, [1], 4), [sentinel, 1, undefined], 'apply-bound func with too few args'); + + var aBoundR = callBind.apply(func, sentinel); + st.deepEqual(aBoundR([1, 2, 3], 4), [sentinel, 1, 2], 'apply-bound func with receiver and too many args'); + st.deepEqual(aBoundR([1, 2], 4), [sentinel, 1, 2], 'apply-bound func with receiver and right args'); + st.deepEqual(aBoundR([1], 4), [sentinel, 1, undefined], 'apply-bound func with receiver and too few args'); + + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/caw/index.js b/node_modules/caw/index.js new file mode 100644 index 00000000..69e7f554 --- /dev/null +++ b/node_modules/caw/index.js @@ -0,0 +1,37 @@ +'use strict'; +const url = require('url'); +const getProxy = 
require('get-proxy'); +const isurl = require('isurl'); +const tunnelAgent = require('tunnel-agent'); +const urlToOptions = require('url-to-options'); + +module.exports = (proxy, opts) => { + proxy = proxy || getProxy(); + opts = Object.assign({}, opts); + + if (typeof proxy === 'object') { + opts = proxy; + proxy = getProxy(); + } + + if (!proxy) { + return null; + } + + proxy = isurl.lenient(proxy) ? urlToOptions(proxy) : url.parse(proxy); + + const uriProtocol = opts.protocol === 'https' ? 'https' : 'http'; + const proxyProtocol = proxy.protocol === 'https:' ? 'Https' : 'Http'; + const port = proxy.port || (proxyProtocol === 'Https' ? 443 : 80); + const method = `${uriProtocol}Over${proxyProtocol}`; + + delete opts.protocol; + + return tunnelAgent[method](Object.assign({ + proxy: { + port, + host: proxy.hostname, + proxyAuth: proxy.auth + } + }, opts)); +}; diff --git a/node_modules/caw/license b/node_modules/caw/license new file mode 100644 index 00000000..db6bc32c --- /dev/null +++ b/node_modules/caw/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Kevin Mårtensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/caw/package.json b/node_modules/caw/package.json new file mode 100644 index 00000000..15274e72 --- /dev/null +++ b/node_modules/caw/package.json @@ -0,0 +1,49 @@ +{ + "name": "caw", + "version": "2.0.1", + "description": "Construct HTTP/HTTPS agents for tunneling proxies", + "license": "MIT", + "repository": "kevva/caw", + "author": { + "email": "kevinmartensson@gmail.com", + "name": "Kevin Mårtensson", + "url": "github.com/kevva" + }, + "scripts": { + "test": "xo && ava" + }, + "engines": { + "node": ">=4" + }, + "files": [ + "index.js" + ], + "keywords": [ + "http", + "https", + "proxy", + "tunnel" + ], + "dependencies": { + "get-proxy": "^2.0.0", + "isurl": "^1.0.0-alpha5", + "tunnel-agent": "^0.6.0", + "url-to-options": "^1.0.1" + }, + "devDependencies": { + "ava": "*", + "create-cert": "^1.0.4", + "get-port": "^3.1.0", + "got": "^7.0.0", + "pify": "^3.0.0", + "proxyquire": "^1.7.9", + "sinon": "^2.3.1", + "universal-url": "1.0.0-alpha", + "xo": "*" + }, + "xo": { + "rules": { + "ava/no-skip-test": 0 + } + } +} diff --git a/node_modules/caw/readme.md b/node_modules/caw/readme.md new file mode 100644 index 00000000..447191f3 --- /dev/null +++ b/node_modules/caw/readme.md @@ -0,0 +1,51 @@ +# caw [![Build Status](https://travis-ci.org/kevva/caw.svg?branch=master)](https://travis-ci.org/kevva/caw) + +> Construct HTTP/HTTPS agents for tunneling proxies + + +## Install + +``` +$ npm install caw +``` + + +## Usage + +```js +const caw = require('caw'); +const got = require('got'); + +got('todomvc.com', { + agent: caw() +}, () => {}); +``` + + +## API + +### caw([proxy], [options]) + +#### proxy + +Type: `string` + +Proxy URL. If not set, it'll try getting it using [`get-proxy`](https://github.com/kevva/get-proxy). + +#### options + +Type: `Object` + +Besides the options below, you can pass in options allowed in [tunnel-agent](https://github.com/request/tunnel-agent). + +##### protocol + +Type: `string`
+Default: `http` + +Endpoint protocol. + + +## License + +MIT © [Kevin Mårtensson](https://github.com/kevva) diff --git a/node_modules/chalk/index.d.ts b/node_modules/chalk/index.d.ts new file mode 100644 index 00000000..9cd88f38 --- /dev/null +++ b/node_modules/chalk/index.d.ts @@ -0,0 +1,415 @@ +/** +Basic foreground colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type ForegroundColor = + | 'black' + | 'red' + | 'green' + | 'yellow' + | 'blue' + | 'magenta' + | 'cyan' + | 'white' + | 'gray' + | 'grey' + | 'blackBright' + | 'redBright' + | 'greenBright' + | 'yellowBright' + | 'blueBright' + | 'magentaBright' + | 'cyanBright' + | 'whiteBright'; + +/** +Basic background colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type BackgroundColor = + | 'bgBlack' + | 'bgRed' + | 'bgGreen' + | 'bgYellow' + | 'bgBlue' + | 'bgMagenta' + | 'bgCyan' + | 'bgWhite' + | 'bgGray' + | 'bgGrey' + | 'bgBlackBright' + | 'bgRedBright' + | 'bgGreenBright' + | 'bgYellowBright' + | 'bgBlueBright' + | 'bgMagentaBright' + | 'bgCyanBright' + | 'bgWhiteBright'; + +/** +Basic colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type Color = ForegroundColor | BackgroundColor; + +declare type Modifiers = + | 'reset' + | 'bold' + | 'dim' + | 'italic' + | 'underline' + | 'inverse' + | 'hidden' + | 'strikethrough' + | 'visible'; + +declare namespace chalk { + /** + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + type Level = 0 | 1 | 2 | 3; + + interface Options { + /** + Specify the color support for Chalk. + + By default, color support is automatically detected based on the environment. + + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + level?: Level; + } + + /** + Return a new Chalk instance. + */ + type Instance = new (options?: Options) => Chalk; + + /** + Detect whether the terminal supports color. + */ + interface ColorSupport { + /** + The color level used by Chalk. + */ + level: Level; + + /** + Return whether Chalk supports basic 16 colors. + */ + hasBasic: boolean; + + /** + Return whether Chalk supports ANSI 256 colors. + */ + has256: boolean; + + /** + Return whether Chalk supports Truecolor 16 million colors. + */ + has16m: boolean; + } + + interface ChalkFunction { + /** + Use a template string. + + @remarks Template literals are unsupported for nested calls (see [issue #341](https://github.com/chalk/chalk/issues/341)) + + @example + ``` + import chalk = require('chalk'); + + log(chalk` + CPU: {red ${cpu.totalPercent}%} + RAM: {green ${ram.used / ram.total * 100}%} + DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} + `); + ``` + + @example + ``` + import chalk = require('chalk'); + + log(chalk.red.bgBlack`2 + 3 = {bold ${2 + 3}}`) + ``` + */ + (text: TemplateStringsArray, ...placeholders: unknown[]): string; + + (...text: unknown[]): string; + } + + interface Chalk extends ChalkFunction { + /** + Return a new Chalk instance. + */ + Instance: Instance; + + /** + The color support for Chalk. + + By default, color support is automatically detected based on the environment. + + Levels: + - `0` - All colors disabled. 
+ - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + level: Level; + + /** + Use HEX value to set text color. + + @param color - Hexadecimal value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.hex('#DEADED'); + ``` + */ + hex(color: string): Chalk; + + /** + Use keyword color value to set text color. + + @param color - Keyword value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.keyword('orange'); + ``` + */ + keyword(color: string): Chalk; + + /** + Use RGB values to set text color. + */ + rgb(red: number, green: number, blue: number): Chalk; + + /** + Use HSL values to set text color. + */ + hsl(hue: number, saturation: number, lightness: number): Chalk; + + /** + Use HSV values to set text color. + */ + hsv(hue: number, saturation: number, value: number): Chalk; + + /** + Use HWB values to set text color. + */ + hwb(hue: number, whiteness: number, blackness: number): Chalk; + + /** + Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set text color. + + 30 <= code && code < 38 || 90 <= code && code < 98 + For example, 31 for red, 91 for redBright. + */ + ansi(code: number): Chalk; + + /** + Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color. + */ + ansi256(index: number): Chalk; + + /** + Use HEX value to set background color. + + @param color - Hexadecimal value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.bgHex('#DEADED'); + ``` + */ + bgHex(color: string): Chalk; + + /** + Use keyword color value to set background color. + + @param color - Keyword value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.bgKeyword('orange'); + ``` + */ + bgKeyword(color: string): Chalk; + + /** + Use RGB values to set background color. + */ + bgRgb(red: number, green: number, blue: number): Chalk; + + /** + Use HSL values to set background color. + */ + bgHsl(hue: number, saturation: number, lightness: number): Chalk; + + /** + Use HSV values to set background color. + */ + bgHsv(hue: number, saturation: number, value: number): Chalk; + + /** + Use HWB values to set background color. + */ + bgHwb(hue: number, whiteness: number, blackness: number): Chalk; + + /** + Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set background color. + + 30 <= code && code < 38 || 90 <= code && code < 98 + For example, 31 for red, 91 for redBright. + Use the foreground code, not the background code (for example, not 41, nor 101). + */ + bgAnsi(code: number): Chalk; + + /** + Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set background color. + */ + bgAnsi256(index: number): Chalk; + + /** + Modifier: Resets the current color chain. + */ + readonly reset: Chalk; + + /** + Modifier: Make text bold. + */ + readonly bold: Chalk; + + /** + Modifier: Emitting only a small amount of light. + */ + readonly dim: Chalk; + + /** + Modifier: Make text italic. (Not widely supported) + */ + readonly italic: Chalk; + + /** + Modifier: Make text underline. 
(Not widely supported) + */ + readonly underline: Chalk; + + /** + Modifier: Inverse background and foreground colors. + */ + readonly inverse: Chalk; + + /** + Modifier: Prints the text, but makes it invisible. + */ + readonly hidden: Chalk; + + /** + Modifier: Puts a horizontal line through the center of the text. (Not widely supported) + */ + readonly strikethrough: Chalk; + + /** + Modifier: Prints the text only when Chalk has a color support level > 0. + Can be useful for things that are purely cosmetic. + */ + readonly visible: Chalk; + + readonly black: Chalk; + readonly red: Chalk; + readonly green: Chalk; + readonly yellow: Chalk; + readonly blue: Chalk; + readonly magenta: Chalk; + readonly cyan: Chalk; + readonly white: Chalk; + + /* + Alias for `blackBright`. + */ + readonly gray: Chalk; + + /* + Alias for `blackBright`. + */ + readonly grey: Chalk; + + readonly blackBright: Chalk; + readonly redBright: Chalk; + readonly greenBright: Chalk; + readonly yellowBright: Chalk; + readonly blueBright: Chalk; + readonly magentaBright: Chalk; + readonly cyanBright: Chalk; + readonly whiteBright: Chalk; + + readonly bgBlack: Chalk; + readonly bgRed: Chalk; + readonly bgGreen: Chalk; + readonly bgYellow: Chalk; + readonly bgBlue: Chalk; + readonly bgMagenta: Chalk; + readonly bgCyan: Chalk; + readonly bgWhite: Chalk; + + /* + Alias for `bgBlackBright`. + */ + readonly bgGray: Chalk; + + /* + Alias for `bgBlackBright`. + */ + readonly bgGrey: Chalk; + + readonly bgBlackBright: Chalk; + readonly bgRedBright: Chalk; + readonly bgGreenBright: Chalk; + readonly bgYellowBright: Chalk; + readonly bgBlueBright: Chalk; + readonly bgMagentaBright: Chalk; + readonly bgCyanBright: Chalk; + readonly bgWhiteBright: Chalk; + } +} + +/** +Main Chalk object that allows to chain styles together. +Call the last one as a method with a string argument. +Order doesn't matter, and later styles take precedent in case of a conflict. +This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`. +*/ +declare const chalk: chalk.Chalk & chalk.ChalkFunction & { + supportsColor: chalk.ColorSupport | false; + Level: chalk.Level; + Color: Color; + ForegroundColor: ForegroundColor; + BackgroundColor: BackgroundColor; + Modifiers: Modifiers; + stderr: chalk.Chalk & {supportsColor: chalk.ColorSupport | false}; +}; + +export = chalk; diff --git a/node_modules/chalk/license b/node_modules/chalk/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/chalk/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/chalk/node_modules/has-flag/index.d.ts b/node_modules/chalk/node_modules/has-flag/index.d.ts new file mode 100644 index 00000000..a0a48c89 --- /dev/null +++ b/node_modules/chalk/node_modules/has-flag/index.d.ts @@ -0,0 +1,39 @@ +/** +Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag. + +@param flag - CLI flag to look for. The `--` prefix is optional. +@param argv - CLI arguments. Default: `process.argv`. +@returns Whether the flag exists. + +@example +``` +// $ ts-node foo.ts -f --unicorn --foo=bar -- --rainbow + +// foo.ts +import hasFlag = require('has-flag'); + +hasFlag('unicorn'); +//=> true + +hasFlag('--unicorn'); +//=> true + +hasFlag('f'); +//=> true + +hasFlag('-f'); +//=> true + +hasFlag('foo=bar'); +//=> true + +hasFlag('foo'); +//=> false + +hasFlag('rainbow'); +//=> false +``` +*/ +declare function hasFlag(flag: string, argv?: string[]): boolean; + +export = hasFlag; diff --git a/node_modules/chalk/node_modules/has-flag/index.js b/node_modules/chalk/node_modules/has-flag/index.js new file mode 100644 index 00000000..b6f80b1f --- /dev/null +++ b/node_modules/chalk/node_modules/has-flag/index.js @@ -0,0 +1,8 @@ +'use strict'; + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; diff --git a/node_modules/chalk/node_modules/has-flag/license b/node_modules/chalk/node_modules/has-flag/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/chalk/node_modules/has-flag/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
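The has-flag implementation above only reports a flag as present if it appears before a `--` argument terminator. A quick sketch of that behavior with an explicit `argv` array (the flag values are made up):

```js
const hasFlag = require('has-flag');

// Equivalent to the tail of: node app.js --color -- --verbose
const argv = ['--color', '--', '--verbose'];

console.log(hasFlag('color', argv));   //=> true
console.log(hasFlag('verbose', argv)); //=> false, it only occurs after the `--` terminator
```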
diff --git a/node_modules/chalk/node_modules/has-flag/package.json b/node_modules/chalk/node_modules/has-flag/package.json new file mode 100644 index 00000000..a9cba4b8 --- /dev/null +++ b/node_modules/chalk/node_modules/has-flag/package.json @@ -0,0 +1,46 @@ +{ + "name": "has-flag", + "version": "4.0.0", + "description": "Check if argv has a specific flag", + "license": "MIT", + "repository": "sindresorhus/has-flag", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "has", + "check", + "detect", + "contains", + "find", + "flag", + "cli", + "command-line", + "argv", + "process", + "arg", + "args", + "argument", + "arguments", + "getopt", + "minimist", + "optimist" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/chalk/node_modules/has-flag/readme.md b/node_modules/chalk/node_modules/has-flag/readme.md new file mode 100644 index 00000000..3f72dff2 --- /dev/null +++ b/node_modules/chalk/node_modules/has-flag/readme.md @@ -0,0 +1,89 @@ +# has-flag [![Build Status](https://travis-ci.org/sindresorhus/has-flag.svg?branch=master)](https://travis-ci.org/sindresorhus/has-flag) + +> Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag + +Correctly stops looking after an `--` argument terminator. + +--- + +
+Get professional support for this package with a Tidelift subscription.
+Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
+ +--- + + +## Install + +``` +$ npm install has-flag +``` + + +## Usage + +```js +// foo.js +const hasFlag = require('has-flag'); + +hasFlag('unicorn'); +//=> true + +hasFlag('--unicorn'); +//=> true + +hasFlag('f'); +//=> true + +hasFlag('-f'); +//=> true + +hasFlag('foo=bar'); +//=> true + +hasFlag('foo'); +//=> false + +hasFlag('rainbow'); +//=> false +``` + +``` +$ node foo.js -f --unicorn --foo=bar -- --rainbow +``` + + +## API + +### hasFlag(flag, [argv]) + +Returns a boolean for whether the flag exists. + +#### flag + +Type: `string` + +CLI flag to look for. The `--` prefix is optional. + +#### argv + +Type: `string[]`
+Default: `process.argv` + +CLI arguments. + + +## Security + +To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/chalk/node_modules/supports-color/browser.js b/node_modules/chalk/node_modules/supports-color/browser.js new file mode 100644 index 00000000..62afa3a7 --- /dev/null +++ b/node_modules/chalk/node_modules/supports-color/browser.js @@ -0,0 +1,5 @@ +'use strict'; +module.exports = { + stdout: false, + stderr: false +}; diff --git a/node_modules/chalk/node_modules/supports-color/index.js b/node_modules/chalk/node_modules/supports-color/index.js new file mode 100644 index 00000000..6fada390 --- /dev/null +++ b/node_modules/chalk/node_modules/supports-color/index.js @@ -0,0 +1,135 @@ +'use strict'; +const os = require('os'); +const tty = require('tty'); +const hasFlag = require('has-flag'); + +const {env} = process; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; +} + +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else { + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } + + const min = forceColor || 0; + + if (env.TERM === 'dumb') { + return min; + } + + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 
3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) +}; diff --git a/node_modules/chalk/node_modules/supports-color/license b/node_modules/chalk/node_modules/supports-color/license new file mode 100644 index 00000000..e7af2f77 --- /dev/null +++ b/node_modules/chalk/node_modules/supports-color/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
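The supports-color implementation above resolves color-forcing CLI flags and the `FORCE_COLOR` environment variable before falling back to TTY and `TERM` detection, and exports the result per stream. A minimal sketch of reading those exported capability objects (the script name is hypothetical):

```js
// check-color.js (hypothetical); try: FORCE_COLOR=2 node check-color.js
const supportsColor = require('supports-color');

// `stdout` and `stderr` are each either `false` or {level, hasBasic, has256, has16m}
if (supportsColor.stdout) {
	console.log('stdout color level:', supportsColor.stdout.level);
	console.log('stdout 256-color support:', supportsColor.stdout.has256);
}
```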
diff --git a/node_modules/chalk/node_modules/supports-color/package.json b/node_modules/chalk/node_modules/supports-color/package.json new file mode 100644 index 00000000..f7182edc --- /dev/null +++ b/node_modules/chalk/node_modules/supports-color/package.json @@ -0,0 +1,53 @@ +{ + "name": "supports-color", + "version": "7.2.0", + "description": "Detect whether a terminal supports color", + "license": "MIT", + "repository": "chalk/supports-color", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "browser.js" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "ansi", + "styles", + "tty", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "support", + "supports", + "capability", + "detect", + "truecolor", + "16m" + ], + "dependencies": { + "has-flag": "^4.0.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "import-fresh": "^3.0.0", + "xo": "^0.24.0" + }, + "browser": "browser.js" +} diff --git a/node_modules/chalk/node_modules/supports-color/readme.md b/node_modules/chalk/node_modules/supports-color/readme.md new file mode 100644 index 00000000..36542285 --- /dev/null +++ b/node_modules/chalk/node_modules/supports-color/readme.md @@ -0,0 +1,76 @@ +# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color) + +> Detect whether a terminal supports color + + +## Install + +``` +$ npm install supports-color +``` + + +## Usage + +```js +const supportsColor = require('supports-color'); + +if (supportsColor.stdout) { + console.log('Terminal stdout supports color'); +} + +if (supportsColor.stdout.has256) { + console.log('Terminal stdout supports 256 colors'); +} + +if (supportsColor.stderr.has16m) { + console.log('Terminal stderr supports 16 million colors (truecolor)'); +} +``` + + +## API + +Returns an `Object` with a `stdout` and `stderr` property for testing either streams. Each property is an `Object`, or `false` if color is not supported. + +The `stdout`/`stderr` objects specifies a level of support for color through a `.level` property and a corresponding flag: + +- `.level = 1` and `.hasBasic = true`: Basic color support (16 colors) +- `.level = 2` and `.has256 = true`: 256 color support +- `.level = 3` and `.has16m = true`: Truecolor support (16 million colors) + + +## Info + +It obeys the `--color` and `--no-color` CLI flags. + +For situations where using `--color` is not possible, use the environment variable `FORCE_COLOR=1` (level 1), `FORCE_COLOR=2` (level 2), or `FORCE_COLOR=3` (level 3) to forcefully enable color, or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks. + +Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively. + + +## Related + +- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +--- + +
+Get professional support for this package with a Tidelift subscription.
+Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
+ +--- diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json new file mode 100644 index 00000000..47c23f29 --- /dev/null +++ b/node_modules/chalk/package.json @@ -0,0 +1,68 @@ +{ + "name": "chalk", + "version": "4.1.2", + "description": "Terminal string styling done right", + "license": "MIT", + "repository": "chalk/chalk", + "funding": "https://github.com/chalk/chalk?sponsor=1", + "main": "source", + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && nyc ava && tsd", + "bench": "matcha benchmark.js" + }, + "files": [ + "source", + "index.d.ts" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "devDependencies": { + "ava": "^2.4.0", + "coveralls": "^3.0.7", + "execa": "^4.0.0", + "import-fresh": "^3.1.0", + "matcha": "^0.7.0", + "nyc": "^15.0.0", + "resolve-from": "^5.0.0", + "tsd": "^0.7.4", + "xo": "^0.28.2" + }, + "xo": { + "rules": { + "unicorn/prefer-string-slice": "off", + "unicorn/prefer-includes": "off", + "@typescript-eslint/member-ordering": "off", + "no-redeclare": "off", + "unicorn/string-content": "off", + "unicorn/better-regex": "off" + } + } +} diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md new file mode 100644 index 00000000..a055d21c --- /dev/null +++ b/node_modules/chalk/readme.md @@ -0,0 +1,341 @@ +

+
+
+ Chalk +
+
+
+

+ +> Terminal string styling done right + +[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk) [![Coverage Status](https://coveralls.io/repos/github/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/github/chalk/chalk?branch=master) [![npm dependents](https://badgen.net/npm/dependents/chalk)](https://www.npmjs.com/package/chalk?activeTab=dependents) [![Downloads](https://badgen.net/npm/dt/chalk)](https://www.npmjs.com/package/chalk) [![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/xojs/xo) ![TypeScript-ready](https://img.shields.io/npm/types/chalk.svg) [![run on repl.it](https://repl.it/badge/github/chalk/chalk)](https://repl.it/github/chalk/chalk) + + + +
+ +## Highlights + +- Expressive API +- Highly performant +- Ability to nest styles +- [256/Truecolor color support](#256-and-truecolor-color-support) +- Auto-detects color support +- Doesn't extend `String.prototype` +- Clean and focused +- Actively maintained +- [Used by ~50,000 packages](https://www.npmjs.com/browse/depended/chalk) as of January 1, 2020 + +## Install + +```console +$ npm install chalk +``` + +## Usage + +```js +const chalk = require('chalk'); + +console.log(chalk.blue('Hello world!')); +``` + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +const chalk = require('chalk'); +const log = console.log; + +// Combine styled and normal strings +log(chalk.blue('Hello') + ' World' + chalk.red('!')); + +// Compose multiple styles using the chainable API +log(chalk.blue.bgRed.bold('Hello world!')); + +// Pass in multiple arguments +log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz')); + +// Nest styles +log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!')); + +// Nest styles of the same type even (color, underline, background) +log(chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +)); + +// ES2015 template literal +log(` +CPU: ${chalk.red('90%')} +RAM: ${chalk.green('40%')} +DISK: ${chalk.yellow('70%')} +`); + +// ES2015 tagged template literal +log(chalk` +CPU: {red ${cpu.totalPercent}%} +RAM: {green ${ram.used / ram.total * 100}%} +DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} +`); + +// Use RGB colors in terminal emulators that support it. +log(chalk.keyword('orange')('Yay for orange colored text!')); +log(chalk.rgb(123, 45, 67).underline('Underlined reddish color')); +log(chalk.hex('#DEADED').bold('Bold gray!')); +``` + +Easily define your own themes: + +```js +const chalk = require('chalk'); + +const error = chalk.bold.red; +const warning = chalk.keyword('orange'); + +console.log(error('Error!')); +console.log(warning('Warning!')); +``` + +Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args): + +```js +const name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> 'Hello Sindre' +``` + +## API + +### chalk.`