update, text, response
141 node_modules/readable-web-to-node-stream/README.md (generated, vendored)
@@ -1,71 +1,70 @@
+[](https://github.com/Borewit/readable-web-to-node-stream/actions/workflows/xvfb-ci.yml)
[](https://npmjs.org/package/readable-web-to-node-stream)
[](https://npmcharts.com/compare/readable-web-to-node-stream)
-[](https://david-dm.org/Borewit/readable-web-to-node-stream)
[](https://snyk.io/test/github/Borewit/readable-web-to-node-stream?targetFile=package.json)
[](https://www.codacy.com/gh/Borewit/peek-readable/dashboard?utm_source=github.com&utm_medium=referral&utm_content=Borewit/peek-readable&utm_campaign=Badge_Grade)
[](https://coveralls.io/github/Borewit/readable-web-to-node-stream?branch=master)
[](https://bundlephobia.com/result?p=readable-web-to-node-stream)

# readable-web-to-node-stream

Converts a [Web-API readable stream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader) into a [Node.js readable stream](https://nodejs.org/api/stream.html#stream_readable_streams).

+To convert the other way around, from [Node.js readable stream](https://nodejs.org/api/stream.html#stream_readable_streams) to [Web-API readable stream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader),
+you may use [node-readable-to-web-readable-stream](https://github.com/Borewit/node-readable-to-web-readable-stream).
+
## Installation

Install via [npm](http://npmjs.org/):

```bash
npm install readable-web-to-node-stream
```
-or or [yarn](https://yarnpkg.com/):
+or [yarn](https://yarnpkg.com/):
```bash
yarn add readable-web-to-node-stream
```

## Compatibility

Source is written in TypeScript and compiled to ECMAScript 2017 (ES8).

Unit tests are performed on the following browsers:

-* Google Chrome 74.0
-* Firefox 68.0
-* Safari 12.0
-* Opera 60.0
+* Latest Google Chrome 74.0

## Example

Import readable-web-to-node-stream in JavaScript:
```js
const {ReadableWebToNodeStream} = require('readable-web-to-node-stream');

async function download(url) {
  const response = await fetch(url);
  const readableWebStream = response.body;
  const nodeStream = new ReadableWebToNodeStream(readableWebStream);
}
```

## API

**constructor(stream: ReadableStream): Promise<void>**

`stream: ReadableStream`: the [Web-API readable stream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader).

**close(): Promise<void>**
Will cancel and close the Node readable stream, and release the Web-API readable stream.

**waitForReadToComplete(): Promise<void>**
If there is no unresolved read call to the Web-API ReadableStream, it returns immediately; otherwise it waits until the read is resolved.

## Licence

(The MIT License)

Copyright (c) 2019 Borewit

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
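The updated README documents `close()` and `waitForReadToComplete()` but never shows them in the example. The following sketch is not part of the commit; it assumes the README's fetch-based setup, a runtime with a global `fetch`, and a Node readable stream that supports async iteration (readable-stream v3+). The `url` argument and the byte-counting loop are illustrative only.

```js
const { ReadableWebToNodeStream } = require('readable-web-to-node-stream');

async function downloadAndClose(url) {
  const response = await fetch(url);
  // Wrap the Web-API readable stream (response.body) in a Node.js readable stream.
  const nodeStream = new ReadableWebToNodeStream(response.body);

  // Consume the Node.js stream, here simply by counting the bytes received.
  let size = 0;
  for await (const chunk of nodeStream) {
    size += chunk.length;
  }

  // Release the reader lock on the underlying Web-API stream when done.
  await nodeStream.close();
  return size;
}
```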
6 node_modules/readable-web-to-node-stream/lib/index.d.ts (generated, vendored)

@@ -16,16 +16,16 @@ export declare class ReadableWebToNodeStream extends Readable {
    private pendingRead;
    /**
     *
     * @param stream ReadableStream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
     */
-    constructor(stream: ReadableStream);
+    constructor(stream: ReadableStream | ReadableStream<Uint8Array>);
    /**
     * Implementation of readable._read(size).
     * When readable._read() is called, if data is available from the resource,
     * the implementation should begin pushing that data into the read queue
     * https://nodejs.org/api/stream.html#stream_readable_read_size_1
     */
-    _read(): Promise<void>;
+    _read(): void;
    /**
     * If there is no unresolved read call to Web-API ReadableStream immediately returns;
     * otherwise will wait until the read is resolved.
30 node_modules/readable-web-to-node-stream/lib/index.js (generated, vendored)

@@ -11,7 +11,7 @@ const readable_stream_1 = require("readable-stream");
class ReadableWebToNodeStream extends readable_stream_1.Readable {
    /**
     *
     * @param stream ReadableStream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
     */
    constructor(stream) {
        super();
@@ -25,24 +25,28 @@ class ReadableWebToNodeStream extends readable_stream_1.Readable {
     * the implementation should begin pushing that data into the read queue
     * https://nodejs.org/api/stream.html#stream_readable_read_size_1
     */
-    async _read() {
+    _read() {
        // Should start pushing data into the queue
        // Read data from the underlying Web-API-readable-stream
        if (this.released) {
            this.push(null); // Signal EOF
            return;
        }
-        this.pendingRead = this.reader.read();
-        const data = await this.pendingRead;
-        // clear the promise before pushing pushing new data to the queue and allow sequential calls to _read()
-        delete this.pendingRead;
-        if (data.done || this.released) {
-            this.push(null); // Signal EOF
-        }
-        else {
-            this.bytesRead += data.value.length;
-            this.push(data.value); // Push new data to the queue
-        }
+        this.pendingRead = this.reader
+            .read()
+            .then((data) => {
+            delete this.pendingRead;
+            if (data.done || this.released) {
+                this.push(null); // Signal EOF
+            }
+            else {
+                this.bytesRead += data.value.length;
+                this.push(data.value); // Push new data to the queue
+            }
+        })
+            .catch((err) => {
+            this.destroy(err);
+        });
    }
    /**
     * If there is no unresolved read call to Web-API ReadableStream immediately returns;
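The change above turns `_read()` from an `async` method into a plain synchronous method: the pending Web-API read is tracked as a promise, cleared before pushing so sequential `_read()` calls can proceed, and any read failure is routed into `destroy()` instead of becoming an unhandled rejection. Below is a self-contained sketch of that pattern, not the vendored code itself; it uses Node's built-in `stream` module rather than the `readable-stream` package, and the class and variable names are hypothetical.

```js
const { Readable } = require('stream');

class WebToNodeSketch extends Readable {
  constructor(reader) {
    super();
    this.reader = reader;   // assumed: a ReadableStreamDefaultReader
    this.released = false;  // set elsewhere when the wrapper is closed
    this.bytesRead = 0;
  }

  _read() {
    if (this.released) {
      this.push(null); // Signal EOF
      return;
    }
    this.pendingRead = this.reader
      .read()
      .then((data) => {
        delete this.pendingRead; // allow the next _read() call to start a new read
        if (data.done || this.released) {
          this.push(null); // Signal EOF
        } else {
          this.bytesRead += data.value.length;
          this.push(data.value); // Push the chunk into Node's read queue
        }
      })
      .catch((err) => this.destroy(err)); // surface read errors on the Node stream
  }
}
```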
147 node_modules/readable-web-to-node-stream/lib/index.spec.js (generated, vendored)
@@ -1,147 +0,0 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.parseReadableStream = void 0;
|
||||
localStorage.debug = 'readable-web-to-node-stream';
|
||||
const assert = require("assert");
|
||||
const mmb = require("music-metadata-browser");
|
||||
const index_1 = require("./index");
|
||||
async function httpGetByUrl(url) {
|
||||
const response = await fetch(url);
|
||||
const headers = [];
|
||||
response.headers.forEach(header => {
|
||||
headers.push(header);
|
||||
});
|
||||
assert.ok(response.ok, `HTTP error status=${response.status}: ${response.statusText}`);
|
||||
assert.ok(response.body, 'HTTP-stream');
|
||||
return response;
|
||||
}
|
||||
async function parseReadableStream(stream, fileInfo, options) {
|
||||
const ns = new index_1.ReadableWebToNodeStream(stream);
|
||||
const res = await mmb.parseNodeStream(ns, fileInfo, options);
|
||||
await ns.close();
|
||||
return res;
|
||||
}
|
||||
exports.parseReadableStream = parseReadableStream;
|
||||
const tiuqottigeloot_vol24_Tracks = [
|
||||
{
|
||||
url: '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/01%20-%20Diablo%20Swing%20Orchestra%20-%20Heroines.mp3',
|
||||
duration: 322.612245,
|
||||
metaData: {
|
||||
title: 'Heroines',
|
||||
artist: 'Diablo Swing Orchestra'
|
||||
}
|
||||
},
|
||||
{
|
||||
url: '/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/02%20-%20Eclectek%20-%20We%20Are%20Going%20To%20Eclecfunk%20Your%20Ass.mp3',
|
||||
duration: 190.093061,
|
||||
metaData: {
|
||||
title: 'We Are Going to Eclecfunk Your Ass',
|
||||
artist: 'Eclectek'
|
||||
}
|
||||
} /* ,
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/03%20-%20Auto-Pilot%20-%20Seventeen.mp3',
|
||||
duration: 214.622041,
|
||||
metaData: {
|
||||
title: 'Seventeen',
|
||||
artist: 'Auto-Pilot'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/04%20-%20Muha%20-%20Microphone.mp3',
|
||||
duration: 181.838367,
|
||||
metaData: {
|
||||
title: 'Microphone',
|
||||
artist: 'Muha'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/05%20-%20Just%20Plain%20Ant%20-%20Stumble.mp3',
|
||||
duration: 86.047347,
|
||||
metaData: {
|
||||
title: 'Stumble',
|
||||
artist: 'Just Plain Ant'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/06%20-%20Sleaze%20-%20God%20Damn.mp3',
|
||||
duration: 226.795102,
|
||||
metaData: {
|
||||
title: 'God Damn',
|
||||
artist: 'Sleaze'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/07%20-%20Juanitos%20-%20Hola%20Hola%20Bossa%20Nova.mp3',
|
||||
duration: 207.072653,
|
||||
metaData: {
|
||||
title: 'Hola Hola Bossa Nova',
|
||||
artist: 'Juanitos'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/08%20-%20Entertainment%20For%20The%20Braindead%20-%20Resolutions%20(Chris%20Summer%20Remix).mp3',
|
||||
duration: 314.331429,
|
||||
metaData: {
|
||||
title: 'Resolutions (Chris Summer remix)',
|
||||
artist: 'Entertainment for the Braindead'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/09%20-%20Nobara%20Hayakawa%20-%20Trail.mp3',
|
||||
duration: 204.042449,
|
||||
metaData: {
|
||||
title: 'Trail',
|
||||
artist: 'Nobara Hayakawa'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/10%20-%20Paper%20Navy%20-%20Tongue%20Tied.mp3',
|
||||
duration: 201.116735,
|
||||
metaData: {
|
||||
title: 'Tongue Tied',
|
||||
artist: 'Paper Navy'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/11%20-%2060%20Tigres%20-%20Garage.mp3',
|
||||
duration: 245.394286,
|
||||
metaData: {
|
||||
title: 'Garage',
|
||||
artist: '60 Tigres'
|
||||
}
|
||||
},
|
||||
{
|
||||
url:
|
||||
'/Various%20Artists%20-%202009%20-%20netBloc%20Vol%2024_%20tiuqottigeloot%20%5BMP3-V2%5D/12%20-%20CM%20aka%20Creative%20-%20The%20Cycle%20(Featuring%20Mista%20Mista).mp3',
|
||||
duration: 221.44,
|
||||
metaData: {
|
||||
title: 'The Cycle (feat. Mista Mista)',
|
||||
artist: 'CM aka Creative'
|
||||
}
|
||||
} */
|
||||
];
|
||||
describe('Parse WebAmp tracks', () => {
|
||||
tiuqottigeloot_vol24_Tracks.forEach(track => {
|
||||
it(`track ${track.metaData.artist} - ${track.metaData.title}`, async () => {
|
||||
const url = 'https://raw.githubusercontent.com/Borewit/test-audio/958e057' + track.url;
|
||||
const response = await httpGetByUrl(url);
|
||||
const metadata = await parseReadableStream(response.body, {
|
||||
size: parseInt(response.headers.get('Content-Length'), 10),
|
||||
mimeType: response.headers.get('Content-Type')
|
||||
});
|
||||
expect(metadata.common.artist).toEqual(track.metaData.artist);
|
||||
expect(metadata.common.title).toEqual(track.metaData.title);
|
||||
}, 20000);
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=index.spec.js.map
|
||||
73 node_modules/readable-web-to-node-stream/node_modules/buffer/AUTHORS.md (generated, vendored, normal file)
@@ -0,0 +1,73 @@
|
||||
# Authors
|
||||
|
||||
#### Ordered by first contribution.
|
||||
|
||||
- Romain Beauxis (toots@rastageeks.org)
|
||||
- Tobias Koppers (tobias.koppers@googlemail.com)
|
||||
- Janus (ysangkok@gmail.com)
|
||||
- Rainer Dreyer (rdrey1@gmail.com)
|
||||
- Tõnis Tiigi (tonistiigi@gmail.com)
|
||||
- James Halliday (mail@substack.net)
|
||||
- Michael Williamson (mike@zwobble.org)
|
||||
- elliottcable (github@elliottcable.name)
|
||||
- rafael (rvalle@livelens.net)
|
||||
- Andrew Kelley (superjoe30@gmail.com)
|
||||
- Andreas Madsen (amwebdk@gmail.com)
|
||||
- Mike Brevoort (mike.brevoort@pearson.com)
|
||||
- Brian White (mscdex@mscdex.net)
|
||||
- Feross Aboukhadijeh (feross@feross.org)
|
||||
- Ruben Verborgh (ruben@verborgh.org)
|
||||
- eliang (eliang.cs@gmail.com)
|
||||
- Jesse Tane (jesse.tane@gmail.com)
|
||||
- Alfonso Boza (alfonso@cloud.com)
|
||||
- Mathias Buus (mathiasbuus@gmail.com)
|
||||
- Devon Govett (devongovett@gmail.com)
|
||||
- Daniel Cousens (github@dcousens.com)
|
||||
- Joseph Dykstra (josephdykstra@gmail.com)
|
||||
- Parsha Pourkhomami (parshap+git@gmail.com)
|
||||
- Damjan Košir (damjan.kosir@gmail.com)
|
||||
- daverayment (dave.rayment@gmail.com)
|
||||
- kawanet (u-suke@kawa.net)
|
||||
- Linus Unnebäck (linus@folkdatorn.se)
|
||||
- Nolan Lawson (nolan.lawson@gmail.com)
|
||||
- Calvin Metcalf (calvin.metcalf@gmail.com)
|
||||
- Koki Takahashi (hakatasiloving@gmail.com)
|
||||
- Guy Bedford (guybedford@gmail.com)
|
||||
- Jan Schär (jscissr@gmail.com)
|
||||
- RaulTsc (tomescu.raul@gmail.com)
|
||||
- Matthieu Monsch (monsch@alum.mit.edu)
|
||||
- Dan Ehrenberg (littledan@chromium.org)
|
||||
- Kirill Fomichev (fanatid@ya.ru)
|
||||
- Yusuke Kawasaki (u-suke@kawa.net)
|
||||
- DC (dcposch@dcpos.ch)
|
||||
- John-David Dalton (john.david.dalton@gmail.com)
|
||||
- adventure-yunfei (adventure030@gmail.com)
|
||||
- Emil Bay (github@tixz.dk)
|
||||
- Sam Sudar (sudar.sam@gmail.com)
|
||||
- Volker Mische (volker.mische@gmail.com)
|
||||
- David Walton (support@geekstocks.com)
|
||||
- Сковорода Никита Андреевич (chalkerx@gmail.com)
|
||||
- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com)
|
||||
- ukstv (sergey.ukustov@machinomy.com)
|
||||
- Renée Kooi (renee@kooi.me)
|
||||
- ranbochen (ranbochen@qq.com)
|
||||
- Vladimir Borovik (bobahbdb@gmail.com)
|
||||
- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com)
|
||||
- kumavis (aaron@kumavis.me)
|
||||
- Sergey Ukustov (sergey.ukustov@machinomy.com)
|
||||
- Fei Liu (liu.feiwood@gmail.com)
|
||||
- Blaine Bublitz (blaine.bublitz@gmail.com)
|
||||
- clement (clement@seald.io)
|
||||
- Koushik Dutta (koushd@gmail.com)
|
||||
- Jordan Harband (ljharb@gmail.com)
|
||||
- Niklas Mischkulnig (mischnic@users.noreply.github.com)
|
||||
- Nikolai Vavilov (vvnicholas@gmail.com)
|
||||
- Fedor Nezhivoi (gyzerok@users.noreply.github.com)
|
||||
- shuse2 (shus.toda@gmail.com)
|
||||
- Peter Newman (peternewman@users.noreply.github.com)
|
||||
- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com)
|
||||
- jkkang (jkkang@smartauth.kr)
|
||||
- Deklan Webster (deklanw@gmail.com)
|
||||
- Martin Heidegger (martin.heidegger@gmail.com)
|
||||
|
||||
#### Generated by bin/update-authors.sh.
|
||||
21 node_modules/readable-web-to-node-stream/node_modules/buffer/LICENSE (generated, vendored, normal file)

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) Feross Aboukhadijeh, and other contributors.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
410 node_modules/readable-web-to-node-stream/node_modules/buffer/README.md (generated, vendored, normal file)
@@ -0,0 +1,410 @@
|
||||
# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
|
||||
|
||||
[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg
|
||||
[travis-url]: https://travis-ci.org/feross/buffer
|
||||
[npm-image]: https://img.shields.io/npm/v/buffer.svg
|
||||
[npm-url]: https://npmjs.org/package/buffer
|
||||
[downloads-image]: https://img.shields.io/npm/dm/buffer.svg
|
||||
[downloads-url]: https://npmjs.org/package/buffer
|
||||
[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
|
||||
[standard-url]: https://standardjs.com
|
||||
|
||||
#### The buffer module from [node.js](https://nodejs.org/), for the browser.
|
||||
|
||||
[![saucelabs][saucelabs-image]][saucelabs-url]
|
||||
|
||||
[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg
|
||||
[saucelabs-url]: https://saucelabs.com/u/buffer
|
||||
|
||||
With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module.
|
||||
|
||||
The goal is to provide an API that is 100% identical to
|
||||
[node's Buffer API](https://nodejs.org/api/buffer.html). Read the
|
||||
[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
|
||||
instance methods, and class methods that are supported.
|
||||
|
||||
## features
|
||||
|
||||
- Manipulate binary data like a boss, in all browsers!
|
||||
- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`)
|
||||
- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments)
|
||||
- Excellent browser support (Chrome, Firefox, Edge, Safari 11+, iOS 11+, Android, etc.)
|
||||
- Preserves Node API exactly, with one minor difference (see below)
|
||||
- Square-bracket `buf[4]` notation works!
|
||||
- Does not modify any browser prototypes or put anything on `window`
|
||||
- Comprehensive test suite (including all buffer tests from node.js core)
|
||||
|
||||
## install
|
||||
|
||||
To use this module directly (without browserify), install it:
|
||||
|
||||
```bash
|
||||
npm install buffer
|
||||
```
|
||||
|
||||
This module was previously called **native-buffer-browserify**, but please use **buffer**
|
||||
from now on.
|
||||
|
||||
If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer).
|
||||
|
||||
## usage
|
||||
|
||||
The module's API is identical to node's `Buffer` API. Read the
|
||||
[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
|
||||
instance methods, and class methods that are supported.
|
||||
|
||||
As mentioned above, `require('buffer')` or use the `Buffer` global with
|
||||
[browserify](http://browserify.org) and this module will automatically be included
|
||||
in your bundle. Almost any npm module will work in the browser, even if it assumes that
|
||||
the node `Buffer` API will be available.
|
||||
|
||||
To depend on this module explicitly (without browserify), require it like this:
|
||||
|
||||
```js
|
||||
var Buffer = require('buffer/').Buffer // note: the trailing slash is important!
|
||||
```
|
||||
|
||||
To require this module explicitly, use `require('buffer/')` which tells the node.js module
|
||||
lookup algorithm (also used by browserify) to use the **npm module** named `buffer`
|
||||
instead of the **node.js core** module named `buffer`!
|
||||
|
||||
|
||||
## how does it work?
|
||||
|
||||
The Buffer constructor returns instances of `Uint8Array` that have their prototype
|
||||
changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`,
|
||||
so the returned instances will have all the node `Buffer` methods and the
|
||||
`Uint8Array` methods. Square bracket notation works as expected -- it returns a
|
||||
single octet.
|
||||
|
||||
The `Uint8Array` prototype remains unmodified.
|
||||
|
||||
|
||||
## tracking the latest node api
|
||||
|
||||
This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer
|
||||
API is considered **stable** in the
|
||||
[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index),
|
||||
so it is unlikely that there will ever be breaking changes.
|
||||
Nonetheless, when/if the Buffer API changes in node, this module's API will change
|
||||
accordingly.
|
||||
|
||||
## related packages
|
||||
|
||||
- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer
|
||||
- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer
|
||||
- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package
|
||||
|
||||
## conversion packages
|
||||
|
||||
### convert typed array to buffer
|
||||
|
||||
Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast.
|
||||
|
||||
### convert buffer to typed array
|
||||
|
||||
`Buffer` is a subclass of `Uint8Array` (which is a typed array). So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`.
|
||||
|
||||
### convert blob to buffer
|
||||
|
||||
Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`.
|
||||
|
||||
### convert buffer to blob
|
||||
|
||||
To convert a `Buffer` to a `Blob`, use the `Blob` constructor:
|
||||
|
||||
```js
|
||||
var blob = new Blob([ buffer ])
|
||||
```
|
||||
|
||||
Optionally, specify a mimetype:
|
||||
|
||||
```js
|
||||
var blob = new Blob([ buffer ], { type: 'text/html' })
|
||||
```
|
||||
|
||||
### convert arraybuffer to buffer
|
||||
|
||||
To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast.
|
||||
|
||||
```js
|
||||
var buffer = Buffer.from(arrayBuffer)
|
||||
```
|
||||
|
||||
### convert buffer to arraybuffer
|
||||
|
||||
To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects):
|
||||
|
||||
```js
|
||||
var arrayBuffer = buffer.buffer.slice(
|
||||
buffer.byteOffset, buffer.byteOffset + buffer.byteLength
|
||||
)
|
||||
```
|
||||
|
||||
Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module.
|
||||
|
||||
## performance
|
||||
|
||||
See perf tests in `/perf`.
|
||||
|
||||
`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a
|
||||
sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will
|
||||
always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module,
|
||||
which is included to compare against.
|
||||
|
||||
NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README.
|
||||
|
||||
### Chrome 38
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ |
|
||||
| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | |
|
||||
| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | |
|
||||
| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | |
|
||||
| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | |
|
||||
| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | |
|
||||
| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ |
|
||||
| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ |
|
||||
| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ |
|
||||
| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | |
|
||||
| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | |
|
||||
| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ |
|
||||
|
||||
|
||||
### Firefox 33
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | |
|
||||
| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | |
|
||||
| Uint8Array#concat | 1,255,674 ops/sec | ±8.65% | 52 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | |
|
||||
| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | |
|
||||
| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | |
|
||||
| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | |
|
||||
| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | |
|
||||
| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | |
|
||||
| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ |
|
||||
| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | |
|
||||
| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | |
|
||||
| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ |
|
||||
|
||||
### Safari 8
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ |
|
||||
| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | |
|
||||
| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | |
|
||||
| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | |
|
||||
| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | |
|
||||
| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | |
|
||||
| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | |
|
||||
| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | |
|
||||
| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | |
|
||||
| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | |
|
||||
| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | |
|
||||
| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ |
|
||||
|
||||
|
||||
### Node 0.11.14
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | |
|
||||
| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ |
|
||||
| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | |
|
||||
| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ |
|
||||
| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | |
|
||||
| Uint8Array#copy(16000) | 1,348,517 ops/sec | ±0.84% | 89 | ✓ |
|
||||
| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | |
|
||||
| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ |
|
||||
| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | |
|
||||
| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | |
|
||||
| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | |
|
||||
| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ |
|
||||
| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ |
|
||||
| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | |
|
||||
| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ |
|
||||
| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | |
|
||||
| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | |
|
||||
| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | |
|
||||
| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | |
|
||||
| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ |
|
||||
| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | |
|
||||
| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ |
|
||||
| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | |
|
||||
|
||||
### iojs 1.8.1
|
||||
|
||||
| Method | Operations | Accuracy | Sampled | Fastest |
|
||||
|:-------|:-----------|:---------|:--------|:-------:|
|
||||
| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | |
|
||||
| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | |
|
||||
| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | |
|
||||
| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | |
|
||||
| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | |
|
||||
| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | |
|
||||
| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | |
|
||||
| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ |
|
||||
| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | |
|
||||
| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | |
|
||||
| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | |
|
||||
| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ |
|
||||
| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ |
|
||||
| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | |
|
||||
| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ |
|
||||
| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | |
|
||||
| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | |
|
||||
| | | | |
|
||||
| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | |
|
||||
| DataView#getUint32 | 137,592 ops/sec | ±0.64% | 90 | |
|
||||
| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | |
|
||||
| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | |
|
||||
| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ |
|
||||
| | | | |
|
||||
| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | |
|
||||
| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ |
|
||||
| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | |
|
||||
| | | | |
|
||||
|
||||
## Testing the project
|
||||
|
||||
First, install the project:
|
||||
|
||||
npm install
|
||||
|
||||
Then, to run tests in Node.js, run:
|
||||
|
||||
npm run test-node
|
||||
|
||||
To test locally in a browser, you can run:
|
||||
|
||||
npm run test-browser-es5-local # For ES5 browsers that don't support ES6
|
||||
npm run test-browser-es6-local # For ES6 compliant browsers
|
||||
|
||||
This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap).
|
||||
|
||||
To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run:
|
||||
|
||||
npm test
|
||||
|
||||
This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files.
|
||||
|
||||
## JavaScript Standard Style
|
||||
|
||||
This module uses [JavaScript Standard Style](https://github.com/feross/standard).
|
||||
|
||||
[](https://github.com/feross/standard)
|
||||
|
||||
To test that the code conforms to the style, `npm install` and run:
|
||||
|
||||
./node_modules/.bin/standard
|
||||
|
||||
## credit
|
||||
|
||||
This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify).
|
||||
|
||||
## Security Policies and Procedures
|
||||
|
||||
The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues.
|
||||
|
||||
## license
|
||||
|
||||
MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis.
|
||||
194 node_modules/readable-web-to-node-stream/node_modules/buffer/index.d.ts (generated, vendored, normal file)
@@ -0,0 +1,194 @@
|
||||
export class Buffer extends Uint8Array {
|
||||
length: number
|
||||
write(string: string, offset?: number, length?: number, encoding?: string): number;
|
||||
toString(encoding?: string, start?: number, end?: number): string;
|
||||
toJSON(): { type: 'Buffer', data: any[] };
|
||||
equals(otherBuffer: Buffer): boolean;
|
||||
compare(otherBuffer: Uint8Array, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
|
||||
copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
|
||||
slice(start?: number, end?: number): Buffer;
|
||||
writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
|
||||
readUInt8(offset: number, noAssert?: boolean): number;
|
||||
readUInt16LE(offset: number, noAssert?: boolean): number;
|
||||
readUInt16BE(offset: number, noAssert?: boolean): number;
|
||||
readUInt32LE(offset: number, noAssert?: boolean): number;
|
||||
readUInt32BE(offset: number, noAssert?: boolean): number;
|
||||
readBigUInt64LE(offset: number): BigInt;
|
||||
readBigUInt64BE(offset: number): BigInt;
|
||||
readInt8(offset: number, noAssert?: boolean): number;
|
||||
readInt16LE(offset: number, noAssert?: boolean): number;
|
||||
readInt16BE(offset: number, noAssert?: boolean): number;
|
||||
readInt32LE(offset: number, noAssert?: boolean): number;
|
||||
readInt32BE(offset: number, noAssert?: boolean): number;
|
||||
readBigInt64LE(offset: number): BigInt;
|
||||
readBigInt64BE(offset: number): BigInt;
|
||||
readFloatLE(offset: number, noAssert?: boolean): number;
|
||||
readFloatBE(offset: number, noAssert?: boolean): number;
|
||||
readDoubleLE(offset: number, noAssert?: boolean): number;
|
||||
readDoubleBE(offset: number, noAssert?: boolean): number;
|
||||
reverse(): this;
|
||||
swap16(): Buffer;
|
||||
swap32(): Buffer;
|
||||
swap64(): Buffer;
|
||||
writeUInt8(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeBigUInt64LE(value: number, offset: number): BigInt;
|
||||
writeBigUInt64BE(value: number, offset: number): BigInt;
|
||||
writeInt8(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeBigInt64LE(value: number, offset: number): BigInt;
|
||||
writeBigInt64BE(value: number, offset: number): BigInt;
|
||||
writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
|
||||
writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
|
||||
fill(value: any, offset?: number, end?: number): this;
|
||||
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
|
||||
lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
|
||||
includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
|
||||
|
||||
/**
|
||||
* Allocates a new buffer containing the given {str}.
|
||||
*
|
||||
* @param str String to store in buffer.
|
||||
* @param encoding encoding to use, optional. Default is 'utf8'
|
||||
*/
|
||||
constructor (str: string, encoding?: string);
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets.
|
||||
*
|
||||
* @param size count of octets to allocate.
|
||||
*/
|
||||
constructor (size: number);
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
*/
|
||||
constructor (array: Uint8Array);
|
||||
/**
|
||||
* Produces a Buffer backed by the same allocated memory as
|
||||
* the given {ArrayBuffer}.
|
||||
*
|
||||
*
|
||||
* @param arrayBuffer The ArrayBuffer with which to share memory.
|
||||
*/
|
||||
constructor (arrayBuffer: ArrayBuffer);
|
||||
/**
|
||||
* Allocates a new buffer containing the given {array} of octets.
|
||||
*
|
||||
* @param array The octets to store.
|
||||
*/
|
||||
constructor (array: any[]);
|
||||
/**
|
||||
* Copies the passed {buffer} data onto a new {Buffer} instance.
|
||||
*
|
||||
* @param buffer The buffer to copy.
|
||||
*/
|
||||
constructor (buffer: Buffer);
|
||||
prototype: Buffer;
|
||||
/**
|
||||
* Allocates a new Buffer using an {array} of octets.
|
||||
*
|
||||
* @param array
|
||||
*/
|
||||
static from(array: any[]): Buffer;
|
||||
/**
|
||||
* When passed a reference to the .buffer property of a TypedArray instance,
|
||||
* the newly created Buffer will share the same allocated memory as the TypedArray.
|
||||
* The optional {byteOffset} and {length} arguments specify a memory range
|
||||
* within the {arrayBuffer} that will be shared by the Buffer.
|
||||
*
|
||||
* @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
|
||||
* @param byteOffset
|
||||
* @param length
|
||||
*/
|
||||
static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
|
||||
/**
|
||||
* Copies the passed {buffer} data onto a new Buffer instance.
|
||||
*
|
||||
* @param buffer
|
||||
*/
|
||||
static from(buffer: Buffer | Uint8Array): Buffer;
|
||||
/**
|
||||
* Creates a new Buffer containing the given JavaScript string {str}.
|
||||
* If provided, the {encoding} parameter identifies the character encoding.
|
||||
* If not provided, {encoding} defaults to 'utf8'.
|
||||
*
|
||||
* @param str
|
||||
*/
|
||||
static from(str: string, encoding?: string): Buffer;
|
||||
/**
|
||||
* Returns true if {obj} is a Buffer
|
||||
*
|
||||
* @param obj object to test.
|
||||
*/
|
||||
static isBuffer(obj: any): obj is Buffer;
|
||||
/**
|
||||
* Returns true if {encoding} is a valid encoding argument.
|
||||
* Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
|
||||
*
|
||||
* @param encoding string to test.
|
||||
*/
|
||||
static isEncoding(encoding: string): boolean;
|
||||
/**
|
||||
* Gives the actual byte length of a string. encoding defaults to 'utf8'.
|
||||
* This is not the same as String.prototype.length since that returns the number of characters in a string.
|
||||
*
|
||||
* @param string string to test.
|
||||
* @param encoding encoding used to evaluate (defaults to 'utf8')
|
||||
*/
|
||||
static byteLength(string: string, encoding?: string): number;
|
||||
/**
|
||||
* Returns a buffer which is the result of concatenating all the buffers in the list together.
|
||||
*
|
||||
* If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
|
||||
* If the list has exactly one item, then the first item of the list is returned.
|
||||
* If the list has more than one item, then a new Buffer is created.
|
||||
*
|
||||
* @param list An array of Buffer objects to concatenate
|
||||
* @param totalLength Total length of the buffers when concatenated.
|
||||
* If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
|
||||
*/
|
||||
static concat(list: Uint8Array[], totalLength?: number): Buffer;
|
||||
/**
|
||||
* The same as buf1.compare(buf2).
|
||||
*/
|
||||
static compare(buf1: Uint8Array, buf2: Uint8Array): number;
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets.
|
||||
*
|
||||
* @param size count of octets to allocate.
|
||||
* @param fill if specified, buffer will be initialized by calling buf.fill(fill).
|
||||
* If parameter is omitted, buffer will be filled with zeros.
|
||||
* @param encoding encoding used for call to buf.fill while initializing
|
||||
*/
|
||||
static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
|
||||
/**
|
||||
* Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
|
||||
* of the newly created Buffer are unknown and may contain sensitive data.
|
||||
*
|
||||
* @param size count of octets to allocate
|
||||
*/
|
||||
static allocUnsafe(size: number): Buffer;
|
||||
/**
|
||||
* Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
|
||||
* of the newly created Buffer are unknown and may contain sensitive data.
|
||||
*
|
||||
* @param size count of octets to allocate
|
||||
*/
|
||||
static allocUnsafeSlow(size: number): Buffer;
|
||||
}
|
||||
2106 node_modules/readable-web-to-node-stream/node_modules/buffer/index.js (generated, vendored, normal file)
File diff suppressed because it is too large
93 node_modules/readable-web-to-node-stream/node_modules/buffer/package.json (generated, vendored, normal file)
@@ -0,0 +1,93 @@
|
||||
{
|
||||
"name": "buffer",
|
||||
"description": "Node.js Buffer API, for the browser",
|
||||
"version": "6.0.3",
|
||||
"author": {
|
||||
"name": "Feross Aboukhadijeh",
|
||||
"email": "feross@feross.org",
|
||||
"url": "https://feross.org"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/feross/buffer/issues"
|
||||
},
|
||||
"contributors": [
|
||||
"Romain Beauxis <toots@rastageeks.org>",
|
||||
"James Halliday <mail@substack.net>"
|
||||
],
|
||||
"dependencies": {
|
||||
"base64-js": "^1.3.1",
|
||||
"ieee754": "^1.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"airtap": "^3.0.0",
|
||||
"benchmark": "^2.1.4",
|
||||
"browserify": "^17.0.0",
|
||||
"concat-stream": "^2.0.0",
|
||||
"hyperquest": "^2.1.3",
|
||||
"is-buffer": "^2.0.5",
|
||||
"is-nan": "^1.3.0",
|
||||
"split": "^1.0.1",
|
||||
"standard": "*",
|
||||
"tape": "^5.0.1",
|
||||
"through2": "^4.0.2",
|
||||
"uglify-js": "^3.11.5"
|
||||
},
|
||||
"homepage": "https://github.com/feross/buffer",
|
||||
"jspm": {
|
||||
"map": {
|
||||
"./index.js": {
|
||||
"node": "@node/buffer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"keywords": [
|
||||
"arraybuffer",
|
||||
"browser",
|
||||
"browserify",
|
||||
"buffer",
|
||||
"compatible",
|
||||
"dataview",
|
||||
"uint8array"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/feross/buffer.git"
|
||||
},
|
||||
"scripts": {
|
||||
"perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html",
|
||||
"perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js",
|
||||
"size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c",
|
||||
"test": "standard && node ./bin/test.js",
|
||||
"test-browser-old": "airtap -- test/*.js",
|
||||
"test-browser-old-local": "airtap --local -- test/*.js",
|
||||
"test-browser-new": "airtap -- test/*.js test/node/*.js",
|
||||
"test-browser-new-local": "airtap --local -- test/*.js test/node/*.js",
|
||||
"test-node": "tape test/*.js test/node/*.js",
|
||||
"update-authors": "./bin/update-authors.sh"
|
||||
},
|
||||
"standard": {
|
||||
"ignore": [
|
||||
"test/node/**/*.js",
|
||||
"test/common.js",
|
||||
"test/_polyfill.js",
|
||||
"perf/**/*.js"
|
||||
]
|
||||
},
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
]
|
||||
}
|
||||
38 node_modules/readable-web-to-node-stream/node_modules/readable-stream/CONTRIBUTING.md (generated, vendored)
@@ -1,38 +0,0 @@
|
||||
# Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
* (a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
* (b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
* (c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
* (d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
|
||||
## Moderation Policy
|
||||
|
||||
The [Node.js Moderation Policy] applies to this WG.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
The [Node.js Code of Conduct][] applies to this WG.
|
||||
|
||||
[Node.js Code of Conduct]:
|
||||
https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
|
||||
[Node.js Moderation Policy]:
|
||||
https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md
|
||||
136 node_modules/readable-web-to-node-stream/node_modules/readable-stream/GOVERNANCE.md (generated, vendored)
@@ -1,136 +0,0 @@
|
||||
### Streams Working Group

The Node.js Streams is jointly governed by a Working Group
(WG)
that is responsible for high-level guidance of the project.

The WG has final authority over this project including:

* Technical direction
* Project governance and process (including this policy)
* Contribution policy
* GitHub repository hosting
* Conduct guidelines
* Maintaining the list of additional Collaborators

For the current list of WG members, see the project
[README.md](./README.md#current-project-team-members).

### Collaborators

The readable-stream GitHub repository is
maintained by the WG and additional Collaborators who are added by the
WG on an ongoing basis.

Individuals making significant and valuable contributions are made
Collaborators and given commit-access to the project. These
individuals are identified by the WG and their addition as
Collaborators is discussed during the WG meeting.

_Note:_ If you make a significant contribution and are not considered
for commit-access log an issue or contact a WG member directly and it
will be brought up in the next WG meeting.

Modifications of the contents of the readable-stream repository are
made on
a collaborative basis. Anybody with a GitHub account may propose a
modification via pull request and it will be considered by the project
Collaborators. All pull requests must be reviewed and accepted by a
Collaborator with sufficient expertise who is able to take full
responsibility for the change. In the case of pull requests proposed
by an existing Collaborator, an additional Collaborator is required
for sign-off. Consensus should be sought if additional Collaborators
participate and there is disagreement around a particular
modification. See _Consensus Seeking Process_ below for further detail
on the consensus model used for governance.

Collaborators may opt to elevate significant or controversial
modifications, or modifications that have not found consensus to the
WG for discussion by assigning the ***WG-agenda*** tag to a pull
request or issue. The WG should serve as the final arbiter where
required.

For the current list of Collaborators, see the project
[README.md](./README.md#members).

### WG Membership

WG seats are not time-limited. There is no fixed size of the WG.
However, the expected target is between 6 and 12, to ensure adequate
coverage of important areas of expertise, balanced with the ability to
make decisions efficiently.

There is no specific set of requirements or qualifications for WG
membership beyond these rules.

The WG may add additional members to the WG by unanimous consensus.

A WG member may be removed from the WG by voluntary resignation, or by
unanimous consensus of all other WG members.

Changes to WG membership should be posted in the agenda, and may be
suggested as any other agenda item (see "WG Meetings" below).

If an addition or removal is proposed during a meeting, and the full
WG is not in attendance to participate, then the addition or removal
is added to the agenda for the subsequent meeting. This is to ensure
that all members are given the opportunity to participate in all
membership decisions. If a WG member is unable to attend a meeting
where a planned membership decision is being made, then their consent
is assumed.

No more than 1/3 of the WG members may be affiliated with the same
employer. If removal or resignation of a WG member, or a change of
employment by a WG member, creates a situation where more than 1/3 of
the WG membership shares an employer, then the situation must be
immediately remedied by the resignation or removal of one or more WG
members affiliated with the over-represented employer(s).

### WG Meetings

The WG meets occasionally on a Google Hangout On Air. A designated moderator
approved by the WG runs the meeting. Each meeting should be
published to YouTube.

Items are added to the WG agenda that are considered contentious or
are modifications of governance, contribution policy, WG membership,
or release process.

The intention of the agenda is not to approve or review all patches;
that should happen continuously on GitHub and be handled by the larger
group of Collaborators.

Any community member or contributor can ask that something be added to
the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
WG member or the moderator can add the item to the agenda by adding
the ***WG-agenda*** tag to the issue.

Prior to each WG meeting the moderator will share the Agenda with
members of the WG. WG members can add any items they like to the
agenda at the beginning of each meeting. The moderator and the WG
cannot veto or remove items.

The WG may invite persons or representatives from certain projects to
participate in a non-voting capacity.

The moderator is responsible for summarizing the discussion of each
agenda item and sends it as a pull request after the meeting.

### Consensus Seeking Process

The WG follows a
[Consensus
Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
decision-making model.

When an agenda item has appeared to reach a consensus the moderator
will ask "Does anyone object?" as a final call for dissent from the
consensus.

If an agenda item cannot reach a consensus a WG member can call for
either a closing vote or a vote to table the issue to the next
meeting. The call for a vote must be seconded by a majority of the WG
or else the discussion will continue. Simple majority wins.

Note that changes to WG membership require a majority consensus. See
"WG Membership" above.
72
node_modules/readable-web-to-node-stream/node_modules/readable-stream/README.md
generated
vendored
@@ -1,27 +1,39 @@
# readable-stream

***Node.js core streams for userland*** [](https://travis-ci.com/nodejs/readable-stream)
**_Node.js core streams for userland_**

[](https://nodei.co/npm/readable-stream/)
[](https://nodei.co/npm/readable-stream/)

[](https://saucelabs.com/u/readabe-stream)
[](https://npm.im/readable-stream)
[](https://www.npmjs.org/package/readable-stream)
[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js)
[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ABrowsers)

```bash
npm install --save readable-stream
npm install readable-stream
```

This package is a mirror of the streams implementations in Node.js.
This package is a mirror of the streams implementations in Node.js 18.19.0.

Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html).
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v18.19.0/docs/api/stream.html).

If you want to guarantee a stable streams base, regardless of what version of
Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
Node you, or the users of your libraries are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).

As of version 2.0.0 **readable-stream** uses semantic versioning.

## Version 4.x.x

v4.x.x of `readable-stream` is a cut from Node 18. This version supports Node 12, 14, 16 and 18, as well as evergreen browsers.
The breaking changes introduced by v4 are composed of the combined breaking changes in:
* [Node v12](https://nodejs.org/en/blog/release/v12.0.0/)
* [Node v13](https://nodejs.org/en/blog/release/v13.0.0/)
* [Node v14](https://nodejs.org/en/blog/release/v14.0.0/)
* [Node v15](https://nodejs.org/en/blog/release/v15.0.0/)
* [Node v16](https://nodejs.org/en/blog/release/v16.0.0/)
* [Node v17](https://nodejs.org/en/blog/release/v17.0.0/)
* [Node v18](https://nodejs.org/en/blog/release/v18.0.0/)

This also includes _many_ new features.

## Version 3.x.x

v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
@@ -48,12 +60,9 @@ v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6,
https://github.com/nodejs/node/pull/17979

## Version 2.x.x

v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.

### Big Thanks

Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce]

# Usage

You can swap your `require('stream')` with `require('readable-stream')`
@@ -69,38 +78,39 @@ const {
  pipeline,
  finished
} = require('readable-stream')
````
```

Note that `require('stream')` will return `Stream`, while
`require('readable-stream')` will return `Readable`. We discourage using
whatever is exported directly, but rather use one of the properties as
shown in the example above.

## Usage In Browsers

You will need a bundler like [`browserify`](https://github.com/browserify/browserify#readme), [`webpack`](https://webpack.js.org/), [`parcel`](https://github.com/parcel-bundler/parcel#readme) or similar. Polyfills are no longer required since version 4.2.0.

# Streams Working Group

`readable-stream` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:

* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
- Addressing stream issues on the Node.js issue tracker.
- Authoring and editing stream documentation within the Node.js project.
- Reviewing changes to stream subclasses within the Node.js project.
- Redirecting changes to streams from the Node.js project to this
  project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
- Assisting in the implementation of stream providers within Node.js.
- Recommending versions of `readable-stream` to be included in Node.js.
- Messaging about the future of streams to give the community advance
  notice of changes.

<a name="members"></a>

## Team Members

* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
  - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
- **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
- **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
  - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com>
* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com>
[sauce]: https://saucelabs.com

- **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com>
- **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl>
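The usage section of the new README above comes down to requiring `readable-stream` instead of the built-in `stream` module and destructuring the classes you need. A minimal sketch of that swap (the iterable-to-stream example itself is illustrative and not taken from the diff):

```js
// Drop-in replacement: same exports as require('stream'), but pinned by the package version.
const { Readable } = require('readable-stream');

// Readable.from turns any iterable into a readable stream.
Readable.from(['readable-', 'stream\n']).pipe(process.stdout);
```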
127
node_modules/readable-web-to-node-stream/node_modules/readable-stream/errors-browser.js
generated
vendored
@@ -1,127 +0,0 @@
'use strict';

function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }

var codes = {};

function createErrorType(code, message, Base) {
  if (!Base) {
    Base = Error;
  }

  function getMessage(arg1, arg2, arg3) {
    if (typeof message === 'string') {
      return message;
    } else {
      return message(arg1, arg2, arg3);
    }
  }

  var NodeError =
  /*#__PURE__*/
  function (_Base) {
    _inheritsLoose(NodeError, _Base);

    function NodeError(arg1, arg2, arg3) {
      return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
    }

    return NodeError;
  }(Base);

  NodeError.prototype.name = Base.name;
  NodeError.prototype.code = code;
  codes[code] = NodeError;
} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js

function oneOf(expected, thing) {
  if (Array.isArray(expected)) {
    var len = expected.length;
    expected = expected.map(function (i) {
      return String(i);
    });

    if (len > 2) {
      return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
    } else if (len === 2) {
      return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
    } else {
      return "of ".concat(thing, " ").concat(expected[0]);
    }
  } else {
    return "of ".concat(thing, " ").concat(String(expected));
  }
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith

function startsWith(str, search, pos) {
  return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith

function endsWith(str, search, this_len) {
  if (this_len === undefined || this_len > str.length) {
    this_len = str.length;
  }

  return str.substring(this_len - search.length, this_len) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes

function includes(str, search, start) {
  if (typeof start !== 'number') {
    start = 0;
  }

  if (start + search.length > str.length) {
    return false;
  } else {
    return str.indexOf(search, start) !== -1;
  }
}

createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
  return 'The value "' + value + '" is invalid for option "' + name + '"';
}, TypeError);
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
  // determiner: 'must be' or 'must not be'
  var determiner;

  if (typeof expected === 'string' && startsWith(expected, 'not ')) {
    determiner = 'must not be';
    expected = expected.replace(/^not /, '');
  } else {
    determiner = 'must be';
  }

  var msg;

  if (endsWith(name, ' argument')) {
    // For cases like 'first argument'
    msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
  } else {
    var type = includes(name, '.') ? 'property' : 'argument';
    msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
  }

  msg += ". Received type ".concat(typeof actual);
  return msg;
}, TypeError);
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
  return 'The ' + name + ' method is not implemented';
});
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
createErrorType('ERR_STREAM_DESTROYED', function (name) {
  return 'Cannot call ' + name + ' after a stream was destroyed';
});
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
  return 'Unknown encoding: ' + arg;
}, TypeError);
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
module.exports.codes = codes;
116
node_modules/readable-web-to-node-stream/node_modules/readable-stream/errors.js
generated
vendored
@@ -1,116 +0,0 @@
'use strict';

const codes = {};

function createErrorType(code, message, Base) {
  if (!Base) {
    Base = Error
  }

  function getMessage (arg1, arg2, arg3) {
    if (typeof message === 'string') {
      return message
    } else {
      return message(arg1, arg2, arg3)
    }
  }

  class NodeError extends Base {
    constructor (arg1, arg2, arg3) {
      super(getMessage(arg1, arg2, arg3));
    }
  }

  NodeError.prototype.name = Base.name;
  NodeError.prototype.code = code;

  codes[code] = NodeError;
}

// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
function oneOf(expected, thing) {
  if (Array.isArray(expected)) {
    const len = expected.length;
    expected = expected.map((i) => String(i));
    if (len > 2) {
      return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
        expected[len - 1];
    } else if (len === 2) {
      return `one of ${thing} ${expected[0]} or ${expected[1]}`;
    } else {
      return `of ${thing} ${expected[0]}`;
    }
  } else {
    return `of ${thing} ${String(expected)}`;
  }
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
function startsWith(str, search, pos) {
  return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
function endsWith(str, search, this_len) {
  if (this_len === undefined || this_len > str.length) {
    this_len = str.length;
  }
  return str.substring(this_len - search.length, this_len) === search;
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
function includes(str, search, start) {
  if (typeof start !== 'number') {
    start = 0;
  }

  if (start + search.length > str.length) {
    return false;
  } else {
    return str.indexOf(search, start) !== -1;
  }
}

createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
  return 'The value "' + value + '" is invalid for option "' + name + '"'
}, TypeError);
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
  // determiner: 'must be' or 'must not be'
  let determiner;
  if (typeof expected === 'string' && startsWith(expected, 'not ')) {
    determiner = 'must not be';
    expected = expected.replace(/^not /, '');
  } else {
    determiner = 'must be';
  }

  let msg;
  if (endsWith(name, ' argument')) {
    // For cases like 'first argument'
    msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
  } else {
    const type = includes(name, '.') ? 'property' : 'argument';
    msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
  }

  msg += `. Received type ${typeof actual}`;
  return msg;
}, TypeError);
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
  return 'The ' + name + ' method is not implemented'
});
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
createErrorType('ERR_STREAM_DESTROYED', function (name) {
  return 'Cannot call ' + name + ' after a stream was destroyed';
});
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
  return 'Unknown encoding: ' + arg
}, TypeError);
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');

module.exports.codes = codes;
@@ -1,17 +0,0 @@
'use strict'

var experimentalWarnings = new Set();

function emitExperimentalWarning(feature) {
  if (experimentalWarnings.has(feature)) return;
  var msg = feature + ' is an experimental feature. This feature could ' +
    'change at any time';
  experimentalWarnings.add(feature);
  process.emitWarning(msg, 'ExperimentalWarning');
}

function noop() {}

module.exports.emitExperimentalWarning = process.emitWarning
  ? emitExperimentalWarning
  : noop;
128
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
@@ -1,126 +1,4 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict'

// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.

'use strict';

/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
};
/*</replacement>*/

module.exports = Duplex;
var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');
require('inherits')(Duplex, Readable);
{
  // Allow the keys array to be GC'ed.
  var keys = objectKeys(Writable.prototype);
  for (var v = 0; v < keys.length; v++) {
    var method = keys[v];
    if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
  }
}
function Duplex(options) {
  if (!(this instanceof Duplex)) return new Duplex(options);
  Readable.call(this, options);
  Writable.call(this, options);
  this.allowHalfOpen = true;
  if (options) {
    if (options.readable === false) this.readable = false;
    if (options.writable === false) this.writable = false;
    if (options.allowHalfOpen === false) {
      this.allowHalfOpen = false;
      this.once('end', onend);
    }
  }
}
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.highWaterMark;
  }
});
Object.defineProperty(Duplex.prototype, 'writableBuffer', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState && this._writableState.getBuffer();
  }
});
Object.defineProperty(Duplex.prototype, 'writableLength', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.length;
  }
});

// the no-half-open enforcer
function onend() {
  // If the writable side ended, then we're ok.
  if (this._writableState.ended) return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  process.nextTick(onEndNT, this);
}
function onEndNT(self) {
  self.end();
}
Object.defineProperty(Duplex.prototype, 'destroyed', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    if (this._readableState === undefined || this._writableState === undefined) {
      return false;
    }
    return this._readableState.destroyed && this._writableState.destroyed;
  },
  set: function set(value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (this._readableState === undefined || this._writableState === undefined) {
      return;
    }

    // backward compatibility, the user is explicitly
    // managing destroyed
    this._readableState.destroyed = value;
    this._writableState.destroyed = value;
  }
});
// Keep this file as an alias for the full stream module.
module.exports = require('./stream').Duplex
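The header comment of the removed implementation describes a duplex stream as one that is both readable and writable. A minimal sketch of that idea using the documented constructor options (`read`, `write`, `final`); the echo stream is an illustrative example, not code from this package:

```js
const { Duplex } = require('readable-stream');

// Echo duplex: whatever is written to the writable side comes back out the readable side.
const echo = new Duplex({
  read() {},                       // data is supplied by write() below
  write(chunk, encoding, callback) {
    this.push(chunk);              // hand the chunk to the readable side
    callback();
  },
  final(callback) {
    this.push(null);               // end the readable side when writing ends
    callback();
  }
});

echo.pipe(process.stdout);
echo.end('hello duplex\n');
```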
@@ -1,37 +1,4 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict'

// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.

'use strict';

module.exports = PassThrough;
var Transform = require('./_stream_transform');
require('inherits')(PassThrough, Transform);
function PassThrough(options) {
  if (!(this instanceof PassThrough)) return new PassThrough(options);
  Transform.call(this, options);
}
PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};
// Keep this file as an alias for the full stream module.
module.exports = require('./stream').PassThrough
1029
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
File diff suppressed because it is too large
@@ -1,190 +1,4 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict'

// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.

'use strict';

module.exports = Transform;
var _require$codes = require('../errors').codes,
  ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
  ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
  ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
  ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
var Duplex = require('./_stream_duplex');
require('inherits')(Transform, Duplex);
function afterTransform(er, data) {
  var ts = this._transformState;
  ts.transforming = false;
  var cb = ts.writecb;
  if (cb === null) {
    return this.emit('error', new ERR_MULTIPLE_CALLBACK());
  }
  ts.writechunk = null;
  ts.writecb = null;
  if (data != null)
    // single equals check for both `null` and `undefined`
    this.push(data);
  cb(er);
  var rs = this._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    this._read(rs.highWaterMark);
  }
}
function Transform(options) {
  if (!(this instanceof Transform)) return new Transform(options);
  Duplex.call(this, options);
  this._transformState = {
    afterTransform: afterTransform.bind(this),
    needTransform: false,
    transforming: false,
    writecb: null,
    writechunk: null,
    writeencoding: null
  };

  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;

  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;
  if (options) {
    if (typeof options.transform === 'function') this._transform = options.transform;
    if (typeof options.flush === 'function') this._flush = options.flush;
  }

  // When the writable side finishes, then flush out anything remaining.
  this.on('prefinish', prefinish);
}
function prefinish() {
  var _this = this;
  if (typeof this._flush === 'function' && !this._readableState.destroyed) {
    this._flush(function (er, data) {
      done(_this, er, data);
    });
  } else {
    done(this, null, null);
  }
}
Transform.prototype.push = function (chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};

// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
  cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
};
Transform.prototype._write = function (chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (!ts.transforming) {
    var rs = this._readableState;
    if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
  }
};

// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
  var ts = this._transformState;
  if (ts.writechunk !== null && !ts.transforming) {
    ts.transforming = true;
    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
  }
};
Transform.prototype._destroy = function (err, cb) {
  Duplex.prototype._destroy.call(this, err, function (err2) {
    cb(err2);
  });
};
function done(stream, er, data) {
  if (er) return stream.emit('error', er);
  if (data != null)
    // single equals check for both `null` and `undefined`
    stream.push(data);

  // TODO(BridgeAR): Write a test for these two error cases
  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
  if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
  return stream.push(null);
}
// Keep this file as an alias for the full stream module.
module.exports = require('./stream').Transform
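The long comment in the removed file explains the `_transform(chunk, encoding, cb)` contract: push zero or more output chunks, then call the callback to ask for the next input chunk. A minimal sketch of a Transform built on that contract (the upper-casing stream is an illustrative example, not code from this package):

```js
const { Transform } = require('readable-stream');

// Upper-cases whatever flows through it.
class Upper extends Transform {
  _transform(chunk, encoding, callback) {
    // push() passes transformed data to the readable side;
    // callback() signals that this chunk has been consumed.
    this.push(chunk.toString().toUpperCase());
    callback();
  }
}

process.stdin.pipe(new Upper()).pipe(process.stdout);
```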
@@ -1,641 +1,4 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
'use strict'
|
||||
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = Writable;
|
||||
|
||||
/* <replacement> */
|
||||
function WriteReq(chunk, encoding, cb) {
|
||||
this.chunk = chunk;
|
||||
this.encoding = encoding;
|
||||
this.callback = cb;
|
||||
this.next = null;
|
||||
}
|
||||
|
||||
// It seems a linked list but it is not
|
||||
// there will be only 2 of these for each stream
|
||||
function CorkedRequest(state) {
|
||||
var _this = this;
|
||||
this.next = null;
|
||||
this.entry = null;
|
||||
this.finish = function () {
|
||||
onCorkedFinish(_this, state);
|
||||
};
|
||||
}
|
||||
/* </replacement> */
|
||||
|
||||
/*<replacement>*/
|
||||
var Duplex;
|
||||
/*</replacement>*/
|
||||
|
||||
Writable.WritableState = WritableState;
|
||||
|
||||
/*<replacement>*/
|
||||
var internalUtil = {
|
||||
deprecate: require('util-deprecate')
|
||||
};
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var Stream = require('./internal/streams/stream');
|
||||
/*</replacement>*/
|
||||
|
||||
var Buffer = require('buffer').Buffer;
|
||||
var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
|
||||
function _uint8ArrayToBuffer(chunk) {
|
||||
return Buffer.from(chunk);
|
||||
}
|
||||
function _isUint8Array(obj) {
|
||||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||||
}
|
||||
var destroyImpl = require('./internal/streams/destroy');
|
||||
var _require = require('./internal/streams/state'),
|
||||
getHighWaterMark = _require.getHighWaterMark;
|
||||
var _require$codes = require('../errors').codes,
|
||||
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
|
||||
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
||||
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
|
||||
ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
|
||||
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
|
||||
ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
|
||||
ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
|
||||
var errorOrDestroy = destroyImpl.errorOrDestroy;
|
||||
require('inherits')(Writable, Stream);
|
||||
function nop() {}
|
||||
function WritableState(options, stream, isDuplex) {
|
||||
Duplex = Duplex || require('./_stream_duplex');
|
||||
options = options || {};
|
||||
|
||||
// Duplex streams are both readable and writable, but share
|
||||
// the same options object.
|
||||
// However, some cases require setting options to different
|
||||
// values for the readable and the writable sides of the duplex stream,
|
||||
// e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
|
||||
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;
|
||||
|
||||
// object stream flag to indicate whether or not this stream
|
||||
// contains buffers or objects.
|
||||
this.objectMode = !!options.objectMode;
|
||||
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
|
||||
|
||||
// the point at which write() starts returning false
|
||||
// Note: 0 is a valid value, means that we always return false if
|
||||
// the entire buffer is not flushed immediately on write()
|
||||
this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);
|
||||
|
||||
// if _final has been called
|
||||
this.finalCalled = false;
|
||||
|
||||
// drain event flag.
|
||||
this.needDrain = false;
|
||||
// at the start of calling end()
|
||||
this.ending = false;
|
||||
// when end() has been called, and returned
|
||||
this.ended = false;
|
||||
// when 'finish' is emitted
|
||||
this.finished = false;
|
||||
|
||||
// has it been destroyed
|
||||
this.destroyed = false;
|
||||
|
||||
// should we decode strings into buffers before passing to _write?
|
||||
// this is here so that some node-core streams can optimize string
|
||||
// handling at a lower level.
|
||||
var noDecode = options.decodeStrings === false;
|
||||
this.decodeStrings = !noDecode;
|
||||
|
||||
// Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||||
|
||||
// not an actual buffer we keep track of, but a measurement
|
||||
// of how much we're waiting to get pushed to some underlying
|
||||
// socket or file.
|
||||
this.length = 0;
|
||||
|
||||
// a flag to see when we're in the middle of a write.
|
||||
this.writing = false;
|
||||
|
||||
// when true all writes will be buffered until .uncork() call
|
||||
this.corked = 0;
|
||||
|
||||
// a flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
this.sync = true;
|
||||
|
||||
// a flag to know if we're processing previously buffered items, which
|
||||
// may call the _write() callback in the same tick, so that we don't
|
||||
// end up in an overlapped onwrite situation.
|
||||
this.bufferProcessing = false;
|
||||
|
||||
// the callback that's passed to _write(chunk,cb)
|
||||
this.onwrite = function (er) {
|
||||
onwrite(stream, er);
|
||||
};
|
||||
|
||||
// the callback that the user supplies to write(chunk,encoding,cb)
|
||||
this.writecb = null;
|
||||
|
||||
// the amount that is being written when _write is called.
|
||||
this.writelen = 0;
|
||||
this.bufferedRequest = null;
|
||||
this.lastBufferedRequest = null;
|
||||
|
||||
// number of pending user-supplied write callbacks
|
||||
// this must be 0 before 'finish' can be emitted
|
||||
this.pendingcb = 0;
|
||||
|
||||
// emit prefinish if the only thing we're waiting for is _write cbs
|
||||
// This is relevant for synchronous Transform streams
|
||||
this.prefinished = false;
|
||||
|
||||
// True if the error was already emitted and should not be thrown again
|
||||
this.errorEmitted = false;
|
||||
|
||||
// Should close be emitted on destroy. Defaults to true.
|
||||
this.emitClose = options.emitClose !== false;
|
||||
|
||||
// Should .destroy() be called after 'finish' (and potentially 'end')
|
||||
this.autoDestroy = !!options.autoDestroy;
|
||||
|
||||
// count buffered requests
|
||||
this.bufferedRequestCount = 0;
|
||||
|
||||
// allocate the first CorkedRequest, there is always
|
||||
// one allocated and free to use, and we maintain at most two
|
||||
this.corkedRequestsFree = new CorkedRequest(this);
|
||||
}
|
||||
WritableState.prototype.getBuffer = function getBuffer() {
|
||||
var current = this.bufferedRequest;
|
||||
var out = [];
|
||||
while (current) {
|
||||
out.push(current);
|
||||
current = current.next;
|
||||
}
|
||||
return out;
|
||||
};
|
||||
(function () {
|
||||
try {
|
||||
Object.defineProperty(WritableState.prototype, 'buffer', {
|
||||
get: internalUtil.deprecate(function writableStateBufferGetter() {
|
||||
return this.getBuffer();
|
||||
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
|
||||
});
|
||||
} catch (_) {}
|
||||
})();
|
||||
|
||||
// Test _writableState for inheritance to account for Duplex streams,
|
||||
// whose prototype chain only points to Readable.
|
||||
var realHasInstance;
|
||||
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
|
||||
realHasInstance = Function.prototype[Symbol.hasInstance];
|
||||
Object.defineProperty(Writable, Symbol.hasInstance, {
|
||||
value: function value(object) {
|
||||
if (realHasInstance.call(this, object)) return true;
|
||||
if (this !== Writable) return false;
|
||||
return object && object._writableState instanceof WritableState;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
realHasInstance = function realHasInstance(object) {
|
||||
return object instanceof this;
|
||||
};
|
||||
}
|
||||
function Writable(options) {
|
||||
Duplex = Duplex || require('./_stream_duplex');
|
||||
|
||||
// Writable ctor is applied to Duplexes, too.
|
||||
// `realHasInstance` is necessary because using plain `instanceof`
|
||||
// would return false, as no `_writableState` property is attached.
|
||||
|
||||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||||
// `_writableState` that would lead to infinite recursion.
|
||||
|
||||
// Checking for a Stream.Duplex instance is faster here instead of inside
|
||||
// the WritableState constructor, at least with V8 6.5
|
||||
var isDuplex = this instanceof Duplex;
|
||||
if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
|
||||
this._writableState = new WritableState(options, this, isDuplex);
|
||||
|
||||
// legacy.
|
||||
this.writable = true;
|
||||
if (options) {
|
||||
if (typeof options.write === 'function') this._write = options.write;
|
||||
if (typeof options.writev === 'function') this._writev = options.writev;
|
||||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||||
if (typeof options.final === 'function') this._final = options.final;
|
||||
}
|
||||
Stream.call(this);
|
||||
}
|
||||
|
||||
// Otherwise people can pipe Writable streams, which is just wrong.
|
||||
Writable.prototype.pipe = function () {
|
||||
errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
|
||||
};
|
||||
function writeAfterEnd(stream, cb) {
|
||||
var er = new ERR_STREAM_WRITE_AFTER_END();
|
||||
// TODO: defer error events consistently everywhere, not just the cb
|
||||
errorOrDestroy(stream, er);
|
||||
process.nextTick(cb, er);
|
||||
}
|
||||
|
||||
// Checks that a user-supplied chunk is valid, especially for the particular
|
||||
// mode the stream is in. Currently this means that `null` is never accepted
|
||||
// and undefined/non-string values are only allowed in object mode.
|
||||
function validChunk(stream, state, chunk, cb) {
|
||||
var er;
|
||||
if (chunk === null) {
|
||||
er = new ERR_STREAM_NULL_VALUES();
|
||||
} else if (typeof chunk !== 'string' && !state.objectMode) {
|
||||
er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
|
||||
}
|
||||
if (er) {
|
||||
errorOrDestroy(stream, er);
|
||||
process.nextTick(cb, er);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
var ret = false;
|
||||
var isBuf = !state.objectMode && _isUint8Array(chunk);
|
||||
if (isBuf && !Buffer.isBuffer(chunk)) {
|
||||
chunk = _uint8ArrayToBuffer(chunk);
|
||||
}
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
|
||||
if (typeof cb !== 'function') cb = nop;
|
||||
if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
|
||||
state.pendingcb++;
|
||||
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
Writable.prototype.cork = function () {
|
||||
this._writableState.corked++;
|
||||
};
|
||||
Writable.prototype.uncork = function () {
|
||||
var state = this._writableState;
|
||||
if (state.corked) {
|
||||
state.corked--;
|
||||
if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
|
||||
}
|
||||
};
|
||||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||||
// node::ParseEncoding() requires lower case.
|
||||
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
|
||||
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
|
||||
this._writableState.defaultEncoding = encoding;
|
||||
return this;
|
||||
};
|
||||
Object.defineProperty(Writable.prototype, 'writableBuffer', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState && this._writableState.getBuffer();
|
||||
}
|
||||
});
|
||||
function decodeChunk(state, chunk, encoding) {
|
||||
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
|
||||
chunk = Buffer.from(chunk, encoding);
|
||||
}
|
||||
return chunk;
|
||||
}
|
||||
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState.highWaterMark;
|
||||
}
|
||||
});
|
||||
|
||||
// if we're already writing something, then just put this
|
||||
// in the queue, and wait our turn. Otherwise, call _write
|
||||
// If we return false, then we need a drain event, so set that flag.
|
||||
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
|
||||
if (!isBuf) {
|
||||
var newChunk = decodeChunk(state, chunk, encoding);
|
||||
if (chunk !== newChunk) {
|
||||
isBuf = true;
|
||||
encoding = 'buffer';
|
||||
chunk = newChunk;
|
||||
}
|
||||
}
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
state.length += len;
|
||||
var ret = state.length < state.highWaterMark;
|
||||
// we must ensure that previous needDrain will not be reset to false.
|
||||
if (!ret) state.needDrain = true;
|
||||
if (state.writing || state.corked) {
|
||||
var last = state.lastBufferedRequest;
|
||||
state.lastBufferedRequest = {
|
||||
chunk: chunk,
|
||||
encoding: encoding,
|
||||
isBuf: isBuf,
|
||||
callback: cb,
|
||||
next: null
|
||||
};
|
||||
if (last) {
|
||||
last.next = state.lastBufferedRequest;
|
||||
} else {
|
||||
state.bufferedRequest = state.lastBufferedRequest;
|
||||
}
|
||||
state.bufferedRequestCount += 1;
|
||||
} else {
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||||
state.writelen = len;
|
||||
state.writecb = cb;
|
||||
state.writing = true;
|
||||
state.sync = true;
|
||||
if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
|
||||
state.sync = false;
|
||||
}
|
||||
function onwriteError(stream, state, sync, er, cb) {
|
||||
--state.pendingcb;
|
||||
if (sync) {
|
||||
// defer the callback if we are being called synchronously
|
||||
// to avoid piling up things on the stack
|
||||
process.nextTick(cb, er);
|
||||
// this can emit finish, and it will always happen
|
||||
// after error
|
||||
process.nextTick(finishMaybe, stream, state);
|
||||
stream._writableState.errorEmitted = true;
|
||||
errorOrDestroy(stream, er);
|
||||
} else {
|
||||
// the caller expects this to happen before if
|
||||
// it is async
|
||||
cb(er);
|
||||
stream._writableState.errorEmitted = true;
|
||||
errorOrDestroy(stream, er);
|
||||
// this can emit finish, but finish must
|
||||
// always follow error
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
}
|
||||
function onwriteStateUpdate(state) {
|
||||
state.writing = false;
|
||||
state.writecb = null;
|
||||
state.length -= state.writelen;
|
||||
state.writelen = 0;
|
||||
}
|
||||
function onwrite(stream, er) {
|
||||
var state = stream._writableState;
|
||||
var sync = state.sync;
|
||||
var cb = state.writecb;
|
||||
if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
|
||||
onwriteStateUpdate(state);
|
||||
if (er) onwriteError(stream, state, sync, er, cb);else {
|
||||
// Check if we're actually ready to finish, but don't emit yet
|
||||
var finished = needFinish(state) || stream.destroyed;
|
||||
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
|
||||
clearBuffer(stream, state);
|
||||
}
|
||||
if (sync) {
|
||||
process.nextTick(afterWrite, stream, state, finished, cb);
|
||||
} else {
|
||||
afterWrite(stream, state, finished, cb);
|
||||
}
|
||||
}
|
||||
}
|
||||
function afterWrite(stream, state, finished, cb) {
|
||||
if (!finished) onwriteDrain(stream, state);
|
||||
state.pendingcb--;
|
||||
cb();
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
|
||||
// Must force callback to be called on nextTick, so that we don't
|
||||
// emit 'drain' before the write() consumer gets the 'false' return
|
||||
// value, and has a chance to attach a 'drain' listener.
|
||||
function onwriteDrain(stream, state) {
|
||||
if (state.length === 0 && state.needDrain) {
|
||||
state.needDrain = false;
|
||||
stream.emit('drain');
|
||||
}
|
||||
}
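
The comment above is the key to backpressure: `'drain'` is deferred so the caller first sees `write()` return `false` and gets a chance to attach a listener. A hedged sketch of that consumer pattern, again using Node's built-in `stream` module rather than this vendored file:

```js
// Minimal sketch of handling backpressure: stop writing when write()
// returns false and resume once 'drain' fires.
const { Writable } = require('stream');

const slowSink = new Writable({
  highWaterMark: 4,
  write(chunk, encoding, callback) {
    setTimeout(callback, 10); // simulate a slow destination
  }
});

function writeMany(start, end) {
  for (let i = start; i < end; i++) {
    if (!slowSink.write(`chunk ${i}\n`)) {
      // Buffer is above highWaterMark: wait for 'drain', then continue.
      slowSink.once('drain', () => writeMany(i + 1, end));
      return;
    }
  }
  slowSink.end();
}

writeMany(0, 20);
```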
|
||||
|
||||
// if there's something in the buffer waiting, then process it
|
||||
function clearBuffer(stream, state) {
|
||||
state.bufferProcessing = true;
|
||||
var entry = state.bufferedRequest;
|
||||
if (stream._writev && entry && entry.next) {
|
||||
// Fast case, write everything using _writev()
|
||||
var l = state.bufferedRequestCount;
|
||||
var buffer = new Array(l);
|
||||
var holder = state.corkedRequestsFree;
|
||||
holder.entry = entry;
|
||||
var count = 0;
|
||||
var allBuffers = true;
|
||||
while (entry) {
|
||||
buffer[count] = entry;
|
||||
if (!entry.isBuf) allBuffers = false;
|
||||
entry = entry.next;
|
||||
count += 1;
|
||||
}
|
||||
buffer.allBuffers = allBuffers;
|
||||
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
|
||||
|
||||
// doWrite is almost always async, defer these to save a bit of time
|
||||
// as the hot path ends with doWrite
|
||||
state.pendingcb++;
|
||||
state.lastBufferedRequest = null;
|
||||
if (holder.next) {
|
||||
state.corkedRequestsFree = holder.next;
|
||||
holder.next = null;
|
||||
} else {
|
||||
state.corkedRequestsFree = new CorkedRequest(state);
|
||||
}
|
||||
state.bufferedRequestCount = 0;
|
||||
} else {
|
||||
// Slow case, write chunks one-by-one
|
||||
while (entry) {
|
||||
var chunk = entry.chunk;
|
||||
var encoding = entry.encoding;
|
||||
var cb = entry.callback;
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
entry = entry.next;
|
||||
state.bufferedRequestCount--;
|
||||
// if we didn't call the onwrite immediately, then
|
||||
// it means that we need to wait until it does.
|
||||
// also, that means that the chunk and cb are currently
|
||||
// being processed, so move the buffer counter past them.
|
||||
if (state.writing) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (entry === null) state.lastBufferedRequest = null;
|
||||
}
|
||||
state.bufferedRequest = entry;
|
||||
state.bufferProcessing = false;
|
||||
}
|
||||
Writable.prototype._write = function (chunk, encoding, cb) {
|
||||
cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
|
||||
};
|
||||
Writable.prototype._writev = null;
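
`_write()` only throws `ERR_METHOD_NOT_IMPLEMENTED` until an implementation is supplied, and `_writev` is optional: when present, `clearBuffer()` above can flush several buffered chunks in one call. A hedged sketch of supplying both (names are illustrative, built on Node's built-in `stream` module):

```js
// Minimal sketch of a Writable that implements both _write() and _writev().
const { Writable } = require('stream');

class LineCollector extends Writable {
  constructor(options) {
    super(options);
    this.lines = [];
  }

  _write(chunk, encoding, callback) {
    this.lines.push(chunk.toString());
    callback();
  }

  // Used instead of _write() when several buffered chunks can be
  // flushed at once (e.g. after uncork()).
  _writev(chunks, callback) {
    for (const { chunk } of chunks) this.lines.push(chunk.toString());
    callback();
  }
}

const collector = new LineCollector();
collector.end('hello\n', () => console.log(collector.lines));
```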
|
||||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
if (typeof chunk === 'function') {
|
||||
cb = chunk;
|
||||
chunk = null;
|
||||
encoding = null;
|
||||
} else if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
|
||||
|
||||
// .end() fully uncorks
|
||||
if (state.corked) {
|
||||
state.corked = 1;
|
||||
this.uncork();
|
||||
}
|
||||
|
||||
// ignore unnecessary end() calls.
|
||||
if (!state.ending) endWritable(this, state, cb);
|
||||
return this;
|
||||
};
|
||||
Object.defineProperty(Writable.prototype, 'writableLength', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState.length;
|
||||
}
|
||||
});
|
||||
function needFinish(state) {
|
||||
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
|
||||
}
|
||||
function callFinal(stream, state) {
|
||||
stream._final(function (err) {
|
||||
state.pendingcb--;
|
||||
if (err) {
|
||||
errorOrDestroy(stream, err);
|
||||
}
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
finishMaybe(stream, state);
|
||||
});
|
||||
}
|
||||
function prefinish(stream, state) {
|
||||
if (!state.prefinished && !state.finalCalled) {
|
||||
if (typeof stream._final === 'function' && !state.destroyed) {
|
||||
state.pendingcb++;
|
||||
state.finalCalled = true;
|
||||
process.nextTick(callFinal, stream, state);
|
||||
} else {
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
}
|
||||
}
|
||||
}
|
||||
function finishMaybe(stream, state) {
|
||||
var need = needFinish(state);
|
||||
if (need) {
|
||||
prefinish(stream, state);
|
||||
if (state.pendingcb === 0) {
|
||||
state.finished = true;
|
||||
stream.emit('finish');
|
||||
if (state.autoDestroy) {
|
||||
// In case of duplex streams we need a way to detect
|
||||
// if the readable side is ready for autoDestroy as well
|
||||
var rState = stream._readableState;
|
||||
if (!rState || rState.autoDestroy && rState.endEmitted) {
|
||||
stream.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return need;
|
||||
}
|
||||
function endWritable(stream, state, cb) {
|
||||
state.ending = true;
|
||||
finishMaybe(stream, state);
|
||||
if (cb) {
|
||||
if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
|
||||
}
|
||||
state.ended = true;
|
||||
stream.writable = false;
|
||||
}
|
||||
function onCorkedFinish(corkReq, state, err) {
|
||||
var entry = corkReq.entry;
|
||||
corkReq.entry = null;
|
||||
while (entry) {
|
||||
var cb = entry.callback;
|
||||
state.pendingcb--;
|
||||
cb(err);
|
||||
entry = entry.next;
|
||||
}
|
||||
|
||||
// reuse the free corkReq.
|
||||
state.corkedRequestsFree.next = corkReq;
|
||||
}
|
||||
Object.defineProperty(Writable.prototype, 'destroyed', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
if (this._writableState === undefined) {
|
||||
return false;
|
||||
}
|
||||
return this._writableState.destroyed;
|
||||
},
|
||||
set: function set(value) {
|
||||
// we ignore the value if the stream
|
||||
// has not been initialized yet
|
||||
if (!this._writableState) {
|
||||
return;
|
||||
}
|
||||
|
||||
// backward compatibility, the user is explicitly
|
||||
// managing destroyed
|
||||
this._writableState.destroyed = value;
|
||||
}
|
||||
});
|
||||
Writable.prototype.destroy = destroyImpl.destroy;
|
||||
Writable.prototype._undestroy = destroyImpl.undestroy;
|
||||
Writable.prototype._destroy = function (err, cb) {
|
||||
cb(err);
|
||||
};
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Writable
|
||||
|
||||
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js (generated, vendored, new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
'use strict'
|
||||
|
||||
const { SymbolDispose } = require('../../ours/primordials')
|
||||
const { AbortError, codes } = require('../../ours/errors')
|
||||
const { isNodeStream, isWebStream, kControllerErrorFunction } = require('./utils')
|
||||
const eos = require('./end-of-stream')
|
||||
const { ERR_INVALID_ARG_TYPE } = codes
|
||||
let addAbortListener
|
||||
|
||||
// This method is inlined here for readable-stream
|
||||
// It also does not allow for signal to not exist on the stream
|
||||
// https://github.com/nodejs/node/pull/36061#discussion_r533718029
|
||||
const validateAbortSignal = (signal, name) => {
|
||||
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
|
||||
}
|
||||
}
|
||||
module.exports.addAbortSignal = function addAbortSignal(signal, stream) {
|
||||
validateAbortSignal(signal, 'signal')
|
||||
if (!isNodeStream(stream) && !isWebStream(stream)) {
|
||||
throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
|
||||
}
|
||||
return module.exports.addAbortSignalNoValidate(signal, stream)
|
||||
}
|
||||
module.exports.addAbortSignalNoValidate = function (signal, stream) {
|
||||
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||
return stream
|
||||
}
|
||||
const onAbort = isNodeStream(stream)
|
||||
? () => {
|
||||
stream.destroy(
|
||||
new AbortError(undefined, {
|
||||
cause: signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
: () => {
|
||||
stream[kControllerErrorFunction](
|
||||
new AbortError(undefined, {
|
||||
cause: signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
if (signal.aborted) {
|
||||
onAbort()
|
||||
} else {
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
const disposable = addAbortListener(signal, onAbort)
|
||||
eos(stream, disposable[SymbolDispose])
|
||||
}
|
||||
return stream
|
||||
}
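
The exported `addAbortSignal(signal, stream)` ties a stream's lifetime to an `AbortSignal`: once the signal aborts, a Node stream is destroyed (and a web stream errored) with an `AbortError`. A hedged usage sketch, using Node's built-in `stream.addAbortSignal` as the counterpart of the helper above:

```js
// Minimal sketch: aborting a read via addAbortSignal().
const { addAbortSignal, Readable } = require('stream');

const ac = new AbortController();
const source = addAbortSignal(ac.signal, Readable.from(['a', 'b', 'c']));

(async () => {
  try {
    ac.abort(); // destroys the stream with an AbortError
    for await (const chunk of source) console.log(chunk);
  } catch (err) {
    if (err.name === 'AbortError') console.log('read was aborted');
  }
})();
```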
|
||||
@@ -1,180 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var _Object$setPrototypeO;
|
||||
function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
||||
function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
|
||||
function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
|
||||
var finished = require('./end-of-stream');
|
||||
var kLastResolve = Symbol('lastResolve');
|
||||
var kLastReject = Symbol('lastReject');
|
||||
var kError = Symbol('error');
|
||||
var kEnded = Symbol('ended');
|
||||
var kLastPromise = Symbol('lastPromise');
|
||||
var kHandlePromise = Symbol('handlePromise');
|
||||
var kStream = Symbol('stream');
|
||||
function createIterResult(value, done) {
|
||||
return {
|
||||
value: value,
|
||||
done: done
|
||||
};
|
||||
}
|
||||
function readAndResolve(iter) {
|
||||
var resolve = iter[kLastResolve];
|
||||
if (resolve !== null) {
|
||||
var data = iter[kStream].read();
|
||||
// we defer if data is null
|
||||
// we can be expecting either 'end' or
|
||||
// 'error'
|
||||
if (data !== null) {
|
||||
iter[kLastPromise] = null;
|
||||
iter[kLastResolve] = null;
|
||||
iter[kLastReject] = null;
|
||||
resolve(createIterResult(data, false));
|
||||
}
|
||||
}
|
||||
}
|
||||
function onReadable(iter) {
|
||||
// we wait for the next tick, because it might
|
||||
// emit an error with process.nextTick
|
||||
process.nextTick(readAndResolve, iter);
|
||||
}
|
||||
function wrapForNext(lastPromise, iter) {
|
||||
return function (resolve, reject) {
|
||||
lastPromise.then(function () {
|
||||
if (iter[kEnded]) {
|
||||
resolve(createIterResult(undefined, true));
|
||||
return;
|
||||
}
|
||||
iter[kHandlePromise](resolve, reject);
|
||||
}, reject);
|
||||
};
|
||||
}
|
||||
var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
|
||||
var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
|
||||
get stream() {
|
||||
return this[kStream];
|
||||
},
|
||||
next: function next() {
|
||||
var _this = this;
|
||||
// if we have detected an error in the meanwhile
|
||||
// reject straight away
|
||||
var error = this[kError];
|
||||
if (error !== null) {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
if (this[kEnded]) {
|
||||
return Promise.resolve(createIterResult(undefined, true));
|
||||
}
|
||||
if (this[kStream].destroyed) {
|
||||
// We need to defer via nextTick because if .destroy(err) is
|
||||
// called, the error will be emitted via nextTick, and
|
||||
// we cannot guarantee that there is no error lingering around
|
||||
// waiting to be emitted.
|
||||
return new Promise(function (resolve, reject) {
|
||||
process.nextTick(function () {
|
||||
if (_this[kError]) {
|
||||
reject(_this[kError]);
|
||||
} else {
|
||||
resolve(createIterResult(undefined, true));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// if we have multiple next() calls
|
||||
// we will wait for the previous Promise to finish
|
||||
// this logic is optimized to support for await loops,
|
||||
// where next() is only called once at a time
|
||||
var lastPromise = this[kLastPromise];
|
||||
var promise;
|
||||
if (lastPromise) {
|
||||
promise = new Promise(wrapForNext(lastPromise, this));
|
||||
} else {
|
||||
// fast path needed to support multiple this.push()
|
||||
// without triggering the next() queue
|
||||
var data = this[kStream].read();
|
||||
if (data !== null) {
|
||||
return Promise.resolve(createIterResult(data, false));
|
||||
}
|
||||
promise = new Promise(this[kHandlePromise]);
|
||||
}
|
||||
this[kLastPromise] = promise;
|
||||
return promise;
|
||||
}
|
||||
}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
|
||||
return this;
|
||||
}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
|
||||
var _this2 = this;
|
||||
// destroy(err, cb) is a private API
|
||||
// we can guarantee we have that here, because we control the
|
||||
// Readable class this is attached to
|
||||
return new Promise(function (resolve, reject) {
|
||||
_this2[kStream].destroy(null, function (err) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
resolve(createIterResult(undefined, true));
|
||||
});
|
||||
});
|
||||
}), _Object$setPrototypeO), AsyncIteratorPrototype);
|
||||
var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
|
||||
var _Object$create;
|
||||
var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
|
||||
value: stream,
|
||||
writable: true
|
||||
}), _defineProperty(_Object$create, kLastResolve, {
|
||||
value: null,
|
||||
writable: true
|
||||
}), _defineProperty(_Object$create, kLastReject, {
|
||||
value: null,
|
||||
writable: true
|
||||
}), _defineProperty(_Object$create, kError, {
|
||||
value: null,
|
||||
writable: true
|
||||
}), _defineProperty(_Object$create, kEnded, {
|
||||
value: stream._readableState.endEmitted,
|
||||
writable: true
|
||||
}), _defineProperty(_Object$create, kHandlePromise, {
|
||||
value: function value(resolve, reject) {
|
||||
var data = iterator[kStream].read();
|
||||
if (data) {
|
||||
iterator[kLastPromise] = null;
|
||||
iterator[kLastResolve] = null;
|
||||
iterator[kLastReject] = null;
|
||||
resolve(createIterResult(data, false));
|
||||
} else {
|
||||
iterator[kLastResolve] = resolve;
|
||||
iterator[kLastReject] = reject;
|
||||
}
|
||||
},
|
||||
writable: true
|
||||
}), _Object$create));
|
||||
iterator[kLastPromise] = null;
|
||||
finished(stream, function (err) {
|
||||
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
|
||||
var reject = iterator[kLastReject];
|
||||
// reject if we are waiting for data in the Promise
|
||||
// returned by next() and store the error
|
||||
if (reject !== null) {
|
||||
iterator[kLastPromise] = null;
|
||||
iterator[kLastResolve] = null;
|
||||
iterator[kLastReject] = null;
|
||||
reject(err);
|
||||
}
|
||||
iterator[kError] = err;
|
||||
return;
|
||||
}
|
||||
var resolve = iterator[kLastResolve];
|
||||
if (resolve !== null) {
|
||||
iterator[kLastPromise] = null;
|
||||
iterator[kLastResolve] = null;
|
||||
iterator[kLastReject] = null;
|
||||
resolve(createIterResult(undefined, true));
|
||||
}
|
||||
iterator[kEnded] = true;
|
||||
});
|
||||
stream.on('readable', onReadable.bind(null, iterator));
|
||||
return iterator;
|
||||
};
|
||||
module.exports = createReadableStreamAsyncIterator;
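
`createReadableStreamAsyncIterator(stream)` is what backs `Symbol.asyncIterator` on `Readable` in this version, so in practice it is consumed through `for await...of`. A minimal sketch, using Node's built-in `stream` module for brevity:

```js
// Minimal sketch: consuming a Readable with for await...of, which is the
// public surface of the async-iterator helper above.
const { Readable } = require('stream');

async function collect(readable) {
  const chunks = [];
  for await (const chunk of readable) chunks.push(chunk);
  return chunks;
}

collect(Readable.from(['one', 'two', 'three'])).then(console.log);
```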
|
||||
@@ -1,183 +1,157 @@
|
||||
'use strict';
|
||||
'use strict'
|
||||
|
||||
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
|
||||
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
|
||||
function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } }
|
||||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
|
||||
function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
|
||||
function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
|
||||
var _require = require('buffer'),
|
||||
Buffer = _require.Buffer;
|
||||
var _require2 = require('util'),
|
||||
inspect = _require2.inspect;
|
||||
var custom = inspect && inspect.custom || 'inspect';
|
||||
function copyBuffer(src, target, offset) {
|
||||
Buffer.prototype.copy.call(src, target, offset);
|
||||
}
|
||||
module.exports = /*#__PURE__*/function () {
|
||||
function BufferList() {
|
||||
_classCallCheck(this, BufferList);
|
||||
this.head = null;
|
||||
this.tail = null;
|
||||
this.length = 0;
|
||||
const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials')
|
||||
const { Buffer } = require('buffer')
|
||||
const { inspect } = require('../../ours/util')
|
||||
module.exports = class BufferList {
|
||||
constructor() {
|
||||
this.head = null
|
||||
this.tail = null
|
||||
this.length = 0
|
||||
}
|
||||
_createClass(BufferList, [{
|
||||
key: "push",
|
||||
value: function push(v) {
|
||||
var entry = {
|
||||
data: v,
|
||||
next: null
|
||||
};
|
||||
if (this.length > 0) this.tail.next = entry;else this.head = entry;
|
||||
this.tail = entry;
|
||||
++this.length;
|
||||
push(v) {
|
||||
const entry = {
|
||||
data: v,
|
||||
next: null
|
||||
}
|
||||
}, {
|
||||
key: "unshift",
|
||||
value: function unshift(v) {
|
||||
var entry = {
|
||||
data: v,
|
||||
next: this.head
|
||||
};
|
||||
if (this.length === 0) this.tail = entry;
|
||||
this.head = entry;
|
||||
++this.length;
|
||||
if (this.length > 0) this.tail.next = entry
|
||||
else this.head = entry
|
||||
this.tail = entry
|
||||
++this.length
|
||||
}
|
||||
unshift(v) {
|
||||
const entry = {
|
||||
data: v,
|
||||
next: this.head
|
||||
}
|
||||
}, {
|
||||
key: "shift",
|
||||
value: function shift() {
|
||||
if (this.length === 0) return;
|
||||
var ret = this.head.data;
|
||||
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
|
||||
--this.length;
|
||||
return ret;
|
||||
}
|
||||
}, {
|
||||
key: "clear",
|
||||
value: function clear() {
|
||||
this.head = this.tail = null;
|
||||
this.length = 0;
|
||||
}
|
||||
}, {
|
||||
key: "join",
|
||||
value: function join(s) {
|
||||
if (this.length === 0) return '';
|
||||
var p = this.head;
|
||||
var ret = '' + p.data;
|
||||
while (p = p.next) ret += s + p.data;
|
||||
return ret;
|
||||
}
|
||||
}, {
|
||||
key: "concat",
|
||||
value: function concat(n) {
|
||||
if (this.length === 0) return Buffer.alloc(0);
|
||||
var ret = Buffer.allocUnsafe(n >>> 0);
|
||||
var p = this.head;
|
||||
var i = 0;
|
||||
while (p) {
|
||||
copyBuffer(p.data, ret, i);
|
||||
i += p.data.length;
|
||||
p = p.next;
|
||||
}
|
||||
return ret;
|
||||
if (this.length === 0) this.tail = entry
|
||||
this.head = entry
|
||||
++this.length
|
||||
}
|
||||
shift() {
|
||||
if (this.length === 0) return
|
||||
const ret = this.head.data
|
||||
if (this.length === 1) this.head = this.tail = null
|
||||
else this.head = this.head.next
|
||||
--this.length
|
||||
return ret
|
||||
}
|
||||
clear() {
|
||||
this.head = this.tail = null
|
||||
this.length = 0
|
||||
}
|
||||
join(s) {
|
||||
if (this.length === 0) return ''
|
||||
let p = this.head
|
||||
let ret = '' + p.data
|
||||
while ((p = p.next) !== null) ret += s + p.data
|
||||
return ret
|
||||
}
|
||||
concat(n) {
|
||||
if (this.length === 0) return Buffer.alloc(0)
|
||||
const ret = Buffer.allocUnsafe(n >>> 0)
|
||||
let p = this.head
|
||||
let i = 0
|
||||
while (p) {
|
||||
TypedArrayPrototypeSet(ret, p.data, i)
|
||||
i += p.data.length
|
||||
p = p.next
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
// Consumes a specified amount of bytes or characters from the buffered data.
|
||||
}, {
|
||||
key: "consume",
|
||||
value: function consume(n, hasStrings) {
|
||||
var ret;
|
||||
if (n < this.head.data.length) {
|
||||
// `slice` is the same for buffers and strings.
|
||||
ret = this.head.data.slice(0, n);
|
||||
this.head.data = this.head.data.slice(n);
|
||||
} else if (n === this.head.data.length) {
|
||||
// First chunk is a perfect match.
|
||||
ret = this.shift();
|
||||
// Consumes a specified amount of bytes or characters from the buffered data.
|
||||
consume(n, hasStrings) {
|
||||
const data = this.head.data
|
||||
if (n < data.length) {
|
||||
// `slice` is the same for buffers and strings.
|
||||
const slice = data.slice(0, n)
|
||||
this.head.data = data.slice(n)
|
||||
return slice
|
||||
}
|
||||
if (n === data.length) {
|
||||
// First chunk is a perfect match.
|
||||
return this.shift()
|
||||
}
|
||||
// Result spans more than one buffer.
|
||||
return hasStrings ? this._getString(n) : this._getBuffer(n)
|
||||
}
|
||||
first() {
|
||||
return this.head.data
|
||||
}
|
||||
*[SymbolIterator]() {
|
||||
for (let p = this.head; p; p = p.next) {
|
||||
yield p.data
|
||||
}
|
||||
}
|
||||
|
||||
// Consumes a specified amount of characters from the buffered data.
|
||||
_getString(n) {
|
||||
let ret = ''
|
||||
let p = this.head
|
||||
let c = 0
|
||||
do {
|
||||
const str = p.data
|
||||
if (n > str.length) {
|
||||
ret += str
|
||||
n -= str.length
|
||||
} else {
|
||||
// Result spans more than one buffer.
|
||||
ret = hasStrings ? this._getString(n) : this._getBuffer(n);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
}, {
|
||||
key: "first",
|
||||
value: function first() {
|
||||
return this.head.data;
|
||||
}
|
||||
|
||||
// Consumes a specified amount of characters from the buffered data.
|
||||
}, {
|
||||
key: "_getString",
|
||||
value: function _getString(n) {
|
||||
var p = this.head;
|
||||
var c = 1;
|
||||
var ret = p.data;
|
||||
n -= ret.length;
|
||||
while (p = p.next) {
|
||||
var str = p.data;
|
||||
var nb = n > str.length ? str.length : n;
|
||||
if (nb === str.length) ret += str;else ret += str.slice(0, n);
|
||||
n -= nb;
|
||||
if (n === 0) {
|
||||
if (nb === str.length) {
|
||||
++c;
|
||||
if (p.next) this.head = p.next;else this.head = this.tail = null;
|
||||
} else {
|
||||
this.head = p;
|
||||
p.data = str.slice(nb);
|
||||
}
|
||||
break;
|
||||
if (n === str.length) {
|
||||
ret += str
|
||||
++c
|
||||
if (p.next) this.head = p.next
|
||||
else this.head = this.tail = null
|
||||
} else {
|
||||
ret += StringPrototypeSlice(str, 0, n)
|
||||
this.head = p
|
||||
p.data = StringPrototypeSlice(str, n)
|
||||
}
|
||||
++c;
|
||||
break
|
||||
}
|
||||
this.length -= c;
|
||||
return ret;
|
||||
}
|
||||
++c
|
||||
} while ((p = p.next) !== null)
|
||||
this.length -= c
|
||||
return ret
|
||||
}
|
||||
|
||||
// Consumes a specified amount of bytes from the buffered data.
|
||||
}, {
|
||||
key: "_getBuffer",
|
||||
value: function _getBuffer(n) {
|
||||
var ret = Buffer.allocUnsafe(n);
|
||||
var p = this.head;
|
||||
var c = 1;
|
||||
p.data.copy(ret);
|
||||
n -= p.data.length;
|
||||
while (p = p.next) {
|
||||
var buf = p.data;
|
||||
var nb = n > buf.length ? buf.length : n;
|
||||
buf.copy(ret, ret.length - n, 0, nb);
|
||||
n -= nb;
|
||||
if (n === 0) {
|
||||
if (nb === buf.length) {
|
||||
++c;
|
||||
if (p.next) this.head = p.next;else this.head = this.tail = null;
|
||||
} else {
|
||||
this.head = p;
|
||||
p.data = buf.slice(nb);
|
||||
}
|
||||
break;
|
||||
// Consumes a specified amount of bytes from the buffered data.
|
||||
_getBuffer(n) {
|
||||
const ret = Buffer.allocUnsafe(n)
|
||||
const retLen = n
|
||||
let p = this.head
|
||||
let c = 0
|
||||
do {
|
||||
const buf = p.data
|
||||
if (n > buf.length) {
|
||||
TypedArrayPrototypeSet(ret, buf, retLen - n)
|
||||
n -= buf.length
|
||||
} else {
|
||||
if (n === buf.length) {
|
||||
TypedArrayPrototypeSet(ret, buf, retLen - n)
|
||||
++c
|
||||
if (p.next) this.head = p.next
|
||||
else this.head = this.tail = null
|
||||
} else {
|
||||
TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n)
|
||||
this.head = p
|
||||
p.data = buf.slice(n)
|
||||
}
|
||||
++c;
|
||||
break
|
||||
}
|
||||
this.length -= c;
|
||||
return ret;
|
||||
}
|
||||
++c
|
||||
} while ((p = p.next) !== null)
|
||||
this.length -= c
|
||||
return ret
|
||||
}
|
||||
|
||||
// Make sure the linked list only shows the minimal necessary information.
|
||||
}, {
|
||||
key: custom,
|
||||
value: function value(_, options) {
|
||||
return inspect(this, _objectSpread(_objectSpread({}, options), {}, {
|
||||
// Only inspect one level.
|
||||
depth: 0,
|
||||
// It should not recurse.
|
||||
customInspect: false
|
||||
}));
|
||||
}
|
||||
}]);
|
||||
return BufferList;
|
||||
}();
|
||||
// Make sure the linked list only shows the minimal necessary information.
|
||||
[Symbol.for('nodejs.util.inspect.custom')](_, options) {
|
||||
return inspect(this, {
|
||||
...options,
|
||||
// Only inspect one level.
|
||||
depth: 0,
|
||||
// It should not recurse.
|
||||
customInspect: false
|
||||
})
|
||||
}
|
||||
}
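
`BufferList` is the internal linked list that holds a stream's buffered chunks: `push`/`shift` add and remove whole chunks, while `concat`/`consume` produce flat results. A hedged sketch of that contract; `BufferList` is internal, so the require path is illustrative only and varies between versions:

```js
// Minimal sketch of the BufferList contract shown above.
const BufferList = require('./buffer_list'); // illustrative internal path

const list = new BufferList();
list.push(Buffer.from('hel'));
list.push(Buffer.from('lo'));

console.log(list.length);                       // 2 chunks buffered
console.log(list.concat(5).toString());         // 'hello' (does not consume)
console.log(list.consume(4, false).toString()); // 'hell' (leaves 'o' buffered)
```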
|
||||
|
||||
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/compose.js (generated, vendored, new file, 194 lines)
@@ -0,0 +1,194 @@
|
||||
'use strict'
|
||||
|
||||
const { pipeline } = require('./pipeline')
|
||||
const Duplex = require('./duplex')
|
||||
const { destroyer } = require('./destroy')
|
||||
const {
|
||||
isNodeStream,
|
||||
isReadable,
|
||||
isWritable,
|
||||
isWebStream,
|
||||
isTransformStream,
|
||||
isWritableStream,
|
||||
isReadableStream
|
||||
} = require('./utils')
|
||||
const {
|
||||
AbortError,
|
||||
codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }
|
||||
} = require('../../ours/errors')
|
||||
const eos = require('./end-of-stream')
|
||||
module.exports = function compose(...streams) {
|
||||
if (streams.length === 0) {
|
||||
throw new ERR_MISSING_ARGS('streams')
|
||||
}
|
||||
if (streams.length === 1) {
|
||||
return Duplex.from(streams[0])
|
||||
}
|
||||
const orgStreams = [...streams]
|
||||
if (typeof streams[0] === 'function') {
|
||||
streams[0] = Duplex.from(streams[0])
|
||||
}
|
||||
if (typeof streams[streams.length - 1] === 'function') {
|
||||
const idx = streams.length - 1
|
||||
streams[idx] = Duplex.from(streams[idx])
|
||||
}
|
||||
for (let n = 0; n < streams.length; ++n) {
|
||||
if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) {
|
||||
// TODO(ronag): Add checks for non streams.
|
||||
continue
|
||||
}
|
||||
if (
|
||||
n < streams.length - 1 &&
|
||||
!(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n]))
|
||||
) {
|
||||
throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable')
|
||||
}
|
||||
if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) {
|
||||
throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable')
|
||||
}
|
||||
}
|
||||
let ondrain
|
||||
let onfinish
|
||||
let onreadable
|
||||
let onclose
|
||||
let d
|
||||
function onfinished(err) {
|
||||
const cb = onclose
|
||||
onclose = null
|
||||
if (cb) {
|
||||
cb(err)
|
||||
} else if (err) {
|
||||
d.destroy(err)
|
||||
} else if (!readable && !writable) {
|
||||
d.destroy()
|
||||
}
|
||||
}
|
||||
const head = streams[0]
|
||||
const tail = pipeline(streams, onfinished)
|
||||
const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head))
|
||||
const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail))
|
||||
|
||||
// TODO(ronag): Avoid double buffering.
|
||||
// Implement Writable/Readable/Duplex traits.
|
||||
// See, https://github.com/nodejs/node/pull/33515.
|
||||
d = new Duplex({
|
||||
// TODO (ronag): highWaterMark?
|
||||
writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode),
|
||||
readableObjectMode: !!(tail !== null && tail !== undefined && tail.readableObjectMode),
|
||||
writable,
|
||||
readable
|
||||
})
|
||||
if (writable) {
|
||||
if (isNodeStream(head)) {
|
||||
d._write = function (chunk, encoding, callback) {
|
||||
if (head.write(chunk, encoding)) {
|
||||
callback()
|
||||
} else {
|
||||
ondrain = callback
|
||||
}
|
||||
}
|
||||
d._final = function (callback) {
|
||||
head.end()
|
||||
onfinish = callback
|
||||
}
|
||||
head.on('drain', function () {
|
||||
if (ondrain) {
|
||||
const cb = ondrain
|
||||
ondrain = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
} else if (isWebStream(head)) {
|
||||
const writable = isTransformStream(head) ? head.writable : head
|
||||
const writer = writable.getWriter()
|
||||
d._write = async function (chunk, encoding, callback) {
|
||||
try {
|
||||
await writer.ready
|
||||
writer.write(chunk).catch(() => {})
|
||||
callback()
|
||||
} catch (err) {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
d._final = async function (callback) {
|
||||
try {
|
||||
await writer.ready
|
||||
writer.close().catch(() => {})
|
||||
onfinish = callback
|
||||
} catch (err) {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
const toRead = isTransformStream(tail) ? tail.readable : tail
|
||||
eos(toRead, () => {
|
||||
if (onfinish) {
|
||||
const cb = onfinish
|
||||
onfinish = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
}
|
||||
if (readable) {
|
||||
if (isNodeStream(tail)) {
|
||||
tail.on('readable', function () {
|
||||
if (onreadable) {
|
||||
const cb = onreadable
|
||||
onreadable = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
tail.on('end', function () {
|
||||
d.push(null)
|
||||
})
|
||||
d._read = function () {
|
||||
while (true) {
|
||||
const buf = tail.read()
|
||||
if (buf === null) {
|
||||
onreadable = d._read
|
||||
return
|
||||
}
|
||||
if (!d.push(buf)) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isWebStream(tail)) {
|
||||
const readable = isTransformStream(tail) ? tail.readable : tail
|
||||
const reader = readable.getReader()
|
||||
d._read = async function () {
|
||||
while (true) {
|
||||
try {
|
||||
const { value, done } = await reader.read()
|
||||
if (!d.push(value)) {
|
||||
return
|
||||
}
|
||||
if (done) {
|
||||
d.push(null)
|
||||
return
|
||||
}
|
||||
} catch {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
d._destroy = function (err, callback) {
|
||||
if (!err && onclose !== null) {
|
||||
err = new AbortError()
|
||||
}
|
||||
onreadable = null
|
||||
ondrain = null
|
||||
onfinish = null
|
||||
if (onclose === null) {
|
||||
callback(err)
|
||||
} else {
|
||||
onclose = callback
|
||||
if (isNodeStream(tail)) {
|
||||
destroyer(tail, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return d
|
||||
}
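
`compose(...streams)` pipelines its arguments and wraps the whole chain in one Duplex whose writable side is the first stream and whose readable side is the last. A hedged sketch using the built-in `stream.compose` (Node >= 16.9) as a stand-in for the vendored implementation above:

```js
// Minimal sketch of compose(): chain two transforms into a single Duplex.
const { compose, Readable } = require('stream');

const upper = async function* (source) {
  for await (const chunk of source) yield String(chunk).toUpperCase();
};
const exclaim = async function* (source) {
  for await (const chunk of source) yield `${chunk}!`;
};

const shout = compose(upper, exclaim); // one Duplex wrapping both steps

(async () => {
  for await (const out of Readable.from(['hi', 'there']).pipe(shout)) {
    console.log(out); // HI!  THERE!
  }
})();
```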
|
||||
@@ -1,96 +1,290 @@
|
||||
'use strict';
|
||||
'use strict'
|
||||
|
||||
// undocumented cb() API, needed for core, not for public API
|
||||
function destroy(err, cb) {
|
||||
var _this = this;
|
||||
var readableDestroyed = this._readableState && this._readableState.destroyed;
|
||||
var writableDestroyed = this._writableState && this._writableState.destroyed;
|
||||
if (readableDestroyed || writableDestroyed) {
|
||||
if (cb) {
|
||||
cb(err);
|
||||
} else if (err) {
|
||||
if (!this._writableState) {
|
||||
process.nextTick(emitErrorNT, this, err);
|
||||
} else if (!this._writableState.errorEmitted) {
|
||||
this._writableState.errorEmitted = true;
|
||||
process.nextTick(emitErrorNT, this, err);
|
||||
}
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const {
|
||||
aggregateTwoErrors,
|
||||
codes: { ERR_MULTIPLE_CALLBACK },
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { Symbol } = require('../../ours/primordials')
|
||||
const { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = require('./utils')
|
||||
const kDestroy = Symbol('kDestroy')
|
||||
const kConstruct = Symbol('kConstruct')
|
||||
function checkError(err, w, r) {
|
||||
if (err) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
err.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (w && !w.errored) {
|
||||
w.errored = err
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
// we set destroyed to true before firing error callbacks in order
|
||||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||||
|
||||
if (this._readableState) {
|
||||
this._readableState.destroyed = true;
|
||||
}
|
||||
|
||||
// if this is a duplex stream mark the writable part as destroyed as well
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = true;
|
||||
}
|
||||
this._destroy(err || null, function (err) {
|
||||
if (!cb && err) {
|
||||
if (!_this._writableState) {
|
||||
process.nextTick(emitErrorAndCloseNT, _this, err);
|
||||
} else if (!_this._writableState.errorEmitted) {
|
||||
_this._writableState.errorEmitted = true;
|
||||
process.nextTick(emitErrorAndCloseNT, _this, err);
|
||||
} else {
|
||||
process.nextTick(emitCloseNT, _this);
|
||||
}
|
||||
} else if (cb) {
|
||||
process.nextTick(emitCloseNT, _this);
|
||||
cb(err);
|
||||
} else {
|
||||
process.nextTick(emitCloseNT, _this);
|
||||
if (r && !r.errored) {
|
||||
r.errored = err
|
||||
}
|
||||
});
|
||||
return this;
|
||||
}
|
||||
}
|
||||
function emitErrorAndCloseNT(self, err) {
|
||||
emitErrorNT(self, err);
|
||||
emitCloseNT(self);
|
||||
|
||||
// Backwards compat. cb() is undocumented and unused in core but
|
||||
// unfortunately might be used by modules.
|
||||
function destroy(err, cb) {
|
||||
const r = this._readableState
|
||||
const w = this._writableState
|
||||
// With duplex streams we use the writable side for state.
|
||||
const s = w || r
|
||||
if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
|
||||
if (typeof cb === 'function') {
|
||||
cb()
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
// We set destroyed to true before firing error callbacks in order
|
||||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||||
checkError(err, w, r)
|
||||
if (w) {
|
||||
w.destroyed = true
|
||||
}
|
||||
if (r) {
|
||||
r.destroyed = true
|
||||
}
|
||||
|
||||
// If still constructing then defer calling _destroy.
|
||||
if (!s.constructed) {
|
||||
this.once(kDestroy, function (er) {
|
||||
_destroy(this, aggregateTwoErrors(er, err), cb)
|
||||
})
|
||||
} else {
|
||||
_destroy(this, err, cb)
|
||||
}
|
||||
return this
|
||||
}
|
||||
function _destroy(self, err, cb) {
|
||||
let called = false
|
||||
function onDestroy(err) {
|
||||
if (called) {
|
||||
return
|
||||
}
|
||||
called = true
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
checkError(err, w, r)
|
||||
if (w) {
|
||||
w.closed = true
|
||||
}
|
||||
if (r) {
|
||||
r.closed = true
|
||||
}
|
||||
if (typeof cb === 'function') {
|
||||
cb(err)
|
||||
}
|
||||
if (err) {
|
||||
process.nextTick(emitErrorCloseNT, self, err)
|
||||
} else {
|
||||
process.nextTick(emitCloseNT, self)
|
||||
}
|
||||
}
|
||||
try {
|
||||
self._destroy(err || null, onDestroy)
|
||||
} catch (err) {
|
||||
onDestroy(err)
|
||||
}
|
||||
}
|
||||
function emitErrorCloseNT(self, err) {
|
||||
emitErrorNT(self, err)
|
||||
emitCloseNT(self)
|
||||
}
|
||||
function emitCloseNT(self) {
|
||||
if (self._writableState && !self._writableState.emitClose) return;
|
||||
if (self._readableState && !self._readableState.emitClose) return;
|
||||
self.emit('close');
|
||||
}
|
||||
function undestroy() {
|
||||
if (this._readableState) {
|
||||
this._readableState.destroyed = false;
|
||||
this._readableState.reading = false;
|
||||
this._readableState.ended = false;
|
||||
this._readableState.endEmitted = false;
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
if (w) {
|
||||
w.closeEmitted = true
|
||||
}
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = false;
|
||||
this._writableState.ended = false;
|
||||
this._writableState.ending = false;
|
||||
this._writableState.finalCalled = false;
|
||||
this._writableState.prefinished = false;
|
||||
this._writableState.finished = false;
|
||||
this._writableState.errorEmitted = false;
|
||||
if (r) {
|
||||
r.closeEmitted = true
|
||||
}
|
||||
if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) {
|
||||
self.emit('close')
|
||||
}
|
||||
}
|
||||
function emitErrorNT(self, err) {
|
||||
self.emit('error', err);
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) {
|
||||
return
|
||||
}
|
||||
if (w) {
|
||||
w.errorEmitted = true
|
||||
}
|
||||
if (r) {
|
||||
r.errorEmitted = true
|
||||
}
|
||||
self.emit('error', err)
|
||||
}
|
||||
function errorOrDestroy(stream, err) {
|
||||
function undestroy() {
|
||||
const r = this._readableState
|
||||
const w = this._writableState
|
||||
if (r) {
|
||||
r.constructed = true
|
||||
r.closed = false
|
||||
r.closeEmitted = false
|
||||
r.destroyed = false
|
||||
r.errored = null
|
||||
r.errorEmitted = false
|
||||
r.reading = false
|
||||
r.ended = r.readable === false
|
||||
r.endEmitted = r.readable === false
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = true
|
||||
w.destroyed = false
|
||||
w.closed = false
|
||||
w.closeEmitted = false
|
||||
w.errored = null
|
||||
w.errorEmitted = false
|
||||
w.finalCalled = false
|
||||
w.prefinished = false
|
||||
w.ended = w.writable === false
|
||||
w.ending = w.writable === false
|
||||
w.finished = w.writable === false
|
||||
}
|
||||
}
|
||||
function errorOrDestroy(stream, err, sync) {
|
||||
// We have tests that rely on errors being emitted
|
||||
// in the same tick, so changing this is semver major.
|
||||
// For now when you opt-in to autoDestroy we allow
|
||||
// the error to be emitted nextTick. In a future
|
||||
// semver major update we should change the default to this.
|
||||
|
||||
var rState = stream._readableState;
|
||||
var wState = stream._writableState;
|
||||
if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
|
||||
return this
|
||||
}
|
||||
if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy))
|
||||
stream.destroy(err)
|
||||
else if (err) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
err.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (w && !w.errored) {
|
||||
w.errored = err
|
||||
}
|
||||
if (r && !r.errored) {
|
||||
r.errored = err
|
||||
}
|
||||
if (sync) {
|
||||
process.nextTick(emitErrorNT, stream, err)
|
||||
} else {
|
||||
emitErrorNT(stream, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
function construct(stream, cb) {
|
||||
if (typeof stream._construct !== 'function') {
|
||||
return
|
||||
}
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
if (r) {
|
||||
r.constructed = false
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = false
|
||||
}
|
||||
stream.once(kConstruct, cb)
|
||||
if (stream.listenerCount(kConstruct) > 1) {
|
||||
// Duplex
|
||||
return
|
||||
}
|
||||
process.nextTick(constructNT, stream)
|
||||
}
|
||||
function constructNT(stream) {
|
||||
let called = false
|
||||
function onConstruct(err) {
|
||||
if (called) {
|
||||
errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK())
|
||||
return
|
||||
}
|
||||
called = true
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
const s = w || r
|
||||
if (r) {
|
||||
r.constructed = true
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = true
|
||||
}
|
||||
if (s.destroyed) {
|
||||
stream.emit(kDestroy, err)
|
||||
} else if (err) {
|
||||
errorOrDestroy(stream, err, true)
|
||||
} else {
|
||||
process.nextTick(emitConstructNT, stream)
|
||||
}
|
||||
}
|
||||
try {
|
||||
stream._construct((err) => {
|
||||
process.nextTick(onConstruct, err)
|
||||
})
|
||||
} catch (err) {
|
||||
process.nextTick(onConstruct, err)
|
||||
}
|
||||
}
|
||||
function emitConstructNT(stream) {
|
||||
stream.emit(kConstruct)
|
||||
}
|
||||
function isRequest(stream) {
|
||||
return (stream === null || stream === undefined ? undefined : stream.setHeader) && typeof stream.abort === 'function'
|
||||
}
|
||||
function emitCloseLegacy(stream) {
|
||||
stream.emit('close')
|
||||
}
|
||||
function emitErrorCloseLegacy(stream, err) {
|
||||
stream.emit('error', err)
|
||||
process.nextTick(emitCloseLegacy, stream)
|
||||
}
|
||||
|
||||
// Normalize destroy for legacy.
|
||||
function destroyer(stream, err) {
|
||||
if (!stream || isDestroyed(stream)) {
|
||||
return
|
||||
}
|
||||
if (!err && !isFinished(stream)) {
|
||||
err = new AbortError()
|
||||
}
|
||||
|
||||
// TODO: Remove isRequest branches.
|
||||
if (isServerRequest(stream)) {
|
||||
stream.socket = null
|
||||
stream.destroy(err)
|
||||
} else if (isRequest(stream)) {
|
||||
stream.abort()
|
||||
} else if (isRequest(stream.req)) {
|
||||
stream.req.abort()
|
||||
} else if (typeof stream.destroy === 'function') {
|
||||
stream.destroy(err)
|
||||
} else if (typeof stream.close === 'function') {
|
||||
// TODO: Don't lose err?
|
||||
stream.close()
|
||||
} else if (err) {
|
||||
process.nextTick(emitErrorCloseLegacy, stream, err)
|
||||
} else {
|
||||
process.nextTick(emitCloseLegacy, stream)
|
||||
}
|
||||
if (!stream.destroyed) {
|
||||
stream[kIsDestroyed] = true
|
||||
}
|
||||
}
|
||||
module.exports = {
|
||||
destroy: destroy,
|
||||
undestroy: undestroy,
|
||||
errorOrDestroy: errorOrDestroy
|
||||
};
|
||||
construct,
|
||||
destroyer,
|
||||
destroy,
|
||||
undestroy,
|
||||
errorOrDestroy
|
||||
}
|
||||
|
||||
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/duplex.js (generated, vendored, new file, 143 lines)
@@ -0,0 +1,143 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a duplex stream is just a stream that is both readable and writable.
|
||||
// Since JS doesn't have multiple prototype inheritance, this class
|
||||
// prototypically inherits from Readable, and then parasitically from
|
||||
// Writable.
|
||||
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ObjectDefineProperties,
|
||||
ObjectGetOwnPropertyDescriptor,
|
||||
ObjectKeys,
|
||||
ObjectSetPrototypeOf
|
||||
} = require('../../ours/primordials')
|
||||
module.exports = Duplex
|
||||
const Readable = require('./readable')
|
||||
const Writable = require('./writable')
|
||||
ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype)
|
||||
ObjectSetPrototypeOf(Duplex, Readable)
|
||||
{
|
||||
const keys = ObjectKeys(Writable.prototype)
|
||||
// Allow the keys array to be GC'ed.
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const method = keys[i]
|
||||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]
|
||||
}
|
||||
}
|
||||
function Duplex(options) {
|
||||
if (!(this instanceof Duplex)) return new Duplex(options)
|
||||
Readable.call(this, options)
|
||||
Writable.call(this, options)
|
||||
if (options) {
|
||||
this.allowHalfOpen = options.allowHalfOpen !== false
|
||||
if (options.readable === false) {
|
||||
this._readableState.readable = false
|
||||
this._readableState.ended = true
|
||||
this._readableState.endEmitted = true
|
||||
}
|
||||
if (options.writable === false) {
|
||||
this._writableState.writable = false
|
||||
this._writableState.ending = true
|
||||
this._writableState.ended = true
|
||||
this._writableState.finished = true
|
||||
}
|
||||
} else {
|
||||
this.allowHalfOpen = true
|
||||
}
|
||||
}
|
||||
ObjectDefineProperties(Duplex.prototype, {
|
||||
writable: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable')
|
||||
},
|
||||
writableHighWaterMark: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark')
|
||||
},
|
||||
writableObjectMode: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode')
|
||||
},
|
||||
writableBuffer: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer')
|
||||
},
|
||||
writableLength: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength')
|
||||
},
|
||||
writableFinished: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished')
|
||||
},
|
||||
writableCorked: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked')
|
||||
},
|
||||
writableEnded: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded')
|
||||
},
|
||||
writableNeedDrain: {
|
||||
__proto__: null,
|
||||
...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain')
|
||||
},
|
||||
destroyed: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
if (this._readableState === undefined || this._writableState === undefined) {
|
||||
return false
|
||||
}
|
||||
return this._readableState.destroyed && this._writableState.destroyed
|
||||
},
|
||||
set(value) {
|
||||
// Backward compatibility, the user is explicitly
|
||||
// managing destroyed.
|
||||
if (this._readableState && this._writableState) {
|
||||
this._readableState.destroyed = value
|
||||
this._writableState.destroyed = value
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
let webStreamsAdapters
|
||||
|
||||
// Lazy to avoid circular references
|
||||
function lazyWebStreams() {
|
||||
if (webStreamsAdapters === undefined) webStreamsAdapters = {}
|
||||
return webStreamsAdapters
|
||||
}
|
||||
Duplex.fromWeb = function (pair, options) {
|
||||
return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options)
|
||||
}
|
||||
Duplex.toWeb = function (duplex) {
|
||||
return lazyWebStreams().newReadableWritablePairFromDuplex(duplex)
|
||||
}
|
||||
let duplexify
|
||||
Duplex.from = function (body) {
|
||||
if (!duplexify) {
|
||||
duplexify = require('./duplexify')
|
||||
}
|
||||
return duplexify(body, 'body')
|
||||
}
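
`Duplex.from(body)` delegates to `duplexify` (next file), which normalizes Node streams, web streams, iterables, async functions, promises, and `{ readable, writable }` pairs into a Duplex. A hedged sketch using Node's built-in `stream.Duplex.from` (Node >= 16.8):

```js
// Minimal sketch: Duplex.from() turning an async generator function into a
// transform-like Duplex.
const { Duplex, Readable, pipeline } = require('stream');

const double = Duplex.from(async function* (source) {
  for await (const chunk of source) {
    yield `${chunk}${chunk}`;
  }
});

pipeline(Readable.from(['a', 'b']), double, process.stdout, (err) => {
  if (err) console.error('pipeline failed:', err);
});
```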
|
||||
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/duplexify.js (generated, vendored, new file, 378 lines)
@@ -0,0 +1,378 @@
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
;('use strict')
|
||||
const bufferModule = require('buffer')
|
||||
const {
|
||||
isReadable,
|
||||
isWritable,
|
||||
isIterable,
|
||||
isNodeStream,
|
||||
isReadableNodeStream,
|
||||
isWritableNodeStream,
|
||||
isDuplexNodeStream,
|
||||
isReadableStream,
|
||||
isWritableStream
|
||||
} = require('./utils')
|
||||
const eos = require('./end-of-stream')
|
||||
const {
|
||||
AbortError,
|
||||
codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE }
|
||||
} = require('../../ours/errors')
|
||||
const { destroyer } = require('./destroy')
|
||||
const Duplex = require('./duplex')
|
||||
const Readable = require('./readable')
|
||||
const Writable = require('./writable')
|
||||
const { createDeferredPromise } = require('../../ours/util')
|
||||
const from = require('./from')
|
||||
const Blob = globalThis.Blob || bufferModule.Blob
|
||||
const isBlob =
|
||||
typeof Blob !== 'undefined'
|
||||
? function isBlob(b) {
|
||||
return b instanceof Blob
|
||||
}
|
||||
: function isBlob(b) {
|
||||
return false
|
||||
}
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
const { FunctionPrototypeCall } = require('../../ours/primordials')
|
||||
|
||||
// This is needed for pre node 17.
|
||||
class Duplexify extends Duplex {
|
||||
constructor(options) {
|
||||
super(options)
|
||||
|
||||
// https://github.com/nodejs/node/pull/34385
|
||||
|
||||
if ((options === null || options === undefined ? undefined : options.readable) === false) {
|
||||
this._readableState.readable = false
|
||||
this._readableState.ended = true
|
||||
this._readableState.endEmitted = true
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.writable) === false) {
|
||||
this._writableState.writable = false
|
||||
this._writableState.ending = true
|
||||
this._writableState.ended = true
|
||||
this._writableState.finished = true
|
||||
}
|
||||
}
|
||||
}
|
||||
module.exports = function duplexify(body, name) {
|
||||
if (isDuplexNodeStream(body)) {
|
||||
return body
|
||||
}
|
||||
if (isReadableNodeStream(body)) {
|
||||
return _duplexify({
|
||||
readable: body
|
||||
})
|
||||
}
|
||||
if (isWritableNodeStream(body)) {
|
||||
return _duplexify({
|
||||
writable: body
|
||||
})
|
||||
}
|
||||
if (isNodeStream(body)) {
|
||||
return _duplexify({
|
||||
writable: false,
|
||||
readable: false
|
||||
})
|
||||
}
|
||||
if (isReadableStream(body)) {
|
||||
return _duplexify({
|
||||
readable: Readable.fromWeb(body)
|
||||
})
|
||||
}
|
||||
if (isWritableStream(body)) {
|
||||
return _duplexify({
|
||||
writable: Writable.fromWeb(body)
|
||||
})
|
||||
}
|
||||
if (typeof body === 'function') {
|
||||
const { value, write, final, destroy } = fromAsyncGen(body)
|
||||
if (isIterable(value)) {
|
||||
return from(Duplexify, value, {
|
||||
// TODO (ronag): highWaterMark?
|
||||
objectMode: true,
|
||||
write,
|
||||
final,
|
||||
destroy
|
||||
})
|
||||
}
|
||||
const then = value === null || value === undefined ? undefined : value.then
|
||||
if (typeof then === 'function') {
|
||||
let d
|
||||
const promise = FunctionPrototypeCall(
|
||||
then,
|
||||
value,
|
||||
(val) => {
|
||||
if (val != null) {
|
||||
throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val)
|
||||
}
|
||||
},
|
||||
(err) => {
|
||||
destroyer(d, err)
|
||||
}
|
||||
)
|
||||
return (d = new Duplexify({
|
||||
// TODO (ronag): highWaterMark?
|
||||
objectMode: true,
|
||||
readable: false,
|
||||
write,
|
||||
final(cb) {
|
||||
final(async () => {
|
||||
try {
|
||||
await promise
|
||||
process.nextTick(cb, null)
|
||||
} catch (err) {
|
||||
process.nextTick(cb, err)
|
||||
}
|
||||
})
|
||||
},
|
||||
destroy
|
||||
}))
|
||||
}
|
||||
throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value)
|
||||
}
|
||||
if (isBlob(body)) {
|
||||
return duplexify(body.arrayBuffer())
|
||||
}
|
||||
if (isIterable(body)) {
|
||||
return from(Duplexify, body, {
|
||||
// TODO (ronag): highWaterMark?
|
||||
objectMode: true,
|
||||
writable: false
|
||||
})
|
||||
}
|
||||
if (
|
||||
isReadableStream(body === null || body === undefined ? undefined : body.readable) &&
|
||||
isWritableStream(body === null || body === undefined ? undefined : body.writable)
|
||||
) {
|
||||
return Duplexify.fromWeb(body)
|
||||
}
|
||||
if (
|
||||
typeof (body === null || body === undefined ? undefined : body.writable) === 'object' ||
|
||||
typeof (body === null || body === undefined ? undefined : body.readable) === 'object'
|
||||
) {
|
||||
const readable =
|
||||
body !== null && body !== undefined && body.readable
|
||||
? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable)
|
||||
? body === null || body === undefined
|
||||
? undefined
|
||||
: body.readable
|
||||
: duplexify(body.readable)
|
||||
: undefined
|
||||
const writable =
|
||||
body !== null && body !== undefined && body.writable
|
||||
? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable)
|
||||
? body === null || body === undefined
|
||||
? undefined
|
||||
: body.writable
|
||||
: duplexify(body.writable)
|
||||
: undefined
|
||||
return _duplexify({
|
||||
readable,
|
||||
writable
|
||||
})
|
||||
}
|
||||
const then = body === null || body === undefined ? undefined : body.then
|
||||
if (typeof then === 'function') {
|
||||
let d
|
||||
FunctionPrototypeCall(
|
||||
then,
|
||||
body,
|
||||
(val) => {
|
||||
if (val != null) {
|
||||
d.push(val)
|
||||
}
|
||||
d.push(null)
|
||||
},
|
||||
(err) => {
|
||||
destroyer(d, err)
|
||||
}
|
||||
)
|
||||
return (d = new Duplexify({
|
||||
objectMode: true,
|
||||
writable: false,
|
||||
read() {}
|
||||
}))
|
||||
}
|
||||
throw new ERR_INVALID_ARG_TYPE(
|
||||
name,
|
||||
[
|
||||
'Blob',
|
||||
'ReadableStream',
|
||||
'WritableStream',
|
||||
'Stream',
|
||||
'Iterable',
|
||||
'AsyncIterable',
|
||||
'Function',
|
||||
'{ readable, writable } pair',
|
||||
'Promise'
|
||||
],
|
||||
body
|
||||
)
|
||||
}
|
||||
function fromAsyncGen(fn) {
|
||||
let { promise, resolve } = createDeferredPromise()
|
||||
const ac = new AbortController()
|
||||
const signal = ac.signal
|
||||
const value = fn(
|
||||
(async function* () {
|
||||
while (true) {
|
||||
const _promise = promise
|
||||
promise = null
|
||||
const { chunk, done, cb } = await _promise
|
||||
process.nextTick(cb)
|
||||
if (done) return
|
||||
if (signal.aborted)
|
||||
throw new AbortError(undefined, {
|
||||
cause: signal.reason
|
||||
})
|
||||
;({ promise, resolve } = createDeferredPromise())
|
||||
yield chunk
|
||||
}
|
||||
})(),
|
||||
{
|
||||
signal
|
||||
}
|
||||
)
|
||||
return {
|
||||
value,
|
||||
write(chunk, encoding, cb) {
|
||||
const _resolve = resolve
|
||||
resolve = null
|
||||
_resolve({
|
||||
chunk,
|
||||
done: false,
|
||||
cb
|
||||
})
|
||||
},
|
||||
final(cb) {
|
||||
const _resolve = resolve
|
||||
resolve = null
|
||||
_resolve({
|
||||
done: true,
|
||||
cb
|
||||
})
|
||||
},
|
||||
destroy(err, cb) {
|
||||
ac.abort()
|
||||
cb(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
function _duplexify(pair) {
|
||||
const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable
|
||||
const w = pair.writable
|
||||
let readable = !!isReadable(r)
|
||||
let writable = !!isWritable(w)
|
||||
let ondrain
|
||||
let onfinish
|
||||
let onreadable
|
||||
let onclose
|
||||
let d
|
||||
function onfinished(err) {
|
||||
const cb = onclose
|
||||
onclose = null
|
||||
if (cb) {
|
||||
cb(err)
|
||||
} else if (err) {
|
||||
d.destroy(err)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(ronag): Avoid double buffering.
|
||||
// Implement Writable/Readable/Duplex traits.
|
||||
// See, https://github.com/nodejs/node/pull/33515.
|
||||
d = new Duplexify({
|
||||
// TODO (ronag): highWaterMark?
|
||||
readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode),
|
||||
writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode),
|
||||
readable,
|
||||
writable
|
||||
})
|
||||
if (writable) {
|
||||
eos(w, (err) => {
|
||||
writable = false
|
||||
if (err) {
|
||||
destroyer(r, err)
|
||||
}
|
||||
onfinished(err)
|
||||
})
|
||||
d._write = function (chunk, encoding, callback) {
|
||||
if (w.write(chunk, encoding)) {
|
||||
callback()
|
||||
} else {
|
||||
ondrain = callback
|
||||
}
|
||||
}
|
||||
d._final = function (callback) {
|
||||
w.end()
|
||||
onfinish = callback
|
||||
}
|
||||
w.on('drain', function () {
|
||||
if (ondrain) {
|
||||
const cb = ondrain
|
||||
ondrain = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
w.on('finish', function () {
|
||||
if (onfinish) {
|
||||
const cb = onfinish
|
||||
onfinish = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
}
|
||||
if (readable) {
|
||||
eos(r, (err) => {
|
||||
readable = false
|
||||
if (err) {
|
||||
destroyer(r, err)
|
||||
}
|
||||
onfinished(err)
|
||||
})
|
||||
r.on('readable', function () {
|
||||
if (onreadable) {
|
||||
const cb = onreadable
|
||||
onreadable = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
r.on('end', function () {
|
||||
d.push(null)
|
||||
})
|
||||
d._read = function () {
|
||||
while (true) {
|
||||
const buf = r.read()
|
||||
if (buf === null) {
|
||||
onreadable = d._read
|
||||
return
|
||||
}
|
||||
if (!d.push(buf)) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
d._destroy = function (err, callback) {
|
||||
if (!err && onclose !== null) {
|
||||
err = new AbortError()
|
||||
}
|
||||
onreadable = null
|
||||
ondrain = null
|
||||
onfinish = null
|
||||
if (onclose === null) {
|
||||
callback(err)
|
||||
} else {
|
||||
onclose = callback
|
||||
destroyer(w, err)
|
||||
destroyer(r, err)
|
||||
}
|
||||
}
|
||||
return d
|
||||
}
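
The `duplexify()` helper above is, as far as this vendored copy goes, what backs `Duplex.from()`: it normalizes Node streams, web streams, blobs, iterables, async generator functions, promises and `{ readable, writable }` pairs into a single Duplex. A minimal sketch of how that surfaces through the public API, assuming readable-stream v4 is the installed package (the generator body and chunk values are illustrative only):

```js
// Sketch: Duplex.from() delegating to the duplexify() helper shown above.
const { Duplex, pipeline } = require('readable-stream')

// An async generator function becomes a transform-like duplex: written
// chunks arrive as an async iterable, and whatever it yields is readable.
const upperCase = Duplex.from(async function* (source) {
  for await (const chunk of source) {
    yield String(chunk).toUpperCase()
  }
})

pipeline(
  ['hello ', 'world\n'], // pipeline accepts a plain iterable as the source
  upperCase,
  process.stdout,
  (err) => {
    if (err) console.error('pipeline failed', err)
  }
)
```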
|
||||
@@ -1,86 +1,286 @@
|
||||
// Ported from https://github.com/mafintosh/end-of-stream with
|
||||
// permission from the author, Mathias Buus (@mafintosh).
|
||||
|
||||
'use strict';
|
||||
'use strict'
|
||||
|
||||
var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;
|
||||
function once(callback) {
|
||||
var called = false;
|
||||
return function () {
|
||||
if (called) return;
|
||||
called = true;
|
||||
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
||||
args[_key] = arguments[_key];
|
||||
}
|
||||
callback.apply(this, args);
|
||||
};
|
||||
}
|
||||
function noop() {}
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const { AbortError, codes } = require('../../ours/errors')
|
||||
const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes
|
||||
const { kEmptyObject, once } = require('../../ours/util')
|
||||
const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require('../validators')
|
||||
const { Promise, PromisePrototypeThen, SymbolDispose } = require('../../ours/primordials')
|
||||
const {
|
||||
isClosed,
|
||||
isReadable,
|
||||
isReadableNodeStream,
|
||||
isReadableStream,
|
||||
isReadableFinished,
|
||||
isReadableErrored,
|
||||
isWritable,
|
||||
isWritableNodeStream,
|
||||
isWritableStream,
|
||||
isWritableFinished,
|
||||
isWritableErrored,
|
||||
isNodeStream,
|
||||
willEmitClose: _willEmitClose,
|
||||
kIsClosedPromise
|
||||
} = require('./utils')
|
||||
let addAbortListener
|
||||
function isRequest(stream) {
|
||||
return stream.setHeader && typeof stream.abort === 'function';
|
||||
return stream.setHeader && typeof stream.abort === 'function'
|
||||
}
|
||||
function eos(stream, opts, callback) {
|
||||
if (typeof opts === 'function') return eos(stream, null, opts);
|
||||
if (!opts) opts = {};
|
||||
callback = once(callback || noop);
|
||||
var readable = opts.readable || opts.readable !== false && stream.readable;
|
||||
var writable = opts.writable || opts.writable !== false && stream.writable;
|
||||
var onlegacyfinish = function onlegacyfinish() {
|
||||
if (!stream.writable) onfinish();
|
||||
};
|
||||
var writableEnded = stream._writableState && stream._writableState.finished;
|
||||
var onfinish = function onfinish() {
|
||||
writable = false;
|
||||
writableEnded = true;
|
||||
if (!readable) callback.call(stream);
|
||||
};
|
||||
var readableEnded = stream._readableState && stream._readableState.endEmitted;
|
||||
var onend = function onend() {
|
||||
readable = false;
|
||||
readableEnded = true;
|
||||
if (!writable) callback.call(stream);
|
||||
};
|
||||
var onerror = function onerror(err) {
|
||||
callback.call(stream, err);
|
||||
};
|
||||
var onclose = function onclose() {
|
||||
var err;
|
||||
if (readable && !readableEnded) {
|
||||
if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
|
||||
return callback.call(stream, err);
|
||||
}
|
||||
if (writable && !writableEnded) {
|
||||
if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
|
||||
return callback.call(stream, err);
|
||||
}
|
||||
};
|
||||
var onrequest = function onrequest() {
|
||||
stream.req.on('finish', onfinish);
|
||||
};
|
||||
if (isRequest(stream)) {
|
||||
stream.on('complete', onfinish);
|
||||
stream.on('abort', onclose);
|
||||
if (stream.req) onrequest();else stream.on('request', onrequest);
|
||||
} else if (writable && !stream._writableState) {
|
||||
// legacy streams
|
||||
stream.on('end', onlegacyfinish);
|
||||
stream.on('close', onlegacyfinish);
|
||||
const nop = () => {}
|
||||
function eos(stream, options, callback) {
|
||||
var _options$readable, _options$writable
|
||||
if (arguments.length === 2) {
|
||||
callback = options
|
||||
options = kEmptyObject
|
||||
} else if (options == null) {
|
||||
options = kEmptyObject
|
||||
} else {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
stream.on('end', onend);
|
||||
stream.on('finish', onfinish);
|
||||
if (opts.error !== false) stream.on('error', onerror);
|
||||
stream.on('close', onclose);
|
||||
return function () {
|
||||
stream.removeListener('complete', onfinish);
|
||||
stream.removeListener('abort', onclose);
|
||||
stream.removeListener('request', onrequest);
|
||||
if (stream.req) stream.req.removeListener('finish', onfinish);
|
||||
stream.removeListener('end', onlegacyfinish);
|
||||
stream.removeListener('close', onlegacyfinish);
|
||||
stream.removeListener('finish', onfinish);
|
||||
stream.removeListener('end', onend);
|
||||
stream.removeListener('error', onerror);
|
||||
stream.removeListener('close', onclose);
|
||||
};
|
||||
validateFunction(callback, 'callback')
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
callback = once(callback)
|
||||
if (isReadableStream(stream) || isWritableStream(stream)) {
|
||||
return eosWeb(stream, options, callback)
|
||||
}
|
||||
if (!isNodeStream(stream)) {
|
||||
throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
|
||||
}
|
||||
const readable =
|
||||
(_options$readable = options.readable) !== null && _options$readable !== undefined
|
||||
? _options$readable
|
||||
: isReadableNodeStream(stream)
|
||||
const writable =
|
||||
(_options$writable = options.writable) !== null && _options$writable !== undefined
|
||||
? _options$writable
|
||||
: isWritableNodeStream(stream)
|
||||
const wState = stream._writableState
|
||||
const rState = stream._readableState
|
||||
const onlegacyfinish = () => {
|
||||
if (!stream.writable) {
|
||||
onfinish()
|
||||
}
|
||||
}
|
||||
|
||||
// TODO (ronag): Improve soft detection to include core modules and
|
||||
// common ecosystem modules that do properly emit 'close' but fail
|
||||
// this generic check.
|
||||
let willEmitClose =
|
||||
_willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable
|
||||
let writableFinished = isWritableFinished(stream, false)
|
||||
const onfinish = () => {
|
||||
writableFinished = true
|
||||
// Stream should not be destroyed here. If it is that
|
||||
// means that user space is doing something differently and
|
||||
// we cannot trust willEmitClose.
|
||||
if (stream.destroyed) {
|
||||
willEmitClose = false
|
||||
}
|
||||
if (willEmitClose && (!stream.readable || readable)) {
|
||||
return
|
||||
}
|
||||
if (!readable || readableFinished) {
|
||||
callback.call(stream)
|
||||
}
|
||||
}
|
||||
let readableFinished = isReadableFinished(stream, false)
|
||||
const onend = () => {
|
||||
readableFinished = true
|
||||
// Stream should not be destroyed here. If it is that
|
||||
// means that user space is doing something differently and
|
||||
// we cannot trust willEmitClose.
|
||||
if (stream.destroyed) {
|
||||
willEmitClose = false
|
||||
}
|
||||
if (willEmitClose && (!stream.writable || writable)) {
|
||||
return
|
||||
}
|
||||
if (!writable || writableFinished) {
|
||||
callback.call(stream)
|
||||
}
|
||||
}
|
||||
const onerror = (err) => {
|
||||
callback.call(stream, err)
|
||||
}
|
||||
let closed = isClosed(stream)
|
||||
const onclose = () => {
|
||||
closed = true
|
||||
const errored = isWritableErrored(stream) || isReadableErrored(stream)
|
||||
if (errored && typeof errored !== 'boolean') {
|
||||
return callback.call(stream, errored)
|
||||
}
|
||||
if (readable && !readableFinished && isReadableNodeStream(stream, true)) {
|
||||
if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
|
||||
}
|
||||
if (writable && !writableFinished) {
|
||||
if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
|
||||
}
|
||||
callback.call(stream)
|
||||
}
|
||||
const onclosed = () => {
|
||||
closed = true
|
||||
const errored = isWritableErrored(stream) || isReadableErrored(stream)
|
||||
if (errored && typeof errored !== 'boolean') {
|
||||
return callback.call(stream, errored)
|
||||
}
|
||||
callback.call(stream)
|
||||
}
|
||||
const onrequest = () => {
|
||||
stream.req.on('finish', onfinish)
|
||||
}
|
||||
if (isRequest(stream)) {
|
||||
stream.on('complete', onfinish)
|
||||
if (!willEmitClose) {
|
||||
stream.on('abort', onclose)
|
||||
}
|
||||
if (stream.req) {
|
||||
onrequest()
|
||||
} else {
|
||||
stream.on('request', onrequest)
|
||||
}
|
||||
} else if (writable && !wState) {
|
||||
// legacy streams
|
||||
stream.on('end', onlegacyfinish)
|
||||
stream.on('close', onlegacyfinish)
|
||||
}
|
||||
|
||||
// Not all streams will emit 'close' after 'aborted'.
|
||||
if (!willEmitClose && typeof stream.aborted === 'boolean') {
|
||||
stream.on('aborted', onclose)
|
||||
}
|
||||
stream.on('end', onend)
|
||||
stream.on('finish', onfinish)
|
||||
if (options.error !== false) {
|
||||
stream.on('error', onerror)
|
||||
}
|
||||
stream.on('close', onclose)
|
||||
if (closed) {
|
||||
process.nextTick(onclose)
|
||||
} else if (
|
||||
(wState !== null && wState !== undefined && wState.errorEmitted) ||
|
||||
(rState !== null && rState !== undefined && rState.errorEmitted)
|
||||
) {
|
||||
if (!willEmitClose) {
|
||||
process.nextTick(onclosed)
|
||||
}
|
||||
} else if (
|
||||
!readable &&
|
||||
(!willEmitClose || isReadable(stream)) &&
|
||||
(writableFinished || isWritable(stream) === false)
|
||||
) {
|
||||
process.nextTick(onclosed)
|
||||
} else if (
|
||||
!writable &&
|
||||
(!willEmitClose || isWritable(stream)) &&
|
||||
(readableFinished || isReadable(stream) === false)
|
||||
) {
|
||||
process.nextTick(onclosed)
|
||||
} else if (rState && stream.req && stream.aborted) {
|
||||
process.nextTick(onclosed)
|
||||
}
|
||||
const cleanup = () => {
|
||||
callback = nop
|
||||
stream.removeListener('aborted', onclose)
|
||||
stream.removeListener('complete', onfinish)
|
||||
stream.removeListener('abort', onclose)
|
||||
stream.removeListener('request', onrequest)
|
||||
if (stream.req) stream.req.removeListener('finish', onfinish)
|
||||
stream.removeListener('end', onlegacyfinish)
|
||||
stream.removeListener('close', onlegacyfinish)
|
||||
stream.removeListener('finish', onfinish)
|
||||
stream.removeListener('end', onend)
|
||||
stream.removeListener('error', onerror)
|
||||
stream.removeListener('close', onclose)
|
||||
}
|
||||
if (options.signal && !closed) {
|
||||
const abort = () => {
|
||||
// Keep it because cleanup removes it.
|
||||
const endCallback = callback
|
||||
cleanup()
|
||||
endCallback.call(
|
||||
stream,
|
||||
new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
if (options.signal.aborted) {
|
||||
process.nextTick(abort)
|
||||
} else {
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
const disposable = addAbortListener(options.signal, abort)
|
||||
const originalCallback = callback
|
||||
callback = once((...args) => {
|
||||
disposable[SymbolDispose]()
|
||||
originalCallback.apply(stream, args)
|
||||
})
|
||||
}
|
||||
}
|
||||
return cleanup
|
||||
}
|
||||
module.exports = eos;
|
||||
function eosWeb(stream, options, callback) {
|
||||
let isAborted = false
|
||||
let abort = nop
|
||||
if (options.signal) {
|
||||
abort = () => {
|
||||
isAborted = true
|
||||
callback.call(
|
||||
stream,
|
||||
new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
if (options.signal.aborted) {
|
||||
process.nextTick(abort)
|
||||
} else {
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
const disposable = addAbortListener(options.signal, abort)
|
||||
const originalCallback = callback
|
||||
callback = once((...args) => {
|
||||
disposable[SymbolDispose]()
|
||||
originalCallback.apply(stream, args)
|
||||
})
|
||||
}
|
||||
}
|
||||
const resolverFn = (...args) => {
|
||||
if (!isAborted) {
|
||||
process.nextTick(() => callback.apply(stream, args))
|
||||
}
|
||||
}
|
||||
PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn)
|
||||
return nop
|
||||
}
|
||||
function finished(stream, opts) {
|
||||
var _opts
|
||||
let autoCleanup = false
|
||||
if (opts === null) {
|
||||
opts = kEmptyObject
|
||||
}
|
||||
if ((_opts = opts) !== null && _opts !== undefined && _opts.cleanup) {
|
||||
validateBoolean(opts.cleanup, 'cleanup')
|
||||
autoCleanup = opts.cleanup
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
const cleanup = eos(stream, opts, (err) => {
|
||||
if (autoCleanup) {
|
||||
cleanup()
|
||||
}
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
module.exports = eos
|
||||
module.exports.finished = finished
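
In rough terms, `eos()` above attaches 'end'/'finish'/'error'/'close' listeners (plus the request and web-stream special cases), invokes its callback exactly once when the stream can produce nothing further, and returns a cleanup function; `finished()` is the promise wrapper exported alongside it. A hedged sketch of the public callback form, assuming readable-stream v4 as the installed package (the file path is illustrative):

```js
// Sketch: detecting stream completion with the finished() helper backed by
// the end-of-stream module above.
const fs = require('fs')
const { finished } = require('readable-stream')

const rs = fs.createReadStream('./archive.tar') // illustrative path
rs.resume() // drain the data; we only care about completion or failure

const cleanup = finished(rs, (err) => {
  if (err) {
    console.error('stream failed before completing', err)
  } else {
    console.log('stream is done reading')
  }
  cleanup() // remove the listeners that were attached
})
```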
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
module.exports = function () {
|
||||
throw new Error('Readable.from is not available in the browser')
|
||||
};
|
||||
@@ -1,52 +1,98 @@
|
||||
'use strict';
|
||||
'use strict'
|
||||
|
||||
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
|
||||
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
|
||||
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
|
||||
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
|
||||
function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
||||
function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
|
||||
function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
|
||||
var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials')
|
||||
const { Buffer } = require('buffer')
|
||||
const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes
|
||||
function from(Readable, iterable, opts) {
|
||||
var iterator;
|
||||
if (iterable && typeof iterable.next === 'function') {
|
||||
iterator = iterable;
|
||||
} else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);
|
||||
var readable = new Readable(_objectSpread({
|
||||
objectMode: true
|
||||
}, opts));
|
||||
// Reading boolean to protect against _read
|
||||
let iterator
|
||||
if (typeof iterable === 'string' || iterable instanceof Buffer) {
|
||||
return new Readable({
|
||||
objectMode: true,
|
||||
...opts,
|
||||
read() {
|
||||
this.push(iterable)
|
||||
this.push(null)
|
||||
}
|
||||
})
|
||||
}
|
||||
let isAsync
|
||||
if (iterable && iterable[SymbolAsyncIterator]) {
|
||||
isAsync = true
|
||||
iterator = iterable[SymbolAsyncIterator]()
|
||||
} else if (iterable && iterable[SymbolIterator]) {
|
||||
isAsync = false
|
||||
iterator = iterable[SymbolIterator]()
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable)
|
||||
}
|
||||
const readable = new Readable({
|
||||
objectMode: true,
|
||||
highWaterMark: 1,
|
||||
// TODO(ronag): What options should be allowed?
|
||||
...opts
|
||||
})
|
||||
|
||||
// Flag to protect against _read
|
||||
// being called before last iteration completion.
|
||||
var reading = false;
|
||||
let reading = false
|
||||
readable._read = function () {
|
||||
if (!reading) {
|
||||
reading = true;
|
||||
next();
|
||||
reading = true
|
||||
next()
|
||||
}
|
||||
};
|
||||
function next() {
|
||||
return _next2.apply(this, arguments);
|
||||
}
|
||||
function _next2() {
|
||||
_next2 = _asyncToGenerator(function* () {
|
||||
readable._destroy = function (error, cb) {
|
||||
PromisePrototypeThen(
|
||||
close(error),
|
||||
() => process.nextTick(cb, error),
|
||||
// nextTick is here in case cb throws
|
||||
(e) => process.nextTick(cb, e || error)
|
||||
)
|
||||
}
|
||||
async function close(error) {
|
||||
const hadError = error !== undefined && error !== null
|
||||
const hasThrow = typeof iterator.throw === 'function'
|
||||
if (hadError && hasThrow) {
|
||||
const { value, done } = await iterator.throw(error)
|
||||
await value
|
||||
if (done) {
|
||||
return
|
||||
}
|
||||
}
|
||||
if (typeof iterator.return === 'function') {
|
||||
const { value } = await iterator.return()
|
||||
await value
|
||||
}
|
||||
}
|
||||
async function next() {
|
||||
for (;;) {
|
||||
try {
|
||||
var _yield$iterator$next = yield iterator.next(),
|
||||
value = _yield$iterator$next.value,
|
||||
done = _yield$iterator$next.done;
|
||||
const { value, done } = isAsync ? await iterator.next() : iterator.next()
|
||||
if (done) {
|
||||
readable.push(null);
|
||||
} else if (readable.push(yield value)) {
|
||||
next();
|
||||
readable.push(null)
|
||||
} else {
|
||||
reading = false;
|
||||
const res = value && typeof value.then === 'function' ? await value : value
|
||||
if (res === null) {
|
||||
reading = false
|
||||
throw new ERR_STREAM_NULL_VALUES()
|
||||
} else if (readable.push(res)) {
|
||||
continue
|
||||
} else {
|
||||
reading = false
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
readable.destroy(err);
|
||||
readable.destroy(err)
|
||||
}
|
||||
});
|
||||
return _next2.apply(this, arguments);
|
||||
break
|
||||
}
|
||||
}
|
||||
return readable;
|
||||
return readable
|
||||
}
|
||||
module.exports = from;
|
||||
module.exports = from
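
The rewritten `from()` above is the machinery behind `Readable.from()`: strings and Buffers are pushed as a single chunk, while sync and async iterables are pulled one item at a time in object mode with a small high-water mark. A short sketch of the public entry point, assuming readable-stream v4 (the generator is illustrative):

```js
// Sketch: Readable.from() building a stream from an async generator,
// which routes through the from() implementation above.
const { Readable } = require('readable-stream')

async function* numbers() {
  for (let i = 1; i <= 3; i++) {
    yield `line ${i}\n`
  }
}

Readable.from(numbers())
  .on('data', (chunk) => process.stdout.write(chunk))
  .on('end', () => console.log('done'))
```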
|
||||
|
||||
51 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/lazy_transform.js generated vendored Normal file
@@ -0,0 +1,51 @@
|
||||
// LazyTransform is a special type of Transform stream that is lazily loaded.
|
||||
// This is used for performance with bi-API-ship: when two APIs are available
|
||||
// for the stream, one conventional and one non-conventional.
|
||||
'use strict'
|
||||
|
||||
const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
const stream = require('../../stream')
|
||||
const { getDefaultEncoding } = require('../crypto/util')
|
||||
module.exports = LazyTransform
|
||||
function LazyTransform(options) {
|
||||
this._options = options
|
||||
}
|
||||
ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype)
|
||||
ObjectSetPrototypeOf(LazyTransform, stream.Transform)
|
||||
function makeGetter(name) {
|
||||
return function () {
|
||||
stream.Transform.call(this, this._options)
|
||||
this._writableState.decodeStrings = false
|
||||
if (!this._options || !this._options.defaultEncoding) {
|
||||
this._writableState.defaultEncoding = getDefaultEncoding()
|
||||
}
|
||||
return this[name]
|
||||
}
|
||||
}
|
||||
function makeSetter(name) {
|
||||
return function (val) {
|
||||
ObjectDefineProperty(this, name, {
|
||||
__proto__: null,
|
||||
value: val,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
}
|
||||
ObjectDefineProperties(LazyTransform.prototype, {
|
||||
_readableState: {
|
||||
__proto__: null,
|
||||
get: makeGetter('_readableState'),
|
||||
set: makeSetter('_readableState'),
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
},
|
||||
_writableState: {
|
||||
__proto__: null,
|
||||
get: makeGetter('_writableState'),
|
||||
set: makeSetter('_writableState'),
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
}
|
||||
})
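
LazyTransform defers the real `Transform` constructor work until `_readableState` or `_writableState` is first touched: the getters above run the initialization once, and the setters replace the lazy property with a plain value. The following is a generic sketch of that same lazy-getter pattern reduced to a plain object; the names (`makeLazy`, `resource`, `connection`) are illustrative and not part of this module:

```js
// Sketch: lazy initialization via property getter/setter redefinition,
// the pattern LazyTransform applies to its stream state objects.
function makeLazy(target, name, init) {
  Object.defineProperty(target, name, {
    configurable: true,
    get() {
      const value = init.call(this) // do the expensive work once
      Object.defineProperty(this, name, { value, writable: true })
      return value
    },
    set(value) {
      Object.defineProperty(this, name, { value, writable: true })
    }
  })
}

const resource = {}
makeLazy(resource, 'connection', () => {
  console.log('initializing...') // runs only on first access
  return { open: true }
})
console.log(resource.connection.open) // triggers init, then caches the value
```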
|
||||
89 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/legacy.js generated vendored Normal file
@@ -0,0 +1,89 @@
|
||||
'use strict'
|
||||
|
||||
const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
const { EventEmitter: EE } = require('events')
|
||||
function Stream(opts) {
|
||||
EE.call(this, opts)
|
||||
}
|
||||
ObjectSetPrototypeOf(Stream.prototype, EE.prototype)
|
||||
ObjectSetPrototypeOf(Stream, EE)
|
||||
Stream.prototype.pipe = function (dest, options) {
|
||||
const source = this
|
||||
function ondata(chunk) {
|
||||
if (dest.writable && dest.write(chunk) === false && source.pause) {
|
||||
source.pause()
|
||||
}
|
||||
}
|
||||
source.on('data', ondata)
|
||||
function ondrain() {
|
||||
if (source.readable && source.resume) {
|
||||
source.resume()
|
||||
}
|
||||
}
|
||||
dest.on('drain', ondrain)
|
||||
|
||||
// If the 'end' option is not supplied, dest.end() will be called when
|
||||
// source gets the 'end' or 'close' events. Only dest.end() once.
|
||||
if (!dest._isStdio && (!options || options.end !== false)) {
|
||||
source.on('end', onend)
|
||||
source.on('close', onclose)
|
||||
}
|
||||
let didOnEnd = false
|
||||
function onend() {
|
||||
if (didOnEnd) return
|
||||
didOnEnd = true
|
||||
dest.end()
|
||||
}
|
||||
function onclose() {
|
||||
if (didOnEnd) return
|
||||
didOnEnd = true
|
||||
if (typeof dest.destroy === 'function') dest.destroy()
|
||||
}
|
||||
|
||||
// Don't leave dangling pipes when there are errors.
|
||||
function onerror(er) {
|
||||
cleanup()
|
||||
if (EE.listenerCount(this, 'error') === 0) {
|
||||
this.emit('error', er)
|
||||
}
|
||||
}
|
||||
prependListener(source, 'error', onerror)
|
||||
prependListener(dest, 'error', onerror)
|
||||
|
||||
// Remove all the event listeners that were added.
|
||||
function cleanup() {
|
||||
source.removeListener('data', ondata)
|
||||
dest.removeListener('drain', ondrain)
|
||||
source.removeListener('end', onend)
|
||||
source.removeListener('close', onclose)
|
||||
source.removeListener('error', onerror)
|
||||
dest.removeListener('error', onerror)
|
||||
source.removeListener('end', cleanup)
|
||||
source.removeListener('close', cleanup)
|
||||
dest.removeListener('close', cleanup)
|
||||
}
|
||||
source.on('end', cleanup)
|
||||
source.on('close', cleanup)
|
||||
dest.on('close', cleanup)
|
||||
dest.emit('pipe', source)
|
||||
|
||||
// Allow for unix-like usage: A.pipe(B).pipe(C)
|
||||
return dest
|
||||
}
|
||||
function prependListener(emitter, event, fn) {
|
||||
// Sadly this is not cacheable as some libraries bundle their own
|
||||
// event emitter implementation with them.
|
||||
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn)
|
||||
|
||||
// This is a hack to make sure that our error handler is attached before any
|
||||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||||
// to continue to work with older versions of Node.js that do not include
|
||||
// the prependListener() method. The goal is to eventually remove this hack.
|
||||
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn)
|
||||
else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn)
|
||||
else emitter._events[event] = [fn, emitter._events[event]]
|
||||
}
|
||||
module.exports = {
|
||||
Stream,
|
||||
prependListener
|
||||
}
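
The legacy `Stream.prototype.pipe()` above implements the classic data/drain handshake: pause the source when `dest.write()` returns false, resume on 'drain', end or destroy the destination on 'end'/'close', and tear every listener down on error. A brief usage sketch (the file paths are illustrative):

```js
// Sketch: the classic pipe() flow handled by the legacy Stream above.
const fs = require('fs')

const source = fs.createReadStream('./input.txt')  // illustrative path
const dest = fs.createWriteStream('./output.txt')  // illustrative path

source
  .pipe(dest) // returns dest, so pipes can be chained: A.pipe(B).pipe(C)
  .on('close', () => console.log('copy finished'))

source.on('error', (err) => console.error('read failed', err))
dest.on('error', (err) => console.error('write failed', err))
```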
|
||||
457 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/operators.js generated vendored Normal file
@@ -0,0 +1,457 @@
|
||||
'use strict'
|
||||
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
const {
|
||||
codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE },
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { validateAbortSignal, validateInteger, validateObject } = require('../validators')
|
||||
const kWeakHandler = require('../../ours/primordials').Symbol('kWeak')
|
||||
const kResistStopPropagation = require('../../ours/primordials').Symbol('kResistStopPropagation')
|
||||
const { finished } = require('./end-of-stream')
|
||||
const staticCompose = require('./compose')
|
||||
const { addAbortSignalNoValidate } = require('./add-abort-signal')
|
||||
const { isWritable, isNodeStream } = require('./utils')
|
||||
const { deprecate } = require('../../ours/util')
|
||||
const {
|
||||
ArrayPrototypePush,
|
||||
Boolean,
|
||||
MathFloor,
|
||||
Number,
|
||||
NumberIsNaN,
|
||||
Promise,
|
||||
PromiseReject,
|
||||
PromiseResolve,
|
||||
PromisePrototypeThen,
|
||||
Symbol
|
||||
} = require('../../ours/primordials')
|
||||
const kEmpty = Symbol('kEmpty')
|
||||
const kEof = Symbol('kEof')
|
||||
function compose(stream, options) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
if (isNodeStream(stream) && !isWritable(stream)) {
|
||||
throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable')
|
||||
}
|
||||
const composedStream = staticCompose(this, stream)
|
||||
if (options !== null && options !== undefined && options.signal) {
|
||||
// Not validating as we already validated before
|
||||
addAbortSignalNoValidate(options.signal, composedStream)
|
||||
}
|
||||
return composedStream
|
||||
}
|
||||
function map(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
let concurrency = 1
|
||||
if ((options === null || options === undefined ? undefined : options.concurrency) != null) {
|
||||
concurrency = MathFloor(options.concurrency)
|
||||
}
|
||||
let highWaterMark = concurrency - 1
|
||||
if ((options === null || options === undefined ? undefined : options.highWaterMark) != null) {
|
||||
highWaterMark = MathFloor(options.highWaterMark)
|
||||
}
|
||||
validateInteger(concurrency, 'options.concurrency', 1)
|
||||
validateInteger(highWaterMark, 'options.highWaterMark', 0)
|
||||
highWaterMark += concurrency
|
||||
return async function* map() {
|
||||
const signal = require('../../ours/util').AbortSignalAny(
|
||||
[options === null || options === undefined ? undefined : options.signal].filter(Boolean)
|
||||
)
|
||||
const stream = this
|
||||
const queue = []
|
||||
const signalOpt = {
|
||||
signal
|
||||
}
|
||||
let next
|
||||
let resume
|
||||
let done = false
|
||||
let cnt = 0
|
||||
function onCatch() {
|
||||
done = true
|
||||
afterItemProcessed()
|
||||
}
|
||||
function afterItemProcessed() {
|
||||
cnt -= 1
|
||||
maybeResume()
|
||||
}
|
||||
function maybeResume() {
|
||||
if (resume && !done && cnt < concurrency && queue.length < highWaterMark) {
|
||||
resume()
|
||||
resume = null
|
||||
}
|
||||
}
|
||||
async function pump() {
|
||||
try {
|
||||
for await (let val of stream) {
|
||||
if (done) {
|
||||
return
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw new AbortError()
|
||||
}
|
||||
try {
|
||||
val = fn(val, signalOpt)
|
||||
if (val === kEmpty) {
|
||||
continue
|
||||
}
|
||||
val = PromiseResolve(val)
|
||||
} catch (err) {
|
||||
val = PromiseReject(err)
|
||||
}
|
||||
cnt += 1
|
||||
PromisePrototypeThen(val, afterItemProcessed, onCatch)
|
||||
queue.push(val)
|
||||
if (next) {
|
||||
next()
|
||||
next = null
|
||||
}
|
||||
if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) {
|
||||
await new Promise((resolve) => {
|
||||
resume = resolve
|
||||
})
|
||||
}
|
||||
}
|
||||
queue.push(kEof)
|
||||
} catch (err) {
|
||||
const val = PromiseReject(err)
|
||||
PromisePrototypeThen(val, afterItemProcessed, onCatch)
|
||||
queue.push(val)
|
||||
} finally {
|
||||
done = true
|
||||
if (next) {
|
||||
next()
|
||||
next = null
|
||||
}
|
||||
}
|
||||
}
|
||||
pump()
|
||||
try {
|
||||
while (true) {
|
||||
while (queue.length > 0) {
|
||||
const val = await queue[0]
|
||||
if (val === kEof) {
|
||||
return
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (val !== kEmpty) {
|
||||
yield val
|
||||
}
|
||||
queue.shift()
|
||||
maybeResume()
|
||||
}
|
||||
await new Promise((resolve) => {
|
||||
next = resolve
|
||||
})
|
||||
}
|
||||
} finally {
|
||||
done = true
|
||||
if (resume) {
|
||||
resume()
|
||||
resume = null
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function asIndexedPairs(options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
return async function* asIndexedPairs() {
|
||||
let index = 0
|
||||
for await (const val of this) {
|
||||
var _options$signal
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal = options.signal) !== null &&
|
||||
_options$signal !== undefined &&
|
||||
_options$signal.aborted
|
||||
) {
|
||||
throw new AbortError({
|
||||
cause: options.signal.reason
|
||||
})
|
||||
}
|
||||
yield [index++, val]
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
async function some(fn, options = undefined) {
|
||||
for await (const unused of filter.call(this, fn, options)) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
async function every(fn, options = undefined) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
// https://en.wikipedia.org/wiki/De_Morgan%27s_laws
|
||||
return !(await some.call(
|
||||
this,
|
||||
async (...args) => {
|
||||
return !(await fn(...args))
|
||||
},
|
||||
options
|
||||
))
|
||||
}
|
||||
async function find(fn, options) {
|
||||
for await (const result of filter.call(this, fn, options)) {
|
||||
return result
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
async function forEach(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
async function forEachFn(value, options) {
|
||||
await fn(value, options)
|
||||
return kEmpty
|
||||
}
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
for await (const unused of map.call(this, forEachFn, options));
|
||||
}
|
||||
function filter(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
async function filterFn(value, options) {
|
||||
if (await fn(value, options)) {
|
||||
return value
|
||||
}
|
||||
return kEmpty
|
||||
}
|
||||
return map.call(this, filterFn, options)
|
||||
}
|
||||
|
||||
// Specific to provide better error to reduce since the argument is only
|
||||
// missing if the stream has no items in it - but the code is still appropriate
|
||||
class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS {
|
||||
constructor() {
|
||||
super('reduce')
|
||||
this.message = 'Reduce of an empty stream requires an initial value'
|
||||
}
|
||||
}
|
||||
async function reduce(reducer, initialValue, options) {
|
||||
var _options$signal2
|
||||
if (typeof reducer !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer)
|
||||
}
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
let hasInitialValue = arguments.length > 1
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal2 = options.signal) !== null &&
|
||||
_options$signal2 !== undefined &&
|
||||
_options$signal2.aborted
|
||||
) {
|
||||
const err = new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
this.once('error', () => {}) // The error is already propagated
|
||||
await finished(this.destroy(err))
|
||||
throw err
|
||||
}
|
||||
const ac = new AbortController()
|
||||
const signal = ac.signal
|
||||
if (options !== null && options !== undefined && options.signal) {
|
||||
const opts = {
|
||||
once: true,
|
||||
[kWeakHandler]: this,
|
||||
[kResistStopPropagation]: true
|
||||
}
|
||||
options.signal.addEventListener('abort', () => ac.abort(), opts)
|
||||
}
|
||||
let gotAnyItemFromStream = false
|
||||
try {
|
||||
for await (const value of this) {
|
||||
var _options$signal3
|
||||
gotAnyItemFromStream = true
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal3 = options.signal) !== null &&
|
||||
_options$signal3 !== undefined &&
|
||||
_options$signal3.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (!hasInitialValue) {
|
||||
initialValue = value
|
||||
hasInitialValue = true
|
||||
} else {
|
||||
initialValue = await reducer(initialValue, value, {
|
||||
signal
|
||||
})
|
||||
}
|
||||
}
|
||||
if (!gotAnyItemFromStream && !hasInitialValue) {
|
||||
throw new ReduceAwareErrMissingArgs()
|
||||
}
|
||||
} finally {
|
||||
ac.abort()
|
||||
}
|
||||
return initialValue
|
||||
}
|
||||
async function toArray(options) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
const result = []
|
||||
for await (const val of this) {
|
||||
var _options$signal4
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal4 = options.signal) !== null &&
|
||||
_options$signal4 !== undefined &&
|
||||
_options$signal4.aborted
|
||||
) {
|
||||
throw new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
}
|
||||
ArrayPrototypePush(result, val)
|
||||
}
|
||||
return result
|
||||
}
|
||||
function flatMap(fn, options) {
|
||||
const values = map.call(this, fn, options)
|
||||
return async function* flatMap() {
|
||||
for await (const val of values) {
|
||||
yield* val
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function toIntegerOrInfinity(number) {
|
||||
// We coerce here to align with the spec
|
||||
// https://github.com/tc39/proposal-iterator-helpers/issues/169
|
||||
number = Number(number)
|
||||
if (NumberIsNaN(number)) {
|
||||
return 0
|
||||
}
|
||||
if (number < 0) {
|
||||
throw new ERR_OUT_OF_RANGE('number', '>= 0', number)
|
||||
}
|
||||
return number
|
||||
}
|
||||
function drop(number, options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
number = toIntegerOrInfinity(number)
|
||||
return async function* drop() {
|
||||
var _options$signal5
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal5 = options.signal) !== null &&
|
||||
_options$signal5 !== undefined &&
|
||||
_options$signal5.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
for await (const val of this) {
|
||||
var _options$signal6
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal6 = options.signal) !== null &&
|
||||
_options$signal6 !== undefined &&
|
||||
_options$signal6.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (number-- <= 0) {
|
||||
yield val
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function take(number, options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
number = toIntegerOrInfinity(number)
|
||||
return async function* take() {
|
||||
var _options$signal7
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal7 = options.signal) !== null &&
|
||||
_options$signal7 !== undefined &&
|
||||
_options$signal7.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
for await (const val of this) {
|
||||
var _options$signal8
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal8 = options.signal) !== null &&
|
||||
_options$signal8 !== undefined &&
|
||||
_options$signal8.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (number-- > 0) {
|
||||
yield val
|
||||
}
|
||||
|
||||
// Don't get another item from iterator in case we reached the end
|
||||
if (number <= 0) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
module.exports.streamReturningOperators = {
|
||||
asIndexedPairs: deprecate(asIndexedPairs, 'readable.asIndexedPairs will be removed in a future version.'),
|
||||
drop,
|
||||
filter,
|
||||
flatMap,
|
||||
map,
|
||||
take,
|
||||
compose
|
||||
}
|
||||
module.exports.promiseReturningOperators = {
|
||||
every,
|
||||
forEach,
|
||||
reduce,
|
||||
toArray,
|
||||
some,
|
||||
find
|
||||
}
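
These operators end up attached to `Readable.prototype`: the stream-returning group (`map`, `filter`, `flatMap`, `drop`, `take`, `compose`) produces new streams, while the promise-returning group (`toArray`, `reduce`, `some`, `every`, `find`, `forEach`) resolves with a value. A hedged sketch of chaining them, assuming readable-stream v4, which wires these operators onto `Readable`:

```js
// Sketch: chaining the stream-returning and promise-returning operators
// defined above. map/filter/take return streams; toArray resolves an array.
const { Readable } = require('readable-stream')

async function main() {
  const result = await Readable.from([1, 2, 3, 4, 5])
    .map(async (n) => n * 2, { concurrency: 2 }) // async mapper, 2 in flight
    .filter((n) => n > 4)
    .take(2)
    .toArray()

  console.log(result) // [6, 8]
}

main().catch(console.error)
```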
|
||||
39 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/passthrough.js generated vendored Normal file
@@ -0,0 +1,39 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a passthrough stream.
|
||||
// basically just the most minimal sort of Transform stream.
|
||||
// Every written chunk gets output as-is.
|
||||
|
||||
'use strict'
|
||||
|
||||
const { ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
module.exports = PassThrough
|
||||
const Transform = require('./transform')
|
||||
ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype)
|
||||
ObjectSetPrototypeOf(PassThrough, Transform)
|
||||
function PassThrough(options) {
|
||||
if (!(this instanceof PassThrough)) return new PassThrough(options)
|
||||
Transform.call(this, options)
|
||||
}
|
||||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||||
cb(null, chunk)
|
||||
}
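
PassThrough is the minimal Transform: every written chunk is emitted unchanged, which makes it handy as a junction point or for observing data in flight. A short sketch (the tap logic is illustrative):

```js
// Sketch: using PassThrough as a tap between a source and a destination.
const { PassThrough } = require('readable-stream')

const tap = new PassThrough()
tap.on('data', (chunk) => {
  console.log('observed %d bytes', chunk.length) // side-channel logging
})

// e.g. someReadable.pipe(tap).pipe(someWritable)
process.stdin.pipe(tap).pipe(process.stdout)
```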
|
||||
@@ -1,86 +1,471 @@
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
// Ported from https://github.com/mafintosh/pump with
|
||||
// permission from the author, Mathias Buus (@mafintosh).
|
||||
|
||||
'use strict';
|
||||
|
||||
var eos;
|
||||
function once(callback) {
|
||||
var called = false;
|
||||
return function () {
|
||||
if (called) return;
|
||||
called = true;
|
||||
callback.apply(void 0, arguments);
|
||||
};
|
||||
}
|
||||
var _require$codes = require('../../../errors').codes,
|
||||
ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
|
||||
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
|
||||
function noop(err) {
|
||||
// Rethrow the error if it exists to avoid swallowing it
|
||||
if (err) throw err;
|
||||
}
|
||||
function isRequest(stream) {
|
||||
return stream.setHeader && typeof stream.abort === 'function';
|
||||
}
|
||||
function destroyer(stream, reading, writing, callback) {
|
||||
callback = once(callback);
|
||||
var closed = false;
|
||||
stream.on('close', function () {
|
||||
closed = true;
|
||||
});
|
||||
if (eos === undefined) eos = require('./end-of-stream');
|
||||
eos(stream, {
|
||||
readable: reading,
|
||||
writable: writing
|
||||
}, function (err) {
|
||||
if (err) return callback(err);
|
||||
closed = true;
|
||||
callback();
|
||||
});
|
||||
var destroyed = false;
|
||||
return function (err) {
|
||||
if (closed) return;
|
||||
if (destroyed) return;
|
||||
destroyed = true;
|
||||
|
||||
// request.destroy just do .end - .abort is what we want
|
||||
if (isRequest(stream)) return stream.abort();
|
||||
if (typeof stream.destroy === 'function') return stream.destroy();
|
||||
callback(err || new ERR_STREAM_DESTROYED('pipe'));
|
||||
};
|
||||
}
|
||||
function call(fn) {
|
||||
fn();
|
||||
}
|
||||
function pipe(from, to) {
|
||||
return from.pipe(to);
|
||||
;('use strict')
|
||||
const { ArrayIsArray, Promise, SymbolAsyncIterator, SymbolDispose } = require('../../ours/primordials')
|
||||
const eos = require('./end-of-stream')
|
||||
const { once } = require('../../ours/util')
|
||||
const destroyImpl = require('./destroy')
|
||||
const Duplex = require('./duplex')
|
||||
const {
|
||||
aggregateTwoErrors,
|
||||
codes: {
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
ERR_INVALID_RETURN_VALUE,
|
||||
ERR_MISSING_ARGS,
|
||||
ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_PREMATURE_CLOSE
|
||||
},
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { validateFunction, validateAbortSignal } = require('../validators')
|
||||
const {
|
||||
isIterable,
|
||||
isReadable,
|
||||
isReadableNodeStream,
|
||||
isNodeStream,
|
||||
isTransformStream,
|
||||
isWebStream,
|
||||
isReadableStream,
|
||||
isReadableFinished
|
||||
} = require('./utils')
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
let PassThrough
|
||||
let Readable
|
||||
let addAbortListener
|
||||
function destroyer(stream, reading, writing) {
|
||||
let finished = false
|
||||
stream.on('close', () => {
|
||||
finished = true
|
||||
})
|
||||
const cleanup = eos(
|
||||
stream,
|
||||
{
|
||||
readable: reading,
|
||||
writable: writing
|
||||
},
|
||||
(err) => {
|
||||
finished = !err
|
||||
}
|
||||
)
|
||||
return {
|
||||
destroy: (err) => {
|
||||
if (finished) return
|
||||
finished = true
|
||||
destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'))
|
||||
},
|
||||
cleanup
|
||||
}
|
||||
}
|
||||
function popCallback(streams) {
|
||||
if (!streams.length) return noop;
|
||||
if (typeof streams[streams.length - 1] !== 'function') return noop;
|
||||
return streams.pop();
|
||||
// Streams should never be an empty array. It should always contain at least
|
||||
// a single stream. Therefore optimize for the average case instead of
|
||||
// checking for length === 0 as well.
|
||||
validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]')
|
||||
return streams.pop()
|
||||
}
|
||||
function pipeline() {
|
||||
for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
|
||||
streams[_key] = arguments[_key];
|
||||
function makeAsyncIterable(val) {
|
||||
if (isIterable(val)) {
|
||||
return val
|
||||
} else if (isReadableNodeStream(val)) {
|
||||
// Legacy streams are not Iterable.
|
||||
return fromReadable(val)
|
||||
}
|
||||
throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val)
|
||||
}
|
||||
async function* fromReadable(val) {
|
||||
if (!Readable) {
|
||||
Readable = require('./readable')
|
||||
}
|
||||
yield* Readable.prototype[SymbolAsyncIterator].call(val)
|
||||
}
|
||||
async function pumpToNode(iterable, writable, finish, { end }) {
|
||||
let error
|
||||
let onresolve = null
|
||||
const resume = (err) => {
|
||||
if (err) {
|
||||
error = err
|
||||
}
|
||||
if (onresolve) {
|
||||
const callback = onresolve
|
||||
onresolve = null
|
||||
callback()
|
||||
}
|
||||
}
|
||||
const wait = () =>
|
||||
new Promise((resolve, reject) => {
|
||||
if (error) {
|
||||
reject(error)
|
||||
} else {
|
||||
onresolve = () => {
|
||||
if (error) {
|
||||
reject(error)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
writable.on('drain', resume)
|
||||
const cleanup = eos(
|
||||
writable,
|
||||
{
|
||||
readable: false
|
||||
},
|
||||
resume
|
||||
)
|
||||
try {
|
||||
if (writable.writableNeedDrain) {
|
||||
await wait()
|
||||
}
|
||||
for await (const chunk of iterable) {
|
||||
if (!writable.write(chunk)) {
|
||||
await wait()
|
||||
}
|
||||
}
|
||||
if (end) {
|
||||
writable.end()
|
||||
await wait()
|
||||
}
|
||||
finish()
|
||||
} catch (err) {
|
||||
finish(error !== err ? aggregateTwoErrors(error, err) : err)
|
||||
} finally {
|
||||
cleanup()
|
||||
writable.off('drain', resume)
|
||||
}
|
||||
}
|
||||
async function pumpToWeb(readable, writable, finish, { end }) {
|
||||
if (isTransformStream(writable)) {
|
||||
writable = writable.writable
|
||||
}
|
||||
// https://streams.spec.whatwg.org/#example-manual-write-with-backpressure
|
||||
const writer = writable.getWriter()
|
||||
try {
|
||||
for await (const chunk of readable) {
|
||||
await writer.ready
|
||||
writer.write(chunk).catch(() => {})
|
||||
}
|
||||
await writer.ready
|
||||
if (end) {
|
||||
await writer.close()
|
||||
}
|
||||
finish()
|
||||
} catch (err) {
|
||||
try {
|
||||
await writer.abort(err)
|
||||
finish(err)
|
||||
} catch (err) {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
function pipeline(...streams) {
|
||||
return pipelineImpl(streams, once(popCallback(streams)))
|
||||
}
|
||||
function pipelineImpl(streams, callback, opts) {
|
||||
if (streams.length === 1 && ArrayIsArray(streams[0])) {
|
||||
streams = streams[0]
|
||||
}
|
||||
var callback = popCallback(streams);
|
||||
if (Array.isArray(streams[0])) streams = streams[0];
|
||||
if (streams.length < 2) {
|
||||
throw new ERR_MISSING_ARGS('streams');
|
||||
throw new ERR_MISSING_ARGS('streams')
|
||||
}
|
||||
var error;
|
||||
var destroys = streams.map(function (stream, i) {
|
||||
var reading = i < streams.length - 1;
|
||||
var writing = i > 0;
|
||||
return destroyer(stream, reading, writing, function (err) {
|
||||
if (!error) error = err;
|
||||
if (err) destroys.forEach(call);
|
||||
if (reading) return;
|
||||
destroys.forEach(call);
|
||||
callback(error);
|
||||
});
|
||||
});
|
||||
return streams.reduce(pipe);
|
||||
const ac = new AbortController()
|
||||
const signal = ac.signal
|
||||
const outerSignal = opts === null || opts === undefined ? undefined : opts.signal
|
||||
|
||||
// Need to cleanup event listeners if last stream is readable
|
||||
// https://github.com/nodejs/node/issues/35452
|
||||
const lastStreamCleanup = []
|
||||
validateAbortSignal(outerSignal, 'options.signal')
|
||||
function abort() {
|
||||
finishImpl(new AbortError())
|
||||
}
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
let disposable
|
||||
if (outerSignal) {
|
||||
disposable = addAbortListener(outerSignal, abort)
|
||||
}
|
||||
let error
|
||||
let value
|
||||
const destroys = []
|
||||
let finishCount = 0
|
||||
function finish(err) {
|
||||
finishImpl(err, --finishCount === 0)
|
||||
}
|
||||
function finishImpl(err, final) {
|
||||
var _disposable
|
||||
if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) {
|
||||
error = err
|
||||
}
|
||||
if (!error && !final) {
|
||||
return
|
||||
}
|
||||
while (destroys.length) {
|
||||
destroys.shift()(error)
|
||||
}
|
||||
;(_disposable = disposable) === null || _disposable === undefined ? undefined : _disposable[SymbolDispose]()
|
||||
ac.abort()
|
||||
if (final) {
|
||||
if (!error) {
|
||||
lastStreamCleanup.forEach((fn) => fn())
|
||||
}
|
||||
process.nextTick(callback, error, value)
|
||||
}
|
||||
}
|
||||
let ret
|
||||
for (let i = 0; i < streams.length; i++) {
|
||||
const stream = streams[i]
|
||||
const reading = i < streams.length - 1
|
||||
const writing = i > 0
|
||||
const end = reading || (opts === null || opts === undefined ? undefined : opts.end) !== false
|
||||
const isLastStream = i === streams.length - 1
|
||||
if (isNodeStream(stream)) {
|
||||
if (end) {
|
||||
const { destroy, cleanup } = destroyer(stream, reading, writing)
|
||||
destroys.push(destroy)
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
}
|
||||
|
||||
// Catch stream errors that occur after pipe/pump has completed.
|
||||
function onError(err) {
|
||||
if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
stream.on('error', onError)
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(() => {
|
||||
stream.removeListener('error', onError)
|
||||
})
|
||||
}
|
||||
}
|
||||
if (i === 0) {
|
||||
if (typeof stream === 'function') {
|
||||
ret = stream({
|
||||
signal
|
||||
})
|
||||
if (!isIterable(ret)) {
|
||||
throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret)
|
||||
}
|
||||
} else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) {
|
||||
ret = stream
|
||||
} else {
|
||||
ret = Duplex.from(stream)
|
||||
}
|
||||
} else if (typeof stream === 'function') {
|
||||
if (isTransformStream(ret)) {
|
||||
var _ret
|
||||
ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable)
|
||||
} else {
|
||||
ret = makeAsyncIterable(ret)
|
||||
}
|
||||
ret = stream(ret, {
|
||||
signal
|
||||
})
|
||||
if (reading) {
|
||||
if (!isIterable(ret, true)) {
|
||||
throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret)
|
||||
}
|
||||
} else {
|
||||
var _ret2
|
||||
if (!PassThrough) {
|
||||
PassThrough = require('./passthrough')
|
||||
}
|
||||
|
||||
// If the last argument to pipeline is not a stream
|
||||
// we must create a proxy stream so that pipeline(...)
|
||||
// always returns a stream which can be further
|
||||
// composed through `.pipe(stream)`.
|
||||
|
||||
const pt = new PassThrough({
|
||||
objectMode: true
|
||||
})
|
||||
|
||||
// Handle Promises/A+ spec, `then` could be a getter that throws on
|
||||
// second use.
|
||||
const then = (_ret2 = ret) === null || _ret2 === undefined ? undefined : _ret2.then
|
||||
if (typeof then === 'function') {
|
||||
finishCount++
|
||||
then.call(
|
||||
ret,
|
||||
(val) => {
|
||||
value = val
|
||||
if (val != null) {
|
||||
pt.write(val)
|
||||
}
|
||||
if (end) {
|
||||
pt.end()
|
||||
}
|
||||
process.nextTick(finish)
|
||||
},
|
||||
(err) => {
|
||||
pt.destroy(err)
|
||||
process.nextTick(finish, err)
|
||||
}
|
||||
)
|
||||
} else if (isIterable(ret, true)) {
|
||||
finishCount++
|
||||
pumpToNode(ret, pt, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isReadableStream(ret) || isTransformStream(ret)) {
|
||||
const toRead = ret.readable || ret
|
||||
finishCount++
|
||||
pumpToNode(toRead, pt, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret)
|
||||
}
|
||||
ret = pt
|
||||
const { destroy, cleanup } = destroyer(ret, false, true)
|
||||
destroys.push(destroy)
|
||||
if (isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
}
|
||||
} else if (isNodeStream(stream)) {
|
||||
if (isReadableNodeStream(ret)) {
|
||||
finishCount += 2
|
||||
const cleanup = pipe(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
} else if (isTransformStream(ret) || isReadableStream(ret)) {
|
||||
const toRead = ret.readable || ret
|
||||
finishCount++
|
||||
pumpToNode(toRead, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isIterable(ret)) {
|
||||
finishCount++
|
||||
pumpToNode(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE(
|
||||
'val',
|
||||
['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
|
||||
ret
|
||||
)
|
||||
}
|
||||
ret = stream
|
||||
} else if (isWebStream(stream)) {
|
||||
if (isReadableNodeStream(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(makeAsyncIterable(ret), stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isReadableStream(ret) || isIterable(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isTransformStream(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(ret.readable, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE(
|
||||
'val',
|
||||
['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
|
||||
ret
|
||||
)
|
||||
}
|
||||
ret = stream
|
||||
} else {
|
||||
ret = Duplex.from(stream)
|
||||
}
|
||||
}
|
||||
if (
|
||||
(signal !== null && signal !== undefined && signal.aborted) ||
|
||||
(outerSignal !== null && outerSignal !== undefined && outerSignal.aborted)
|
||||
) {
|
||||
process.nextTick(abort)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
function pipe(src, dst, finish, { end }) {
|
||||
let ended = false
|
||||
dst.on('close', () => {
|
||||
if (!ended) {
|
||||
// Finish if the destination closes before the source has completed.
|
||||
finish(new ERR_STREAM_PREMATURE_CLOSE())
|
||||
}
|
||||
})
|
||||
src.pipe(dst, {
|
||||
end: false
|
||||
}) // If end is true we already will have a listener to end dst.
|
||||
|
||||
if (end) {
|
||||
// Compat. Before node v10.12.0 stdio used to throw an error so
|
||||
// pipe() did/does not end() stdio destinations.
|
||||
// Now they allow it but "secretly" don't close the underlying fd.
|
||||
|
||||
function endFn() {
|
||||
ended = true
|
||||
dst.end()
|
||||
}
|
||||
if (isReadableFinished(src)) {
|
||||
// End the destination if the source has already ended.
|
||||
process.nextTick(endFn)
|
||||
} else {
|
||||
src.once('end', endFn)
|
||||
}
|
||||
} else {
|
||||
finish()
|
||||
}
|
||||
eos(
|
||||
src,
|
||||
{
|
||||
readable: true,
|
||||
writable: false
|
||||
},
|
||||
(err) => {
|
||||
const rState = src._readableState
|
||||
if (
|
||||
err &&
|
||||
err.code === 'ERR_STREAM_PREMATURE_CLOSE' &&
|
||||
rState &&
|
||||
rState.ended &&
|
||||
!rState.errored &&
|
||||
!rState.errorEmitted
|
||||
) {
|
||||
// Some readable streams will emit 'close' before 'end'. However, since
|
||||
// this is on the readable side 'end' should still be emitted if the
|
||||
// stream has been ended and no error emitted. This should be allowed in
|
||||
// favor of backwards compatibility. Since the stream is piped to a
|
||||
// destination this should not result in any observable difference.
|
||||
// We don't need to check if this is a writable premature close since
|
||||
// eos will only fail with premature close on the reading side for
|
||||
// duplex streams.
|
||||
src.once('end', finish).once('error', finish)
|
||||
} else {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
)
|
||||
return eos(
|
||||
dst,
|
||||
{
|
||||
readable: false,
|
||||
writable: true
|
||||
},
|
||||
finish
|
||||
)
|
||||
}
|
||||
module.exports = {
|
||||
pipelineImpl,
|
||||
pipeline
|
||||
}
|
||||
module.exports = pipeline;
|
||||
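For reference, a typical call into the `pipeline()` exported above, mirroring the Node.js `stream.pipeline()` callback API (the file names are only illustrative):

```js
const { pipeline } = require('readable-stream')
const fs = require('fs')
const zlib = require('zlib')

pipeline(
  fs.createReadStream('input.log'),     // source
  zlib.createGzip(),                    // transform
  fs.createWriteStream('input.log.gz'), // destination
  (err) => {
    if (err) console.error('pipeline failed', err)
    else console.log('pipeline succeeded')
  }
)
```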
1290 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/readable.js (generated, vendored): file diff suppressed because it is too large
@@ -1,22 +1,39 @@
|
||||
'use strict';
|
||||
'use strict'
|
||||
|
||||
var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
|
||||
const { MathFloor, NumberIsInteger } = require('../../ours/primordials')
|
||||
const { validateInteger } = require('../validators')
|
||||
const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes
|
||||
let defaultHighWaterMarkBytes = 16 * 1024
|
||||
let defaultHighWaterMarkObjectMode = 16
|
||||
function highWaterMarkFrom(options, isDuplex, duplexKey) {
|
||||
return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
|
||||
return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null
|
||||
}
|
||||
function getDefaultHighWaterMark(objectMode) {
|
||||
return objectMode ? defaultHighWaterMarkObjectMode : defaultHighWaterMarkBytes
|
||||
}
|
||||
function setDefaultHighWaterMark(objectMode, value) {
|
||||
validateInteger(value, 'value', 0)
|
||||
if (objectMode) {
|
||||
defaultHighWaterMarkObjectMode = value
|
||||
} else {
|
||||
defaultHighWaterMarkBytes = value
|
||||
}
|
||||
}
|
||||
function getHighWaterMark(state, options, duplexKey, isDuplex) {
|
||||
var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
|
||||
const hwm = highWaterMarkFrom(options, isDuplex, duplexKey)
|
||||
if (hwm != null) {
|
||||
if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
|
||||
var name = isDuplex ? duplexKey : 'highWaterMark';
|
||||
throw new ERR_INVALID_OPT_VALUE(name, hwm);
|
||||
if (!NumberIsInteger(hwm) || hwm < 0) {
|
||||
const name = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark'
|
||||
throw new ERR_INVALID_ARG_VALUE(name, hwm)
|
||||
}
|
||||
return Math.floor(hwm);
|
||||
return MathFloor(hwm)
|
||||
}
|
||||
|
||||
// Default value
|
||||
return state.objectMode ? 16 : 16 * 1024;
|
||||
return getDefaultHighWaterMark(state.objectMode)
|
||||
}
|
||||
module.exports = {
|
||||
getHighWaterMark: getHighWaterMark
|
||||
};
|
||||
getHighWaterMark,
|
||||
getDefaultHighWaterMark,
|
||||
setDefaultHighWaterMark
|
||||
}
|
||||
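A quick sketch of what the configurable defaults above mean in practice. The import path is an assumption (recent readable-stream and Node.js releases expose these helpers from the module root; inside this package they live in this internal file); the numeric values are the ones hard-coded above.

```js
const { getDefaultHighWaterMark, setDefaultHighWaterMark } = require('readable-stream')

getDefaultHighWaterMark(false) // 16384: default byte high-water mark
getDefaultHighWaterMark(true)  // 16: default object-mode high-water mark

setDefaultHighWaterMark(false, 64 * 1024) // raise the byte default for new streams
getDefaultHighWaterMark(false)            // 65536
```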
|
||||
@@ -1 +0,0 @@
|
||||
module.exports = require('events').EventEmitter;
|
||||
@@ -1 +0,0 @@
|
||||
module.exports = require('stream');
|
||||
180 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/transform.js (generated, vendored, new file)
@@ -0,0 +1,180 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a transform stream is a readable/writable stream where you do
|
||||
// something with the data. Sometimes it's called a "filter",
|
||||
// but that's not a great name for it, since that implies a thing where
|
||||
// some bits pass through, and others are simply ignored. (That would
|
||||
// be a valid example of a transform, of course.)
|
||||
//
|
||||
// While the output is causally related to the input, it's not a
|
||||
// necessarily symmetric or synchronous transformation. For example,
|
||||
// a zlib stream might take multiple plain-text writes(), and then
|
||||
// emit a single compressed chunk some time in the future.
|
||||
//
|
||||
// Here's how this works:
|
||||
//
|
||||
// The Transform stream has all the aspects of the readable and writable
|
||||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||||
// internally, and returns false if there's a lot of pending writes
|
||||
// buffered up. When you call read(), that calls _read(n) until
|
||||
// there's enough pending readable data buffered up.
|
||||
//
|
||||
// In a transform stream, the written data is placed in a buffer. When
|
||||
// _read(n) is called, it transforms the queued up data, calling the
|
||||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||||
// written chunk would result in multiple output chunks, then the first
|
||||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||||
//
|
||||
// This way, back-pressure is actually determined by the reading side,
|
||||
// since _read has to be called to start processing a new chunk. However,
|
||||
// a pathological inflate type of transform can cause excessive buffering
|
||||
// here. For example, imagine a stream where every byte of input is
|
||||
// interpreted as an integer from 0-255, and then results in that many
|
||||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||||
// 1kb of data being output. In this case, you could write a very small
|
||||
// amount of input, and end up with a very large amount of output. In
|
||||
// such a pathological inflating mechanism, there'd be no way to tell
|
||||
// the system to stop doing the transform. A single 4MB write could
|
||||
// cause the system to run out of memory.
|
||||
//
|
||||
// However, even in such a pathological case, only a single written chunk
|
||||
// would be consumed, and then the rest would wait (un-transformed) until
|
||||
// the results of the previous transformed chunk were consumed.
|
||||
|
||||
'use strict'
|
||||
|
||||
const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials')
|
||||
module.exports = Transform
|
||||
const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes
|
||||
const Duplex = require('./duplex')
|
||||
const { getHighWaterMark } = require('./state')
|
||||
ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype)
|
||||
ObjectSetPrototypeOf(Transform, Duplex)
|
||||
const kCallback = Symbol('kCallback')
|
||||
function Transform(options) {
|
||||
if (!(this instanceof Transform)) return new Transform(options)
|
||||
|
||||
// TODO (ronag): This should preferably always be
|
||||
// applied but would be semver-major. Or even better;
|
||||
// make Transform a Readable with the Writable interface.
|
||||
const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null
|
||||
if (readableHighWaterMark === 0) {
|
||||
// A Duplex will buffer both on the writable and readable side while
|
||||
// a Transform just wants to buffer hwm number of elements. To avoid
|
||||
// buffering twice we disable buffering on the writable side.
|
||||
options = {
|
||||
...options,
|
||||
highWaterMark: null,
|
||||
readableHighWaterMark,
|
||||
// TODO (ronag): 0 is not optimal since we have
|
||||
// a "bug" where we check needDrain before calling _write and not after.
|
||||
// Refs: https://github.com/nodejs/node/pull/32887
|
||||
// Refs: https://github.com/nodejs/node/pull/35941
|
||||
writableHighWaterMark: options.writableHighWaterMark || 0
|
||||
}
|
||||
}
|
||||
Duplex.call(this, options)
|
||||
|
||||
// We have implemented the _read method, and done the other things
|
||||
// that Readable wants before the first _read call, so unset the
|
||||
// sync guard flag.
|
||||
this._readableState.sync = false
|
||||
this[kCallback] = null
|
||||
if (options) {
|
||||
if (typeof options.transform === 'function') this._transform = options.transform
|
||||
if (typeof options.flush === 'function') this._flush = options.flush
|
||||
}
|
||||
|
||||
// When the writable side finishes, then flush out anything remaining.
|
||||
// Backwards compat. Some Transform streams incorrectly implement _final
|
||||
// instead of or in addition to _flush. By using 'prefinish' instead of
|
||||
// implementing _final we continue supporting this unfortunate use case.
|
||||
this.on('prefinish', prefinish)
|
||||
}
|
||||
function final(cb) {
|
||||
if (typeof this._flush === 'function' && !this.destroyed) {
|
||||
this._flush((er, data) => {
|
||||
if (er) {
|
||||
if (cb) {
|
||||
cb(er)
|
||||
} else {
|
||||
this.destroy(er)
|
||||
}
|
||||
return
|
||||
}
|
||||
if (data != null) {
|
||||
this.push(data)
|
||||
}
|
||||
this.push(null)
|
||||
if (cb) {
|
||||
cb()
|
||||
}
|
||||
})
|
||||
} else {
|
||||
this.push(null)
|
||||
if (cb) {
|
||||
cb()
|
||||
}
|
||||
}
|
||||
}
|
||||
function prefinish() {
|
||||
if (this._final !== final) {
|
||||
final.call(this)
|
||||
}
|
||||
}
|
||||
Transform.prototype._final = final
|
||||
Transform.prototype._transform = function (chunk, encoding, callback) {
|
||||
throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()')
|
||||
}
|
||||
Transform.prototype._write = function (chunk, encoding, callback) {
|
||||
const rState = this._readableState
|
||||
const wState = this._writableState
|
||||
const length = rState.length
|
||||
this._transform(chunk, encoding, (err, val) => {
|
||||
if (err) {
|
||||
callback(err)
|
||||
return
|
||||
}
|
||||
if (val != null) {
|
||||
this.push(val)
|
||||
}
|
||||
if (
|
||||
wState.ended ||
|
||||
// Backwards compat.
|
||||
length === rState.length ||
|
||||
// Backwards compat.
|
||||
rState.length < rState.highWaterMark
|
||||
) {
|
||||
callback()
|
||||
} else {
|
||||
this[kCallback] = callback
|
||||
}
|
||||
})
|
||||
}
|
||||
Transform.prototype._read = function () {
|
||||
if (this[kCallback]) {
|
||||
const callback = this[kCallback]
|
||||
this[kCallback] = null
|
||||
callback()
|
||||
}
|
||||
}
|
||||
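Since the constructor above accepts `transform` and `flush` as options, a Transform can be built without subclassing. A small illustrative example:

```js
const { Transform } = require('readable-stream')

// Upper-cases each chunk; flush() emits a trailing marker after the writable
// side has ended and before 'end' is emitted on the readable side.
const upper = new Transform({
  transform(chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase())
  },
  flush(callback) {
    callback(null, '\n-- done --\n')
  }
})

process.stdin.pipe(upper).pipe(process.stdout)
```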
327 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/utils.js (generated, vendored, new file)
@@ -0,0 +1,327 @@
|
||||
'use strict'
|
||||
|
||||
const { SymbolAsyncIterator, SymbolIterator, SymbolFor } = require('../../ours/primordials')
|
||||
|
||||
// We need to use SymbolFor to make these globally available
|
||||
// for interop with readable-stream, i.e. readable-stream
|
||||
// and node core needs to be able to read/write private state
|
||||
// from each other for proper interoperability.
|
||||
const kIsDestroyed = SymbolFor('nodejs.stream.destroyed')
|
||||
const kIsErrored = SymbolFor('nodejs.stream.errored')
|
||||
const kIsReadable = SymbolFor('nodejs.stream.readable')
|
||||
const kIsWritable = SymbolFor('nodejs.stream.writable')
|
||||
const kIsDisturbed = SymbolFor('nodejs.stream.disturbed')
|
||||
const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise')
|
||||
const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction')
|
||||
function isReadableNodeStream(obj, strict = false) {
|
||||
var _obj$_readableState
|
||||
return !!(
|
||||
(
|
||||
obj &&
|
||||
typeof obj.pipe === 'function' &&
|
||||
typeof obj.on === 'function' &&
|
||||
(!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) &&
|
||||
(!obj._writableState ||
|
||||
((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined
|
||||
? undefined
|
||||
: _obj$_readableState.readable) !== false) &&
|
||||
// Duplex
|
||||
(!obj._writableState || obj._readableState)
|
||||
) // Writable has .pipe.
|
||||
)
|
||||
}
|
||||
function isWritableNodeStream(obj) {
|
||||
var _obj$_writableState
|
||||
return !!(
|
||||
(
|
||||
obj &&
|
||||
typeof obj.write === 'function' &&
|
||||
typeof obj.on === 'function' &&
|
||||
(!obj._readableState ||
|
||||
((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined
|
||||
? undefined
|
||||
: _obj$_writableState.writable) !== false)
|
||||
) // Duplex
|
||||
)
|
||||
}
|
||||
function isDuplexNodeStream(obj) {
|
||||
return !!(
|
||||
obj &&
|
||||
typeof obj.pipe === 'function' &&
|
||||
obj._readableState &&
|
||||
typeof obj.on === 'function' &&
|
||||
typeof obj.write === 'function'
|
||||
)
|
||||
}
|
||||
function isNodeStream(obj) {
|
||||
return (
|
||||
obj &&
|
||||
(obj._readableState ||
|
||||
obj._writableState ||
|
||||
(typeof obj.write === 'function' && typeof obj.on === 'function') ||
|
||||
(typeof obj.pipe === 'function' && typeof obj.on === 'function'))
|
||||
)
|
||||
}
|
||||
function isReadableStream(obj) {
|
||||
return !!(
|
||||
obj &&
|
||||
!isNodeStream(obj) &&
|
||||
typeof obj.pipeThrough === 'function' &&
|
||||
typeof obj.getReader === 'function' &&
|
||||
typeof obj.cancel === 'function'
|
||||
)
|
||||
}
|
||||
function isWritableStream(obj) {
|
||||
return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function')
|
||||
}
|
||||
function isTransformStream(obj) {
|
||||
return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object')
|
||||
}
|
||||
function isWebStream(obj) {
|
||||
return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj)
|
||||
}
|
||||
function isIterable(obj, isAsync) {
|
||||
if (obj == null) return false
|
||||
if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'
|
||||
if (isAsync === false) return typeof obj[SymbolIterator] === 'function'
|
||||
return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function'
|
||||
}
|
||||
function isDestroyed(stream) {
|
||||
if (!isNodeStream(stream)) return null
|
||||
const wState = stream._writableState
|
||||
const rState = stream._readableState
|
||||
const state = wState || rState
|
||||
return !!(stream.destroyed || stream[kIsDestroyed] || (state !== null && state !== undefined && state.destroyed))
|
||||
}
|
||||
|
||||
// Have been end():d.
|
||||
function isWritableEnded(stream) {
|
||||
if (!isWritableNodeStream(stream)) return null
|
||||
if (stream.writableEnded === true) return true
|
||||
const wState = stream._writableState
|
||||
if (wState !== null && wState !== undefined && wState.errored) return false
|
||||
if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null
|
||||
return wState.ended
|
||||
}
|
||||
|
||||
// Have emitted 'finish'.
|
||||
function isWritableFinished(stream, strict) {
|
||||
if (!isWritableNodeStream(stream)) return null
|
||||
if (stream.writableFinished === true) return true
|
||||
const wState = stream._writableState
|
||||
if (wState !== null && wState !== undefined && wState.errored) return false
|
||||
if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null
|
||||
return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0))
|
||||
}
|
||||
|
||||
// Have been push(null):d.
|
||||
function isReadableEnded(stream) {
|
||||
if (!isReadableNodeStream(stream)) return null
|
||||
if (stream.readableEnded === true) return true
|
||||
const rState = stream._readableState
|
||||
if (!rState || rState.errored) return false
|
||||
if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null
|
||||
return rState.ended
|
||||
}
|
||||
|
||||
// Have emitted 'end'.
|
||||
function isReadableFinished(stream, strict) {
|
||||
if (!isReadableNodeStream(stream)) return null
|
||||
const rState = stream._readableState
|
||||
if (rState !== null && rState !== undefined && rState.errored) return false
|
||||
if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null
|
||||
return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0))
|
||||
}
|
||||
function isReadable(stream) {
|
||||
if (stream && stream[kIsReadable] != null) return stream[kIsReadable]
|
||||
if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null
|
||||
if (isDestroyed(stream)) return false
|
||||
return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream)
|
||||
}
|
||||
function isWritable(stream) {
|
||||
if (stream && stream[kIsWritable] != null) return stream[kIsWritable]
|
||||
if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null
|
||||
if (isDestroyed(stream)) return false
|
||||
return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream)
|
||||
}
|
||||
function isFinished(stream, opts) {
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (isDestroyed(stream)) {
|
||||
return true
|
||||
}
|
||||
if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) {
|
||||
return false
|
||||
}
|
||||
if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
function isWritableErrored(stream) {
|
||||
var _stream$_writableStat, _stream$_writableStat2
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (stream.writableErrored) {
|
||||
return stream.writableErrored
|
||||
}
|
||||
return (_stream$_writableStat =
|
||||
(_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined
|
||||
? undefined
|
||||
: _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined
|
||||
? _stream$_writableStat
|
||||
: null
|
||||
}
|
||||
function isReadableErrored(stream) {
|
||||
var _stream$_readableStat, _stream$_readableStat2
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (stream.readableErrored) {
|
||||
return stream.readableErrored
|
||||
}
|
||||
return (_stream$_readableStat =
|
||||
(_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined
|
||||
? undefined
|
||||
: _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined
|
||||
? _stream$_readableStat
|
||||
: null
|
||||
}
|
||||
function isClosed(stream) {
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (typeof stream.closed === 'boolean') {
|
||||
return stream.closed
|
||||
}
|
||||
const wState = stream._writableState
|
||||
const rState = stream._readableState
|
||||
if (
|
||||
typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' ||
|
||||
typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean'
|
||||
) {
|
||||
return (
|
||||
(wState === null || wState === undefined ? undefined : wState.closed) ||
|
||||
(rState === null || rState === undefined ? undefined : rState.closed)
|
||||
)
|
||||
}
|
||||
if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) {
|
||||
return stream._closed
|
||||
}
|
||||
return null
|
||||
}
|
||||
function isOutgoingMessage(stream) {
|
||||
return (
|
||||
typeof stream._closed === 'boolean' &&
|
||||
typeof stream._defaultKeepAlive === 'boolean' &&
|
||||
typeof stream._removedConnection === 'boolean' &&
|
||||
typeof stream._removedContLen === 'boolean'
|
||||
)
|
||||
}
|
||||
function isServerResponse(stream) {
|
||||
return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream)
|
||||
}
|
||||
function isServerRequest(stream) {
|
||||
var _stream$req
|
||||
return (
|
||||
typeof stream._consuming === 'boolean' &&
|
||||
typeof stream._dumped === 'boolean' &&
|
||||
((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) ===
|
||||
undefined
|
||||
)
|
||||
}
|
||||
function willEmitClose(stream) {
|
||||
if (!isNodeStream(stream)) return null
|
||||
const wState = stream._writableState
|
||||
const rState = stream._readableState
|
||||
const state = wState || rState
|
||||
return (
|
||||
(!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false)
|
||||
)
|
||||
}
|
||||
function isDisturbed(stream) {
|
||||
var _stream$kIsDisturbed
|
||||
return !!(
|
||||
stream &&
|
||||
((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined
|
||||
? _stream$kIsDisturbed
|
||||
: stream.readableDidRead || stream.readableAborted)
|
||||
)
|
||||
}
|
||||
function isErrored(stream) {
|
||||
var _ref,
|
||||
_ref2,
|
||||
_ref3,
|
||||
_ref4,
|
||||
_ref5,
|
||||
_stream$kIsErrored,
|
||||
_stream$_readableStat3,
|
||||
_stream$_writableStat3,
|
||||
_stream$_readableStat4,
|
||||
_stream$_writableStat4
|
||||
return !!(
|
||||
stream &&
|
||||
((_ref =
|
||||
(_ref2 =
|
||||
(_ref3 =
|
||||
(_ref4 =
|
||||
(_ref5 =
|
||||
(_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined
|
||||
? _stream$kIsErrored
|
||||
: stream.readableErrored) !== null && _ref5 !== undefined
|
||||
? _ref5
|
||||
: stream.writableErrored) !== null && _ref4 !== undefined
|
||||
? _ref4
|
||||
: (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined
|
||||
? undefined
|
||||
: _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined
|
||||
? _ref3
|
||||
: (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined
|
||||
? undefined
|
||||
: _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined
|
||||
? _ref2
|
||||
: (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined
|
||||
? undefined
|
||||
: _stream$_readableStat4.errored) !== null && _ref !== undefined
|
||||
? _ref
|
||||
: (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined
|
||||
? undefined
|
||||
: _stream$_writableStat4.errored)
|
||||
)
|
||||
}
|
||||
module.exports = {
|
||||
isDestroyed,
|
||||
kIsDestroyed,
|
||||
isDisturbed,
|
||||
kIsDisturbed,
|
||||
isErrored,
|
||||
kIsErrored,
|
||||
isReadable,
|
||||
kIsReadable,
|
||||
kIsClosedPromise,
|
||||
kControllerErrorFunction,
|
||||
kIsWritable,
|
||||
isClosed,
|
||||
isDuplexNodeStream,
|
||||
isFinished,
|
||||
isIterable,
|
||||
isReadableNodeStream,
|
||||
isReadableStream,
|
||||
isReadableEnded,
|
||||
isReadableFinished,
|
||||
isReadableErrored,
|
||||
isNodeStream,
|
||||
isWebStream,
|
||||
isWritable,
|
||||
isWritableNodeStream,
|
||||
isWritableStream,
|
||||
isWritableEnded,
|
||||
isWritableFinished,
|
||||
isWritableErrored,
|
||||
isServerRequest,
|
||||
isServerResponse,
|
||||
willEmitClose,
|
||||
isTransformStream
|
||||
}
|
||||
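A sketch of how these duck-typing guards behave. The deep require path is internal and shown only for illustration, and the `ReadableStream` line assumes a runtime where the web-streams globals exist:

```js
const utils = require('readable-stream/lib/internal/streams/utils') // internal path, illustration only
const { Readable } = require('readable-stream')

const nodeStream = Readable.from(['a', 'b'])
utils.isReadableNodeStream(nodeStream)  // true: has pipe()/on() and a readable state
utils.isWebStream(nodeStream)           // false
utils.isWebStream(new ReadableStream()) // true: pipeThrough()/getReader()/cancel() present
utils.isIterable(['a', 'b'], false)     // true: has Symbol.iterator
utils.isIterable(nodeStream, true)      // true: Readables are async iterable
```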
819 node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/streams/writable.js (generated, vendored, new file)
@@ -0,0 +1,819 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
|
||||
|
||||
'use strict'
|
||||
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const {
|
||||
ArrayPrototypeSlice,
|
||||
Error,
|
||||
FunctionPrototypeSymbolHasInstance,
|
||||
ObjectDefineProperty,
|
||||
ObjectDefineProperties,
|
||||
ObjectSetPrototypeOf,
|
||||
StringPrototypeToLowerCase,
|
||||
Symbol,
|
||||
SymbolHasInstance
|
||||
} = require('../../ours/primordials')
|
||||
module.exports = Writable
|
||||
Writable.WritableState = WritableState
|
||||
const { EventEmitter: EE } = require('events')
|
||||
const Stream = require('./legacy').Stream
|
||||
const { Buffer } = require('buffer')
|
||||
const destroyImpl = require('./destroy')
|
||||
const { addAbortSignal } = require('./add-abort-signal')
|
||||
const { getHighWaterMark, getDefaultHighWaterMark } = require('./state')
|
||||
const {
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
ERR_METHOD_NOT_IMPLEMENTED,
|
||||
ERR_MULTIPLE_CALLBACK,
|
||||
ERR_STREAM_CANNOT_PIPE,
|
||||
ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_ALREADY_FINISHED,
|
||||
ERR_STREAM_NULL_VALUES,
|
||||
ERR_STREAM_WRITE_AFTER_END,
|
||||
ERR_UNKNOWN_ENCODING
|
||||
} = require('../../ours/errors').codes
|
||||
const { errorOrDestroy } = destroyImpl
|
||||
ObjectSetPrototypeOf(Writable.prototype, Stream.prototype)
|
||||
ObjectSetPrototypeOf(Writable, Stream)
|
||||
function nop() {}
|
||||
const kOnFinished = Symbol('kOnFinished')
|
||||
function WritableState(options, stream, isDuplex) {
|
||||
// Duplex streams are both readable and writable, but share
|
||||
// the same options object.
|
||||
// However, some cases require setting options to different
|
||||
// values for the readable and the writable sides of the duplex stream,
|
||||
// e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
|
||||
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex')
|
||||
|
||||
// Object stream flag to indicate whether or not this stream
|
||||
// contains buffers or objects.
|
||||
this.objectMode = !!(options && options.objectMode)
|
||||
if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode)
|
||||
|
||||
// The point at which write() starts returning false
|
||||
// Note: 0 is a valid value, means that we always return false if
|
||||
// the entire buffer is not flushed immediately on write().
|
||||
this.highWaterMark = options
|
||||
? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex)
|
||||
: getDefaultHighWaterMark(false)
|
||||
|
||||
// if _final has been called.
|
||||
this.finalCalled = false
|
||||
|
||||
// drain event flag.
|
||||
this.needDrain = false
|
||||
// At the start of calling end()
|
||||
this.ending = false
|
||||
// When end() has been called, and returned.
|
||||
this.ended = false
|
||||
// When 'finish' is emitted.
|
||||
this.finished = false
|
||||
|
||||
// Has it been destroyed
|
||||
this.destroyed = false
|
||||
|
||||
// Should we decode strings into buffers before passing to _write?
|
||||
// this is here so that some node-core streams can optimize string
|
||||
// handling at a lower level.
|
||||
const noDecode = !!(options && options.decodeStrings === false)
|
||||
this.decodeStrings = !noDecode
|
||||
|
||||
// Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'
|
||||
|
||||
// Not an actual buffer we keep track of, but a measurement
|
||||
// of how much we're waiting to get pushed to some underlying
|
||||
// socket or file.
|
||||
this.length = 0
|
||||
|
||||
// A flag to see when we're in the middle of a write.
|
||||
this.writing = false
|
||||
|
||||
// When true all writes will be buffered until .uncork() call.
|
||||
this.corked = 0
|
||||
|
||||
// A flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
this.sync = true
|
||||
|
||||
// A flag to know if we're processing previously buffered items, which
|
||||
// may call the _write() callback in the same tick, so that we don't
|
||||
// end up in an overlapped onwrite situation.
|
||||
this.bufferProcessing = false
|
||||
|
||||
// The callback that's passed to _write(chunk, cb).
|
||||
this.onwrite = onwrite.bind(undefined, stream)
|
||||
|
||||
// The callback that the user supplies to write(chunk, encoding, cb).
|
||||
this.writecb = null
|
||||
|
||||
// The amount that is being written when _write is called.
|
||||
this.writelen = 0
|
||||
|
||||
// Storage for data passed to the afterWrite() callback in case of
|
||||
// synchronous _write() completion.
|
||||
this.afterWriteTickInfo = null
|
||||
resetBuffer(this)
|
||||
|
||||
// Number of pending user-supplied write callbacks
|
||||
// this must be 0 before 'finish' can be emitted.
|
||||
this.pendingcb = 0
|
||||
|
||||
// Stream is still being constructed and cannot be
|
||||
// destroyed until construction finished or failed.
|
||||
// Async construction is opt in, therefore we start as
|
||||
// constructed.
|
||||
this.constructed = true
|
||||
|
||||
// Emit prefinish if the only thing we're waiting for is _write cbs
|
||||
// This is relevant for synchronous Transform streams.
|
||||
this.prefinished = false
|
||||
|
||||
// True if the error was already emitted and should not be thrown again.
|
||||
this.errorEmitted = false
|
||||
|
||||
// Should close be emitted on destroy. Defaults to true.
|
||||
this.emitClose = !options || options.emitClose !== false
|
||||
|
||||
// Should .destroy() be called after 'finish' (and potentially 'end').
|
||||
this.autoDestroy = !options || options.autoDestroy !== false
|
||||
|
||||
// Indicates whether the stream has errored. When true all write() calls
|
||||
// should return false. This is needed since when autoDestroy
|
||||
// is disabled we need a way to tell whether the stream has failed.
|
||||
this.errored = null
|
||||
|
||||
// Indicates whether the stream has finished destroying.
|
||||
this.closed = false
|
||||
|
||||
// True if close has been emitted or would have been emitted
|
||||
// depending on emitClose.
|
||||
this.closeEmitted = false
|
||||
this[kOnFinished] = []
|
||||
}
|
||||
function resetBuffer(state) {
|
||||
state.buffered = []
|
||||
state.bufferedIndex = 0
|
||||
state.allBuffers = true
|
||||
state.allNoop = true
|
||||
}
|
||||
WritableState.prototype.getBuffer = function getBuffer() {
|
||||
return ArrayPrototypeSlice(this.buffered, this.bufferedIndex)
|
||||
}
|
||||
ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this.buffered.length - this.bufferedIndex
|
||||
}
|
||||
})
|
||||
function Writable(options) {
|
||||
// Writable ctor is applied to Duplexes, too.
|
||||
// `realHasInstance` is necessary because using plain `instanceof`
|
||||
// would return false, as no `_writableState` property is attached.
|
||||
|
||||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||||
// `_writableState` that would lead to infinite recursion.
|
||||
|
||||
// Checking for a Stream.Duplex instance is faster here instead of inside
|
||||
// the WritableState constructor, at least with V8 6.5.
|
||||
const isDuplex = this instanceof require('./duplex')
|
||||
if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options)
|
||||
this._writableState = new WritableState(options, this, isDuplex)
|
||||
if (options) {
|
||||
if (typeof options.write === 'function') this._write = options.write
|
||||
if (typeof options.writev === 'function') this._writev = options.writev
|
||||
if (typeof options.destroy === 'function') this._destroy = options.destroy
|
||||
if (typeof options.final === 'function') this._final = options.final
|
||||
if (typeof options.construct === 'function') this._construct = options.construct
|
||||
if (options.signal) addAbortSignal(options.signal, this)
|
||||
}
|
||||
Stream.call(this, options)
|
||||
destroyImpl.construct(this, () => {
|
||||
const state = this._writableState
|
||||
if (!state.writing) {
|
||||
clearBuffer(this, state)
|
||||
}
|
||||
finishMaybe(this, state)
|
||||
})
|
||||
}
|
||||
ObjectDefineProperty(Writable, SymbolHasInstance, {
|
||||
__proto__: null,
|
||||
value: function (object) {
|
||||
if (FunctionPrototypeSymbolHasInstance(this, object)) return true
|
||||
if (this !== Writable) return false
|
||||
return object && object._writableState instanceof WritableState
|
||||
}
|
||||
})
|
||||
|
||||
// Otherwise people can pipe Writable streams, which is just wrong.
|
||||
Writable.prototype.pipe = function () {
|
||||
errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE())
|
||||
}
|
||||
function _write(stream, chunk, encoding, cb) {
|
||||
const state = stream._writableState
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
encoding = state.defaultEncoding
|
||||
} else {
|
||||
if (!encoding) encoding = state.defaultEncoding
|
||||
else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
|
||||
if (typeof cb !== 'function') cb = nop
|
||||
}
|
||||
if (chunk === null) {
|
||||
throw new ERR_STREAM_NULL_VALUES()
|
||||
} else if (!state.objectMode) {
|
||||
if (typeof chunk === 'string') {
|
||||
if (state.decodeStrings !== false) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
encoding = 'buffer'
|
||||
}
|
||||
} else if (chunk instanceof Buffer) {
|
||||
encoding = 'buffer'
|
||||
} else if (Stream._isUint8Array(chunk)) {
|
||||
chunk = Stream._uint8ArrayToBuffer(chunk)
|
||||
encoding = 'buffer'
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)
|
||||
}
|
||||
}
|
||||
let err
|
||||
if (state.ending) {
|
||||
err = new ERR_STREAM_WRITE_AFTER_END()
|
||||
} else if (state.destroyed) {
|
||||
err = new ERR_STREAM_DESTROYED('write')
|
||||
}
|
||||
if (err) {
|
||||
process.nextTick(cb, err)
|
||||
errorOrDestroy(stream, err, true)
|
||||
return err
|
||||
}
|
||||
state.pendingcb++
|
||||
return writeOrBuffer(stream, state, chunk, encoding, cb)
|
||||
}
|
||||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||||
return _write(this, chunk, encoding, cb) === true
|
||||
}
|
||||
Writable.prototype.cork = function () {
|
||||
this._writableState.corked++
|
||||
}
|
||||
Writable.prototype.uncork = function () {
|
||||
const state = this._writableState
|
||||
if (state.corked) {
|
||||
state.corked--
|
||||
if (!state.writing) clearBuffer(this, state)
|
||||
}
|
||||
}
|
||||
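`cork()` and `uncork()` above only bump and release the `state.corked` counter; while it is non-zero, writes are buffered and flushed as a batch by the clearing logic further down. A minimal self-contained sketch (the discarding sink is just for demonstration):

```js
const { Writable } = require('readable-stream')

const sink = new Writable({
  write(chunk, encoding, callback) { callback() } // discard; demonstration only
})

sink.cork()
sink.write('HTTP/1.1 200 OK\r\n')
sink.write('Content-Length: 2\r\n\r\n')
sink.write('ok')
process.nextTick(() => sink.uncork()) // or sink.end(), which fully uncorks
```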
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||||
// node::ParseEncoding() requires lower case.
|
||||
if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding)
|
||||
if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
|
||||
this._writableState.defaultEncoding = encoding
|
||||
return this
|
||||
}
|
||||
|
||||
// If we're already writing something, then just put this
|
||||
// in the queue, and wait our turn. Otherwise, call _write
|
||||
// If we return false, then we need a drain event, so set that flag.
|
||||
function writeOrBuffer(stream, state, chunk, encoding, callback) {
|
||||
const len = state.objectMode ? 1 : chunk.length
|
||||
state.length += len
|
||||
|
||||
// stream._write resets state.length
|
||||
const ret = state.length < state.highWaterMark
|
||||
// We must ensure that previous needDrain will not be reset to false.
|
||||
if (!ret) state.needDrain = true
|
||||
if (state.writing || state.corked || state.errored || !state.constructed) {
|
||||
state.buffered.push({
|
||||
chunk,
|
||||
encoding,
|
||||
callback
|
||||
})
|
||||
if (state.allBuffers && encoding !== 'buffer') {
|
||||
state.allBuffers = false
|
||||
}
|
||||
if (state.allNoop && callback !== nop) {
|
||||
state.allNoop = false
|
||||
}
|
||||
} else {
|
||||
state.writelen = len
|
||||
state.writecb = callback
|
||||
state.writing = true
|
||||
state.sync = true
|
||||
stream._write(chunk, encoding, state.onwrite)
|
||||
state.sync = false
|
||||
}
|
||||
|
||||
// Return false if errored or destroyed in order to break
|
||||
// any synchronous while(stream.write(data)) loops.
|
||||
return ret && !state.errored && !state.destroyed
|
||||
}
|
||||
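The buffering above is what makes `write()` return `false` once `state.length` reaches the high-water mark. A producer that respects that signal waits for `'drain'` before writing more, for example:

```js
const { once } = require('events')

// Writes chunks without letting the internal buffer grow past the high-water mark.
async function writeAll(writable, chunks) {
  for (const chunk of chunks) {
    if (!writable.write(chunk)) {
      await once(writable, 'drain') // resume only after the buffer has flushed
    }
  }
  writable.end()
}

// e.g. writeAll(require('fs').createWriteStream('out.txt'), ['a', 'b', 'c'])
```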
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||||
state.writelen = len
|
||||
state.writecb = cb
|
||||
state.writing = true
|
||||
state.sync = true
|
||||
if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'))
|
||||
else if (writev) stream._writev(chunk, state.onwrite)
|
||||
else stream._write(chunk, encoding, state.onwrite)
|
||||
state.sync = false
|
||||
}
|
||||
function onwriteError(stream, state, er, cb) {
|
||||
--state.pendingcb
|
||||
cb(er)
|
||||
// Ensure callbacks are invoked even when autoDestroy is
|
||||
// not enabled. Passing `er` here doesn't make sense since
|
||||
// it's related to one specific write, not to the buffered
|
||||
// writes.
|
||||
errorBuffer(state)
|
||||
// This can emit error, but error must always follow cb.
|
||||
errorOrDestroy(stream, er)
|
||||
}
|
||||
function onwrite(stream, er) {
|
||||
const state = stream._writableState
|
||||
const sync = state.sync
|
||||
const cb = state.writecb
|
||||
if (typeof cb !== 'function') {
|
||||
errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK())
|
||||
return
|
||||
}
|
||||
state.writing = false
|
||||
state.writecb = null
|
||||
state.length -= state.writelen
|
||||
state.writelen = 0
|
||||
if (er) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
er.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (!state.errored) {
|
||||
state.errored = er
|
||||
}
|
||||
|
||||
// In case of duplex streams we need to notify the readable side of the
|
||||
// error.
|
||||
if (stream._readableState && !stream._readableState.errored) {
|
||||
stream._readableState.errored = er
|
||||
}
|
||||
if (sync) {
|
||||
process.nextTick(onwriteError, stream, state, er, cb)
|
||||
} else {
|
||||
onwriteError(stream, state, er, cb)
|
||||
}
|
||||
} else {
|
||||
if (state.buffered.length > state.bufferedIndex) {
|
||||
clearBuffer(stream, state)
|
||||
}
|
||||
if (sync) {
|
||||
// It is a common case that the callback passed to .write() is always
|
||||
// the same. In that case, we do not schedule a new nextTick(), but
|
||||
// rather just increase a counter, to improve performance and avoid
|
||||
// memory allocations.
|
||||
if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) {
|
||||
state.afterWriteTickInfo.count++
|
||||
} else {
|
||||
state.afterWriteTickInfo = {
|
||||
count: 1,
|
||||
cb,
|
||||
stream,
|
||||
state
|
||||
}
|
||||
process.nextTick(afterWriteTick, state.afterWriteTickInfo)
|
||||
}
|
||||
} else {
|
||||
afterWrite(stream, state, 1, cb)
|
||||
}
|
||||
}
|
||||
}
|
||||
function afterWriteTick({ stream, state, count, cb }) {
|
||||
state.afterWriteTickInfo = null
|
||||
return afterWrite(stream, state, count, cb)
|
||||
}
|
||||
function afterWrite(stream, state, count, cb) {
|
||||
const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain
|
||||
if (needDrain) {
|
||||
state.needDrain = false
|
||||
stream.emit('drain')
|
||||
}
|
||||
while (count-- > 0) {
|
||||
state.pendingcb--
|
||||
cb()
|
||||
}
|
||||
if (state.destroyed) {
|
||||
errorBuffer(state)
|
||||
}
|
||||
finishMaybe(stream, state)
|
||||
}
|
||||
|
||||
// If there's something in the buffer waiting, then invoke callbacks.
|
||||
function errorBuffer(state) {
|
||||
if (state.writing) {
|
||||
return
|
||||
}
|
||||
for (let n = state.bufferedIndex; n < state.buffered.length; ++n) {
|
||||
var _state$errored
|
||||
const { chunk, callback } = state.buffered[n]
|
||||
const len = state.objectMode ? 1 : chunk.length
|
||||
state.length -= len
|
||||
callback(
|
||||
(_state$errored = state.errored) !== null && _state$errored !== undefined
|
||||
? _state$errored
|
||||
: new ERR_STREAM_DESTROYED('write')
|
||||
)
|
||||
}
|
||||
const onfinishCallbacks = state[kOnFinished].splice(0)
|
||||
for (let i = 0; i < onfinishCallbacks.length; i++) {
|
||||
var _state$errored2
|
||||
onfinishCallbacks[i](
|
||||
(_state$errored2 = state.errored) !== null && _state$errored2 !== undefined
|
||||
? _state$errored2
|
||||
: new ERR_STREAM_DESTROYED('end')
|
||||
)
|
||||
}
|
||||
resetBuffer(state)
|
||||
}
|
||||
|
||||
// If there's something in the buffer waiting, then process it.
|
||||
function clearBuffer(stream, state) {
|
||||
if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) {
|
||||
return
|
||||
}
|
||||
const { buffered, bufferedIndex, objectMode } = state
|
||||
const bufferedLength = buffered.length - bufferedIndex
|
||||
if (!bufferedLength) {
|
||||
return
|
||||
}
|
||||
let i = bufferedIndex
|
||||
state.bufferProcessing = true
|
||||
if (bufferedLength > 1 && stream._writev) {
|
||||
state.pendingcb -= bufferedLength - 1
|
||||
const callback = state.allNoop
|
||||
? nop
|
||||
: (err) => {
|
||||
for (let n = i; n < buffered.length; ++n) {
|
||||
buffered[n].callback(err)
|
||||
}
|
||||
}
|
||||
// Make a copy of `buffered` if it's going to be used by `callback` above,
|
||||
// since `doWrite` will mutate the array.
|
||||
const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i)
|
||||
chunks.allBuffers = state.allBuffers
|
||||
doWrite(stream, state, true, state.length, chunks, '', callback)
|
||||
resetBuffer(state)
|
||||
} else {
|
||||
do {
|
||||
const { chunk, encoding, callback } = buffered[i]
|
||||
buffered[i++] = null
|
||||
const len = objectMode ? 1 : chunk.length
|
||||
doWrite(stream, state, false, len, chunk, encoding, callback)
|
||||
} while (i < buffered.length && !state.writing)
|
||||
if (i === buffered.length) {
|
||||
resetBuffer(state)
|
||||
} else if (i > 256) {
|
||||
buffered.splice(0, i)
|
||||
state.bufferedIndex = 0
|
||||
} else {
|
||||
state.bufferedIndex = i
|
||||
}
|
||||
}
|
||||
state.bufferProcessing = false
|
||||
}
|
||||
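`clearBuffer()` above prefers `_writev()` whenever more than one chunk is queued, so a sink that can batch I/O only needs to supply `writev`. A sketch under that assumption, using stdout as the batch target:

```js
const { Writable } = require('readable-stream')

const batching = new Writable({
  writev(chunks, callback) {
    // chunks is an array of { chunk, encoding } entries queued while a write was pending.
    const payload = Buffer.concat(chunks.map(({ chunk }) => Buffer.from(chunk)))
    process.stdout.write(payload, callback) // flush the whole batch at once
  }
})

batching.cork()
batching.write('a')
batching.write('b')
batching.write('c')
batching.uncork() // the three buffered chunks arrive in a single writev() call
```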
Writable.prototype._write = function (chunk, encoding, cb) {
|
||||
if (this._writev) {
|
||||
this._writev(
|
||||
[
|
||||
{
|
||||
chunk,
|
||||
encoding
|
||||
}
|
||||
],
|
||||
cb
|
||||
)
|
||||
} else {
|
||||
throw new ERR_METHOD_NOT_IMPLEMENTED('_write()')
|
||||
}
|
||||
}
|
||||
Writable.prototype._writev = null
|
||||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||||
const state = this._writableState
|
||||
if (typeof chunk === 'function') {
|
||||
cb = chunk
|
||||
chunk = null
|
||||
encoding = null
|
||||
} else if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
encoding = null
|
||||
}
|
||||
let err
|
||||
if (chunk !== null && chunk !== undefined) {
|
||||
const ret = _write(this, chunk, encoding)
|
||||
if (ret instanceof Error) {
|
||||
err = ret
|
||||
}
|
||||
}
|
||||
|
||||
// .end() fully uncorks.
|
||||
if (state.corked) {
|
||||
state.corked = 1
|
||||
this.uncork()
|
||||
}
|
||||
if (err) {
|
||||
// Do nothing...
|
||||
} else if (!state.errored && !state.ending) {
|
||||
// This is forgiving in terms of unnecessary calls to end() and can hide
|
||||
// logic errors. However, usually such errors are harmless and causing a
|
||||
// hard error can be disproportionately destructive. It is not always
|
||||
// trivial for the user to determine whether end() needs to be called
|
||||
// or not.
|
||||
|
||||
state.ending = true
|
||||
finishMaybe(this, state, true)
|
||||
state.ended = true
|
||||
} else if (state.finished) {
|
||||
err = new ERR_STREAM_ALREADY_FINISHED('end')
|
||||
} else if (state.destroyed) {
|
||||
err = new ERR_STREAM_DESTROYED('end')
|
||||
}
|
||||
if (typeof cb === 'function') {
|
||||
if (err || state.finished) {
|
||||
process.nextTick(cb, err)
|
||||
} else {
|
||||
state[kOnFinished].push(cb)
|
||||
}
|
||||
}
|
||||
return this
|
||||
}
|
||||
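As implemented above, `end()` optionally takes a final chunk and a callback; the callback is queued on `kOnFinished` and invoked once `'finish'` fires, or on the next tick with an error if the stream has already finished or been destroyed. A small self-contained example:

```js
const { Writable } = require('readable-stream')
const sink = new Writable({ write(chunk, encoding, callback) { callback() } })

sink.write('first chunk\n')
sink.end('last chunk\n', (err) => {
  if (err) console.error('stream did not finish cleanly', err)
  else console.log("'finish' has been emitted")
})
```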
function needFinish(state) {
|
||||
return (
|
||||
state.ending &&
|
||||
!state.destroyed &&
|
||||
state.constructed &&
|
||||
state.length === 0 &&
|
||||
!state.errored &&
|
||||
state.buffered.length === 0 &&
|
||||
!state.finished &&
|
||||
!state.writing &&
|
||||
!state.errorEmitted &&
|
||||
!state.closeEmitted
|
||||
)
|
||||
}
|
||||
function callFinal(stream, state) {
|
||||
let called = false
|
||||
function onFinish(err) {
|
||||
if (called) {
|
||||
errorOrDestroy(stream, err !== null && err !== undefined ? err : ERR_MULTIPLE_CALLBACK())
|
||||
return
|
||||
}
|
||||
called = true
|
||||
state.pendingcb--
|
||||
if (err) {
|
||||
const onfinishCallbacks = state[kOnFinished].splice(0)
|
||||
for (let i = 0; i < onfinishCallbacks.length; i++) {
|
||||
onfinishCallbacks[i](err)
|
||||
}
|
||||
errorOrDestroy(stream, err, state.sync)
|
||||
} else if (needFinish(state)) {
|
||||
state.prefinished = true
|
||||
stream.emit('prefinish')
|
||||
// Backwards compat. Don't check state.sync here.
|
||||
// Some streams assume 'finish' will be emitted
|
||||
// asynchronously relative to _final callback.
|
||||
state.pendingcb++
|
||||
process.nextTick(finish, stream, state)
|
||||
}
|
||||
}
|
||||
state.sync = true
|
||||
state.pendingcb++
|
||||
try {
|
||||
stream._final(onFinish)
|
||||
} catch (err) {
|
||||
onFinish(err)
|
||||
}
|
||||
state.sync = false
|
||||
}
|
||||
function prefinish(stream, state) {
|
||||
if (!state.prefinished && !state.finalCalled) {
|
||||
if (typeof stream._final === 'function' && !state.destroyed) {
|
||||
state.finalCalled = true
|
||||
callFinal(stream, state)
|
||||
} else {
|
||||
state.prefinished = true
|
||||
stream.emit('prefinish')
|
||||
}
|
||||
}
|
||||
}
|
||||
function finishMaybe(stream, state, sync) {
|
||||
if (needFinish(state)) {
|
||||
prefinish(stream, state)
|
||||
if (state.pendingcb === 0) {
|
||||
if (sync) {
|
||||
state.pendingcb++
|
||||
process.nextTick(
|
||||
(stream, state) => {
|
||||
if (needFinish(state)) {
|
||||
finish(stream, state)
|
||||
} else {
|
||||
state.pendingcb--
|
||||
}
|
||||
},
|
||||
stream,
|
||||
state
|
||||
)
|
||||
} else if (needFinish(state)) {
|
||||
state.pendingcb++
|
||||
finish(stream, state)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function finish(stream, state) {
|
||||
state.pendingcb--
|
||||
state.finished = true
|
||||
const onfinishCallbacks = state[kOnFinished].splice(0)
|
||||
for (let i = 0; i < onfinishCallbacks.length; i++) {
|
||||
onfinishCallbacks[i]()
|
||||
}
|
||||
stream.emit('finish')
|
||||
if (state.autoDestroy) {
|
||||
// In case of duplex streams we need a way to detect
|
||||
// if the readable side is ready for autoDestroy as well.
|
||||
const rState = stream._readableState
|
||||
const autoDestroy =
|
||||
!rState ||
|
||||
(rState.autoDestroy &&
|
||||
// We don't expect the readable to ever 'end'
|
||||
// if readable is explicitly set to false.
|
||||
(rState.endEmitted || rState.readable === false))
|
||||
if (autoDestroy) {
|
||||
stream.destroy()
|
||||
}
|
||||
}
|
||||
}
|
||||
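The `prefinish`/`callFinal`/`finish` machinery above is what ultimately drives a user-supplied `_final()`. A sketch of a Writable using the `construct`/`write`/`final`/`destroy` hooks that this file wires up (the file name is illustrative):

```js
const { Writable } = require('readable-stream')
const fs = require('fs')

const fileSink = new Writable({
  construct(callback) {
    // Runs before any _write; the stream stays "constructing" until it completes.
    fs.open('output.log', 'w', (err, fd) => {
      this.fd = fd
      callback(err)
    })
  },
  write(chunk, encoding, callback) {
    fs.write(this.fd, chunk, callback)
  },
  final(callback) {
    // Invoked after end(); 'finish' is emitted only once this callback runs.
    fs.fsync(this.fd, callback)
  },
  destroy(err, callback) {
    if (this.fd) fs.close(this.fd, (closeErr) => callback(err || closeErr))
    else callback(err)
  }
})
```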
ObjectDefineProperties(Writable.prototype, {
|
||||
closed: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.closed : false
|
||||
}
|
||||
},
|
||||
destroyed: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.destroyed : false
|
||||
},
|
||||
set(value) {
|
||||
// Backward compatibility, the user is explicitly managing destroyed.
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = value
|
||||
}
|
||||
}
|
||||
},
|
||||
writable: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
const w = this._writableState
|
||||
// w.writable === false means that this is part of a Duplex stream
|
||||
// where the writable side was disabled upon construction.
|
||||
// Compat. The user might manually disable writable side through
|
||||
// deprecated setter.
|
||||
return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended
|
||||
},
|
||||
set(val) {
|
||||
// Backwards compatible.
|
||||
if (this._writableState) {
|
||||
this._writableState.writable = !!val
|
||||
}
|
||||
}
|
||||
},
|
||||
writableFinished: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.finished : false
|
||||
}
|
||||
},
|
||||
writableObjectMode: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.objectMode : false
|
||||
}
|
||||
},
|
||||
writableBuffer: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState && this._writableState.getBuffer()
|
||||
}
|
||||
},
|
||||
writableEnded: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.ending : false
|
||||
}
|
||||
},
|
||||
writableNeedDrain: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
const wState = this._writableState
|
||||
if (!wState) return false
|
||||
return !wState.destroyed && !wState.ending && wState.needDrain
|
||||
}
|
||||
},
|
||||
writableHighWaterMark: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState && this._writableState.highWaterMark
|
||||
}
|
||||
},
|
||||
writableCorked: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.corked : 0
|
||||
}
|
||||
},
|
||||
writableLength: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState && this._writableState.length
|
||||
}
|
||||
},
|
||||
errored: {
|
||||
__proto__: null,
|
||||
enumerable: false,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.errored : null
|
||||
}
|
||||
},
|
||||
writableAborted: {
|
||||
__proto__: null,
|
||||
enumerable: false,
|
||||
get: function () {
|
||||
return !!(
|
||||
this._writableState.writable !== false &&
|
||||
(this._writableState.destroyed || this._writableState.errored) &&
|
||||
!this._writableState.finished
|
||||
)
|
||||
}
|
||||
}
|
||||
})
|
||||
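The property definitions above are thin getters over `_writableState`; apart from the backwards-compatibility `destroyed`/`writable` setters, none of them mutate state. A small sketch of what they report, again against Node's built-in `Writable` (same getters):

```js
// Sketch: observing writable state through the getters defined above.
const { Writable } = require('stream')

const w = new Writable({
  highWaterMark: 4,
  write(chunk, encoding, callback) { /* never calling back keeps the chunk in flight */ }
})

w.write('abc')
console.log(w.writableLength)        // 3 - bytes written but not yet acknowledged
console.log(w.writableHighWaterMark) // 4
console.log(w.writableNeedDrain)     // false - length is still below the high-water mark
w.end()
console.log(w.writableEnded)         // true  - end() was called
console.log(w.writableFinished)      // false - the pending write never completed
```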
const destroy = destroyImpl.destroy
|
||||
Writable.prototype.destroy = function (err, cb) {
|
||||
const state = this._writableState
|
||||
|
||||
// Invoke pending callbacks.
|
||||
if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) {
|
||||
process.nextTick(errorBuffer, state)
|
||||
}
|
||||
destroy.call(this, err, cb)
|
||||
return this
|
||||
}
|
||||
Writable.prototype._undestroy = destroyImpl.undestroy
|
||||
Writable.prototype._destroy = function (err, cb) {
|
||||
cb(err)
|
||||
}
|
||||
Writable.prototype[EE.captureRejectionSymbol] = function (err) {
|
||||
this.destroy(err)
|
||||
}
|
||||
let webStreamsAdapters
|
||||
|
||||
// Lazy to avoid circular references
|
||||
function lazyWebStreams() {
|
||||
if (webStreamsAdapters === undefined) webStreamsAdapters = {}
|
||||
return webStreamsAdapters
|
||||
}
|
||||
Writable.fromWeb = function (writableStream, options) {
|
||||
return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options)
|
||||
}
|
||||
Writable.toWeb = function (streamWritable) {
|
||||
return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable)
|
||||
}
|
||||
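`Writable.fromWeb`/`Writable.toWeb` above delegate to `lazyWebStreams()`, which in this vendored build returns an empty adapter object, so the conversion only works where the adapters are actually wired up (for example Node core's `stream.Writable`, Node ≥ 17). A hedged sketch of the same surface against the built-in module:

```js
// Sketch: bridging a Node Writable to a WHATWG WritableStream via the toWeb/fromWeb surface.
// Uses Node core's stream module, since the vendored lazyWebStreams() above is a stub.
const { Writable } = require('stream')

const webSink = Writable.toWeb(process.stdout) // Node Writable -> WritableStream
const writer = webSink.getWriter()

writer.write(new TextEncoder().encode('hello from the web side\n'))
  .then(() => writer.close())
```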
530
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/internal/validators.js
generated
vendored
Normal file
@@ -0,0 +1,530 @@
|
||||
/* eslint jsdoc/require-jsdoc: "error" */
|
||||
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ArrayIsArray,
|
||||
ArrayPrototypeIncludes,
|
||||
ArrayPrototypeJoin,
|
||||
ArrayPrototypeMap,
|
||||
NumberIsInteger,
|
||||
NumberIsNaN,
|
||||
NumberMAX_SAFE_INTEGER,
|
||||
NumberMIN_SAFE_INTEGER,
|
||||
NumberParseInt,
|
||||
ObjectPrototypeHasOwnProperty,
|
||||
RegExpPrototypeExec,
|
||||
String,
|
||||
StringPrototypeToUpperCase,
|
||||
StringPrototypeTrim
|
||||
} = require('../ours/primordials')
|
||||
const {
|
||||
hideStackFrames,
|
||||
codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL }
|
||||
} = require('../ours/errors')
|
||||
const { normalizeEncoding } = require('../ours/util')
|
||||
const { isAsyncFunction, isArrayBufferView } = require('../ours/util').types
|
||||
const signals = {}
|
||||
|
||||
/**
|
||||
* @param {*} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isInt32(value) {
|
||||
return value === (value | 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {*} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isUint32(value) {
|
||||
return value === value >>> 0
|
||||
}
|
||||
const octalReg = /^[0-7]+$/
|
||||
const modeDesc = 'must be a 32-bit unsigned integer or an octal string'
|
||||
|
||||
/**
|
||||
* Parse and validate values that will be converted into mode_t (the S_*
|
||||
* constants). Only valid numbers and octal strings are allowed. They could be
|
||||
* converted to 32-bit unsigned integers or non-negative signed integers in the
|
||||
* C++ land, but any value higher than 0o777 will result in platform-specific
|
||||
* behaviors.
|
||||
* @param {*} value Values to be validated
|
||||
* @param {string} name Name of the argument
|
||||
* @param {number} [def] If specified, will be returned for invalid values
|
||||
* @returns {number}
|
||||
*/
|
||||
function parseFileMode(value, name, def) {
|
||||
if (typeof value === 'undefined') {
|
||||
value = def
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
if (RegExpPrototypeExec(octalReg, value) === null) {
|
||||
throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc)
|
||||
}
|
||||
value = NumberParseInt(value, 8)
|
||||
}
|
||||
validateUint32(value, name)
|
||||
return value
|
||||
}
|
||||
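`parseFileMode` accepts either an octal string or an unsigned 32-bit integer and normalizes both to a number. A quick sketch, assuming the internal file is requireable at the lib path shown in this diff (adjust for the nested node_modules location):

```js
// Sketch: octal strings are parsed base-8, numbers are validated as uint32.
const { parseFileMode } = require('readable-stream/lib/internal/validators')

console.log(parseFileMode('777', 'mode'))            // 511 (0o777)
console.log(parseFileMode(undefined, 'mode', 0o666)) // 438 - falls back to the default
parseFileMode('999', 'mode') // throws ERR_INVALID_ARG_VALUE: not an octal string
```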
|
||||
/**
|
||||
* @callback validateInteger
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [min]
|
||||
* @param {number} [max]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateInteger} */
|
||||
const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => {
|
||||
if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
|
||||
if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateInt32
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [min]
|
||||
* @param {number} [max]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateInt32} */
|
||||
const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => {
|
||||
// The defaults for min and max correspond to the limits of 32-bit integers.
|
||||
if (typeof value !== 'number') {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
}
|
||||
if (!NumberIsInteger(value)) {
|
||||
throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
|
||||
}
|
||||
if (value < min || value > max) {
|
||||
throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateUint32
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number|boolean} [positive=false]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateUint32} */
|
||||
const validateUint32 = hideStackFrames((value, name, positive = false) => {
|
||||
if (typeof value !== 'number') {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
}
|
||||
if (!NumberIsInteger(value)) {
|
||||
throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
|
||||
}
|
||||
const min = positive ? 1 : 0
|
||||
// 2 ** 32 === 4294967296
|
||||
const max = 4294967295
|
||||
if (value < min || value > max) {
|
||||
throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateString
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is string}
|
||||
*/
|
||||
|
||||
/** @type {validateString} */
|
||||
function validateString(value, name) {
|
||||
if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value)
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateNumber
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [min]
|
||||
* @param {number} [max]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateNumber} */
|
||||
function validateNumber(value, name, min = undefined, max) {
|
||||
if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
if (
|
||||
(min != null && value < min) ||
|
||||
(max != null && value > max) ||
|
||||
((min != null || max != null) && NumberIsNaN(value))
|
||||
) {
|
||||
throw new ERR_OUT_OF_RANGE(
|
||||
name,
|
||||
`${min != null ? `>= ${min}` : ''}${min != null && max != null ? ' && ' : ''}${max != null ? `<= ${max}` : ''}`,
|
||||
value
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateOneOf
|
||||
* @template T
|
||||
* @param {T} value
|
||||
* @param {string} name
|
||||
* @param {T[]} oneOf
|
||||
*/
|
||||
|
||||
/** @type {validateOneOf} */
|
||||
const validateOneOf = hideStackFrames((value, name, oneOf) => {
|
||||
if (!ArrayPrototypeIncludes(oneOf, value)) {
|
||||
const allowed = ArrayPrototypeJoin(
|
||||
ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v))),
|
||||
', '
|
||||
)
|
||||
const reason = 'must be one of: ' + allowed
|
||||
throw new ERR_INVALID_ARG_VALUE(name, value, reason)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateBoolean
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is boolean}
|
||||
*/
|
||||
|
||||
/** @type {validateBoolean} */
|
||||
function validateBoolean(value, name) {
|
||||
if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} options
|
||||
* @param {string} key
|
||||
* @param {boolean} defaultValue
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function getOwnPropertyValueOrDefault(options, key, defaultValue) {
|
||||
return options == null || !ObjectPrototypeHasOwnProperty(options, key) ? defaultValue : options[key]
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateObject
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {{
|
||||
* allowArray?: boolean,
|
||||
* allowFunction?: boolean,
|
||||
* nullable?: boolean
|
||||
* }} [options]
|
||||
*/
|
||||
|
||||
/** @type {validateObject} */
|
||||
const validateObject = hideStackFrames((value, name, options = null) => {
|
||||
const allowArray = getOwnPropertyValueOrDefault(options, 'allowArray', false)
|
||||
const allowFunction = getOwnPropertyValueOrDefault(options, 'allowFunction', false)
|
||||
const nullable = getOwnPropertyValueOrDefault(options, 'nullable', false)
|
||||
if (
|
||||
(!nullable && value === null) ||
|
||||
(!allowArray && ArrayIsArray(value)) ||
|
||||
(typeof value !== 'object' && (!allowFunction || typeof value !== 'function'))
|
||||
) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'Object', value)
|
||||
}
|
||||
})
|
||||
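`validateObject` defaults to rejecting `null`, arrays, and functions; the `options` bag loosens each of those independently. A short sketch under the same path assumption as above:

```js
// Sketch: the three validateObject options and what they permit.
const { validateObject } = require('readable-stream/lib/internal/validators')

validateObject({}, 'opts')                                // ok
validateObject([], 'opts', { allowArray: true })          // ok - arrays allowed explicitly
validateObject(null, 'opts', { nullable: true })          // ok - null allowed explicitly
validateObject(() => {}, 'opts', { allowFunction: true }) // ok - functions allowed explicitly
validateObject(null, 'opts') // throws ERR_INVALID_ARG_TYPE('opts', 'Object', null)
```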
|
||||
/**
|
||||
* @callback validateDictionary - We are using the Web IDL Standard definition
|
||||
* of "dictionary" here, which means any value
|
||||
* whose Type is either Undefined, Null, or
|
||||
* Object (which includes functions).
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @see https://webidl.spec.whatwg.org/#es-dictionary
|
||||
* @see https://tc39.es/ecma262/#table-typeof-operator-results
|
||||
*/
|
||||
|
||||
/** @type {validateDictionary} */
|
||||
const validateDictionary = hideStackFrames((value, name) => {
|
||||
if (value != null && typeof value !== 'object' && typeof value !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [minLength]
|
||||
* @returns {asserts value is any[]}
|
||||
*/
|
||||
|
||||
/** @type {validateArray} */
|
||||
const validateArray = hideStackFrames((value, name, minLength = 0) => {
|
||||
if (!ArrayIsArray(value)) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'Array', value)
|
||||
}
|
||||
if (value.length < minLength) {
|
||||
const reason = `must be longer than ${minLength}`
|
||||
throw new ERR_INVALID_ARG_VALUE(name, value, reason)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateStringArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is string[]}
|
||||
*/
|
||||
|
||||
/** @type {validateStringArray} */
|
||||
function validateStringArray(value, name) {
|
||||
validateArray(value, name)
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
validateString(value[i], `${name}[${i}]`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateBooleanArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is boolean[]}
|
||||
*/
|
||||
|
||||
/** @type {validateBooleanArray} */
|
||||
function validateBooleanArray(value, name) {
|
||||
validateArray(value, name)
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
validateBoolean(value[i], `${name}[${i}]`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateAbortSignalArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is AbortSignal[]}
|
||||
*/
|
||||
|
||||
/** @type {validateAbortSignalArray} */
|
||||
function validateAbortSignalArray(value, name) {
|
||||
validateArray(value, name)
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
const signal = value[i]
|
||||
const indexedName = `${name}[${i}]`
|
||||
if (signal == null) {
|
||||
throw new ERR_INVALID_ARG_TYPE(indexedName, 'AbortSignal', signal)
|
||||
}
|
||||
validateAbortSignal(signal, indexedName)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {*} signal
|
||||
* @param {string} [name='signal']
|
||||
* @returns {asserts signal is keyof signals}
|
||||
*/
|
||||
function validateSignalName(signal, name = 'signal') {
|
||||
validateString(signal, name)
|
||||
if (signals[signal] === undefined) {
|
||||
if (signals[StringPrototypeToUpperCase(signal)] !== undefined) {
|
||||
throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)')
|
||||
}
|
||||
throw new ERR_UNKNOWN_SIGNAL(signal)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateBuffer
|
||||
* @param {*} buffer
|
||||
* @param {string} [name='buffer']
|
||||
* @returns {asserts buffer is ArrayBufferView}
|
||||
*/
|
||||
|
||||
/** @type {validateBuffer} */
|
||||
const validateBuffer = hideStackFrames((buffer, name = 'buffer') => {
|
||||
if (!isArrayBufferView(buffer)) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @param {string} data
|
||||
* @param {string} encoding
|
||||
*/
|
||||
function validateEncoding(data, encoding) {
|
||||
const normalizedEncoding = normalizeEncoding(encoding)
|
||||
const length = data.length
|
||||
if (normalizedEncoding === 'hex' && length % 2 !== 0) {
|
||||
throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that the port number is not NaN when coerced to a number,
|
||||
* is an integer and that it falls within the legal range of port numbers.
|
||||
* @param {*} port
|
||||
* @param {string} [name='Port']
|
||||
* @param {boolean} [allowZero=true]
|
||||
* @returns {number}
|
||||
*/
|
||||
function validatePort(port, name = 'Port', allowZero = true) {
|
||||
if (
|
||||
(typeof port !== 'number' && typeof port !== 'string') ||
|
||||
(typeof port === 'string' && StringPrototypeTrim(port).length === 0) ||
|
||||
+port !== +port >>> 0 ||
|
||||
port > 0xffff ||
|
||||
(port === 0 && !allowZero)
|
||||
) {
|
||||
throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)
|
||||
}
|
||||
return port | 0
|
||||
}
|
||||
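`validatePort` accepts numbers or numeric strings and returns the value coerced to an integer; anything outside 0-65535 (or 0 itself when `allowZero` is false) throws `ERR_SOCKET_BAD_PORT`. A quick sketch, same path assumption as above:

```js
// Sketch: numeric strings are accepted and coerced; out-of-range values throw.
const { validatePort } = require('readable-stream/lib/internal/validators')

console.log(validatePort(8080))   // 8080
console.log(validatePort('8080')) // 8080 - string form is coerced
console.log(validatePort(0))      // 0    - allowed because allowZero defaults to true
validatePort(0, 'port', false)    // throws ERR_SOCKET_BAD_PORT
validatePort(70000)               // throws ERR_SOCKET_BAD_PORT
```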
|
||||
/**
|
||||
* @callback validateAbortSignal
|
||||
* @param {*} signal
|
||||
* @param {string} name
|
||||
*/
|
||||
|
||||
/** @type {validateAbortSignal} */
|
||||
const validateAbortSignal = hideStackFrames((signal, name) => {
|
||||
if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateFunction
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is Function}
|
||||
*/
|
||||
|
||||
/** @type {validateFunction} */
|
||||
const validateFunction = hideStackFrames((value, name) => {
|
||||
if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validatePlainFunction
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is Function}
|
||||
*/
|
||||
|
||||
/** @type {validatePlainFunction} */
|
||||
const validatePlainFunction = hideStackFrames((value, name) => {
|
||||
if (typeof value !== 'function' || isAsyncFunction(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateUndefined
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is undefined}
|
||||
*/
|
||||
|
||||
/** @type {validateUndefined} */
|
||||
const validateUndefined = hideStackFrames((value, name) => {
|
||||
if (value !== undefined) throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value)
|
||||
})
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {T} value
|
||||
* @param {string} name
|
||||
* @param {T[]} union
|
||||
*/
|
||||
function validateUnion(value, name, union) {
|
||||
if (!ArrayPrototypeIncludes(union, value)) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value)
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
The rules for the Link header field are described here:
|
||||
https://www.rfc-editor.org/rfc/rfc8288.html#section-3
|
||||
|
||||
This regex validates any string surrounded by angle brackets
|
||||
(not necessarily a valid URI reference) followed by zero or more
|
||||
link-params separated by semicolons.
|
||||
*/
|
||||
const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/
|
||||
|
||||
/**
|
||||
* @param {any} value
|
||||
* @param {string} name
|
||||
*/
|
||||
function validateLinkHeaderFormat(value, name) {
|
||||
if (typeof value === 'undefined' || !RegExpPrototypeExec(linkValueRegExp, value)) {
|
||||
throw new ERR_INVALID_ARG_VALUE(
|
||||
name,
|
||||
value,
|
||||
'must be an array or string of format "</styles.css>; rel=preload; as=style"'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} hints
|
||||
* @return {string}
|
||||
*/
|
||||
function validateLinkHeaderValue(hints) {
|
||||
if (typeof hints === 'string') {
|
||||
validateLinkHeaderFormat(hints, 'hints')
|
||||
return hints
|
||||
} else if (ArrayIsArray(hints)) {
|
||||
const hintsLength = hints.length
|
||||
let result = ''
|
||||
if (hintsLength === 0) {
|
||||
return result
|
||||
}
|
||||
for (let i = 0; i < hintsLength; i++) {
|
||||
const link = hints[i]
|
||||
validateLinkHeaderFormat(link, 'hints')
|
||||
result += link
|
||||
if (i !== hintsLength - 1) {
|
||||
result += ', '
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
throw new ERR_INVALID_ARG_VALUE(
|
||||
'hints',
|
||||
hints,
|
||||
'must be an array or string of format "</styles.css>; rel=preload; as=style"'
|
||||
)
|
||||
}
|
||||
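`validateLinkHeaderValue` normalizes early-hints input: a single RFC 8288 link string passes through, an array is validated entry by entry and joined with `', '`. A brief sketch, same path assumption as above:

```js
// Sketch: string input is returned as-is, arrays are joined after per-entry validation.
const { validateLinkHeaderValue } = require('readable-stream/lib/internal/validators')

validateLinkHeaderValue('</styles.css>; rel=preload; as=style')
// -> '</styles.css>; rel=preload; as=style'

validateLinkHeaderValue(['</styles.css>; rel=preload; as=style', '</app.js>; rel=preload; as=script'])
// -> '</styles.css>; rel=preload; as=style, </app.js>; rel=preload; as=script'

validateLinkHeaderValue('not a link header') // throws ERR_INVALID_ARG_VALUE
```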
module.exports = {
|
||||
isInt32,
|
||||
isUint32,
|
||||
parseFileMode,
|
||||
validateArray,
|
||||
validateStringArray,
|
||||
validateBooleanArray,
|
||||
validateAbortSignalArray,
|
||||
validateBoolean,
|
||||
validateBuffer,
|
||||
validateDictionary,
|
||||
validateEncoding,
|
||||
validateFunction,
|
||||
validateInt32,
|
||||
validateInteger,
|
||||
validateNumber,
|
||||
validateObject,
|
||||
validateOneOf,
|
||||
validatePlainFunction,
|
||||
validatePort,
|
||||
validateSignalName,
|
||||
validateString,
|
||||
validateUint32,
|
||||
validateUndefined,
|
||||
validateUnion,
|
||||
validateAbortSignal,
|
||||
validateLinkHeaderValue
|
||||
}
|
||||
35
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/ours/browser.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict'
|
||||
|
||||
const CustomStream = require('../stream')
|
||||
const promises = require('../stream/promises')
|
||||
const originalDestroy = CustomStream.Readable.destroy
|
||||
module.exports = CustomStream.Readable
|
||||
|
||||
// Explicit export naming is needed for ESM
|
||||
module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
|
||||
module.exports._isUint8Array = CustomStream._isUint8Array
|
||||
module.exports.isDisturbed = CustomStream.isDisturbed
|
||||
module.exports.isErrored = CustomStream.isErrored
|
||||
module.exports.isReadable = CustomStream.isReadable
|
||||
module.exports.Readable = CustomStream.Readable
|
||||
module.exports.Writable = CustomStream.Writable
|
||||
module.exports.Duplex = CustomStream.Duplex
|
||||
module.exports.Transform = CustomStream.Transform
|
||||
module.exports.PassThrough = CustomStream.PassThrough
|
||||
module.exports.addAbortSignal = CustomStream.addAbortSignal
|
||||
module.exports.finished = CustomStream.finished
|
||||
module.exports.destroy = CustomStream.destroy
|
||||
module.exports.destroy = originalDestroy
|
||||
module.exports.pipeline = CustomStream.pipeline
|
||||
module.exports.compose = CustomStream.compose
|
||||
Object.defineProperty(CustomStream, 'promises', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises
|
||||
}
|
||||
})
|
||||
module.exports.Stream = CustomStream.Stream
|
||||
|
||||
// Allow default importing
|
||||
module.exports.default = module.exports
|
||||
343
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/ours/errors.js
generated
vendored
Normal file
@@ -0,0 +1,343 @@
|
||||
'use strict'
|
||||
|
||||
const { format, inspect } = require('./util/inspect')
|
||||
const { AggregateError: CustomAggregateError } = require('./primordials')
|
||||
|
||||
/*
|
||||
This file is a reduced and adapted version of the main lib/internal/errors.js file defined at
|
||||
|
||||
https://github.com/nodejs/node/blob/main/lib/internal/errors.js
|
||||
|
||||
Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)
|
||||
with the upstream file.
|
||||
*/
|
||||
|
||||
const AggregateError = globalThis.AggregateError || CustomAggregateError
|
||||
const kIsNodeError = Symbol('kIsNodeError')
|
||||
const kTypes = [
|
||||
'string',
|
||||
'function',
|
||||
'number',
|
||||
'object',
|
||||
// Accept 'Function' and 'Object' as alternative to the lower cased version.
|
||||
'Function',
|
||||
'Object',
|
||||
'boolean',
|
||||
'bigint',
|
||||
'symbol'
|
||||
]
|
||||
const classRegExp = /^([A-Z][a-z0-9]*)+$/
|
||||
const nodeInternalPrefix = '__node_internal_'
|
||||
const codes = {}
|
||||
function assert(value, message) {
|
||||
if (!value) {
|
||||
throw new codes.ERR_INTERNAL_ASSERTION(message)
|
||||
}
|
||||
}
|
||||
|
||||
// Only use this for integers! Decimal numbers do not work with this function.
|
||||
function addNumericalSeparator(val) {
|
||||
let res = ''
|
||||
let i = val.length
|
||||
const start = val[0] === '-' ? 1 : 0
|
||||
for (; i >= start + 4; i -= 3) {
|
||||
res = `_${val.slice(i - 3, i)}${res}`
|
||||
}
|
||||
return `${val.slice(0, i)}${res}`
|
||||
}
|
||||
function getMessage(key, msg, args) {
|
||||
if (typeof msg === 'function') {
|
||||
assert(
|
||||
msg.length <= args.length,
|
||||
// Default options do not count.
|
||||
`Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`
|
||||
)
|
||||
return msg(...args)
|
||||
}
|
||||
const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length
|
||||
assert(
|
||||
expectedLength === args.length,
|
||||
`Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`
|
||||
)
|
||||
if (args.length === 0) {
|
||||
return msg
|
||||
}
|
||||
return format(msg, ...args)
|
||||
}
|
||||
function E(code, message, Base) {
|
||||
if (!Base) {
|
||||
Base = Error
|
||||
}
|
||||
class NodeError extends Base {
|
||||
constructor(...args) {
|
||||
super(getMessage(code, message, args))
|
||||
}
|
||||
toString() {
|
||||
return `${this.name} [${code}]: ${this.message}`
|
||||
}
|
||||
}
|
||||
Object.defineProperties(NodeError.prototype, {
|
||||
name: {
|
||||
value: Base.name,
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
},
|
||||
toString: {
|
||||
value() {
|
||||
return `${this.name} [${code}]: ${this.message}`
|
||||
},
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
}
|
||||
})
|
||||
NodeError.prototype.code = code
|
||||
NodeError.prototype[kIsNodeError] = true
|
||||
codes[code] = NodeError
|
||||
}
|
||||
function hideStackFrames(fn) {
|
||||
// We rename the functions that will be hidden to cut off the stacktrace
|
||||
// at the outermost one
|
||||
const hidden = nodeInternalPrefix + fn.name
|
||||
Object.defineProperty(fn, 'name', {
|
||||
value: hidden
|
||||
})
|
||||
return fn
|
||||
}
|
||||
function aggregateTwoErrors(innerError, outerError) {
|
||||
if (innerError && outerError && innerError !== outerError) {
|
||||
if (Array.isArray(outerError.errors)) {
|
||||
// If `outerError` is already an `AggregateError`.
|
||||
outerError.errors.push(innerError)
|
||||
return outerError
|
||||
}
|
||||
const err = new AggregateError([outerError, innerError], outerError.message)
|
||||
err.code = outerError.code
|
||||
return err
|
||||
}
|
||||
return innerError || outerError
|
||||
}
|
||||
class AbortError extends Error {
|
||||
constructor(message = 'The operation was aborted', options = undefined) {
|
||||
if (options !== undefined && typeof options !== 'object') {
|
||||
throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)
|
||||
}
|
||||
super(message, options)
|
||||
this.code = 'ABORT_ERR'
|
||||
this.name = 'AbortError'
|
||||
}
|
||||
}
|
||||
E('ERR_ASSERTION', '%s', Error)
|
||||
E(
|
||||
'ERR_INVALID_ARG_TYPE',
|
||||
(name, expected, actual) => {
|
||||
assert(typeof name === 'string', "'name' must be a string")
|
||||
if (!Array.isArray(expected)) {
|
||||
expected = [expected]
|
||||
}
|
||||
let msg = 'The '
|
||||
if (name.endsWith(' argument')) {
|
||||
// For cases like 'first argument'
|
||||
msg += `${name} `
|
||||
} else {
|
||||
msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} `
|
||||
}
|
||||
msg += 'must be '
|
||||
const types = []
|
||||
const instances = []
|
||||
const other = []
|
||||
for (const value of expected) {
|
||||
assert(typeof value === 'string', 'All expected entries have to be of type string')
|
||||
if (kTypes.includes(value)) {
|
||||
types.push(value.toLowerCase())
|
||||
} else if (classRegExp.test(value)) {
|
||||
instances.push(value)
|
||||
} else {
|
||||
assert(value !== 'object', 'The value "object" should be written as "Object"')
|
||||
other.push(value)
|
||||
}
|
||||
}
|
||||
|
||||
// Special handle `object` in case other instances are allowed to outline
|
||||
// the differences between each other.
|
||||
if (instances.length > 0) {
|
||||
const pos = types.indexOf('object')
|
||||
if (pos !== -1) {
|
||||
types.splice(pos, 1)
|
||||
instances.push('Object')
|
||||
}
|
||||
}
|
||||
if (types.length > 0) {
|
||||
switch (types.length) {
|
||||
case 1:
|
||||
msg += `of type ${types[0]}`
|
||||
break
|
||||
case 2:
|
||||
msg += `one of type ${types[0]} or ${types[1]}`
|
||||
break
|
||||
default: {
|
||||
const last = types.pop()
|
||||
msg += `one of type ${types.join(', ')}, or ${last}`
|
||||
}
|
||||
}
|
||||
if (instances.length > 0 || other.length > 0) {
|
||||
msg += ' or '
|
||||
}
|
||||
}
|
||||
if (instances.length > 0) {
|
||||
switch (instances.length) {
|
||||
case 1:
|
||||
msg += `an instance of ${instances[0]}`
|
||||
break
|
||||
case 2:
|
||||
msg += `an instance of ${instances[0]} or ${instances[1]}`
|
||||
break
|
||||
default: {
|
||||
const last = instances.pop()
|
||||
msg += `an instance of ${instances.join(', ')}, or ${last}`
|
||||
}
|
||||
}
|
||||
if (other.length > 0) {
|
||||
msg += ' or '
|
||||
}
|
||||
}
|
||||
switch (other.length) {
|
||||
case 0:
|
||||
break
|
||||
case 1:
|
||||
if (other[0].toLowerCase() !== other[0]) {
|
||||
msg += 'an '
|
||||
}
|
||||
msg += `${other[0]}`
|
||||
break
|
||||
case 2:
|
||||
msg += `one of ${other[0]} or ${other[1]}`
|
||||
break
|
||||
default: {
|
||||
const last = other.pop()
|
||||
msg += `one of ${other.join(', ')}, or ${last}`
|
||||
}
|
||||
}
|
||||
if (actual == null) {
|
||||
msg += `. Received ${actual}`
|
||||
} else if (typeof actual === 'function' && actual.name) {
|
||||
msg += `. Received function ${actual.name}`
|
||||
} else if (typeof actual === 'object') {
|
||||
var _actual$constructor
|
||||
if (
|
||||
(_actual$constructor = actual.constructor) !== null &&
|
||||
_actual$constructor !== undefined &&
|
||||
_actual$constructor.name
|
||||
) {
|
||||
msg += `. Received an instance of ${actual.constructor.name}`
|
||||
} else {
|
||||
const inspected = inspect(actual, {
|
||||
depth: -1
|
||||
})
|
||||
msg += `. Received ${inspected}`
|
||||
}
|
||||
} else {
|
||||
let inspected = inspect(actual, {
|
||||
colors: false
|
||||
})
|
||||
if (inspected.length > 25) {
|
||||
inspected = `${inspected.slice(0, 25)}...`
|
||||
}
|
||||
msg += `. Received type ${typeof actual} (${inspected})`
|
||||
}
|
||||
return msg
|
||||
},
|
||||
TypeError
|
||||
)
|
||||
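Each `E(...)` call above registers a class on `codes` whose constructor formats the message from its arguments (via `getMessage`) and whose `toString` embeds the error code. A hedged sketch of what a consumer of `codes.ERR_INVALID_ARG_TYPE` gets back, assuming the file is reachable at the lib path shown in this diff:

```js
// Sketch: constructing one of the error classes registered by E(...).
const { codes } = require('readable-stream/lib/ours/errors')

const err = new codes.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], 42)
console.log(err.code)    // 'ERR_INVALID_ARG_TYPE'
console.log(String(err))
// 'TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string
//  or an instance of Buffer. Received type number (42)'
```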
E(
|
||||
'ERR_INVALID_ARG_VALUE',
|
||||
(name, value, reason = 'is invalid') => {
|
||||
let inspected = inspect(value)
|
||||
if (inspected.length > 128) {
|
||||
inspected = inspected.slice(0, 128) + '...'
|
||||
}
|
||||
const type = name.includes('.') ? 'property' : 'argument'
|
||||
return `The ${type} '${name}' ${reason}. Received ${inspected}`
|
||||
},
|
||||
TypeError
|
||||
)
|
||||
E(
|
||||
'ERR_INVALID_RETURN_VALUE',
|
||||
(input, name, value) => {
|
||||
var _value$constructor
|
||||
const type =
|
||||
value !== null &&
|
||||
value !== undefined &&
|
||||
(_value$constructor = value.constructor) !== null &&
|
||||
_value$constructor !== undefined &&
|
||||
_value$constructor.name
|
||||
? `instance of ${value.constructor.name}`
|
||||
: `type ${typeof value}`
|
||||
return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`
|
||||
},
|
||||
TypeError
|
||||
)
|
||||
E(
|
||||
'ERR_MISSING_ARGS',
|
||||
(...args) => {
|
||||
assert(args.length > 0, 'At least one arg needs to be specified')
|
||||
let msg = ''
|
||||
const len = args.length
|
||||
args = (Array.isArray(args) ? args : [args]).map((a) => `"${a}"`)
|
||||
switch (len) {
|
||||
case 1:
|
||||
msg += `The ${args[0]} argument`
|
||||
break
|
||||
case 2:
|
||||
msg += `The ${args[0]} and ${args[1]} arguments`
|
||||
break
|
||||
default:
|
||||
{
|
||||
const last = args.pop()
|
||||
msg += `The ${args.join(', ')}, and ${last} arguments`
|
||||
}
|
||||
break
|
||||
}
|
||||
return `${msg} must be specified`
|
||||
},
|
||||
TypeError
|
||||
)
|
||||
E(
|
||||
'ERR_OUT_OF_RANGE',
|
||||
(str, range, input) => {
|
||||
assert(range, 'Missing "range" argument')
|
||||
let received
|
||||
if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
|
||||
received = addNumericalSeparator(String(input))
|
||||
} else if (typeof input === 'bigint') {
|
||||
received = String(input)
|
||||
const limit = BigInt(2) ** BigInt(32)
|
||||
if (input > limit || input < -limit) {
|
||||
received = addNumericalSeparator(received)
|
||||
}
|
||||
received += 'n'
|
||||
} else {
|
||||
received = inspect(input)
|
||||
}
|
||||
return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`
|
||||
},
|
||||
RangeError
|
||||
)
|
||||
E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error)
|
||||
E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error)
|
||||
E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error)
|
||||
E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error)
|
||||
E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)
|
||||
E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)
|
||||
E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)
|
||||
E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)
|
||||
E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)
|
||||
E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)
|
||||
E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)
|
||||
module.exports = {
|
||||
AbortError,
|
||||
aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),
|
||||
hideStackFrames,
|
||||
codes
|
||||
}
|
||||
65
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/ours/index.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
'use strict'
|
||||
|
||||
const Stream = require('stream')
|
||||
if (Stream && process.env.READABLE_STREAM === 'disable') {
|
||||
const promises = Stream.promises
|
||||
|
||||
// Explicit export naming is needed for ESM
|
||||
module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer
|
||||
module.exports._isUint8Array = Stream._isUint8Array
|
||||
module.exports.isDisturbed = Stream.isDisturbed
|
||||
module.exports.isErrored = Stream.isErrored
|
||||
module.exports.isReadable = Stream.isReadable
|
||||
module.exports.Readable = Stream.Readable
|
||||
module.exports.Writable = Stream.Writable
|
||||
module.exports.Duplex = Stream.Duplex
|
||||
module.exports.Transform = Stream.Transform
|
||||
module.exports.PassThrough = Stream.PassThrough
|
||||
module.exports.addAbortSignal = Stream.addAbortSignal
|
||||
module.exports.finished = Stream.finished
|
||||
module.exports.destroy = Stream.destroy
|
||||
module.exports.pipeline = Stream.pipeline
|
||||
module.exports.compose = Stream.compose
|
||||
Object.defineProperty(Stream, 'promises', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises
|
||||
}
|
||||
})
|
||||
module.exports.Stream = Stream.Stream
|
||||
} else {
|
||||
const CustomStream = require('../stream')
|
||||
const promises = require('../stream/promises')
|
||||
const originalDestroy = CustomStream.Readable.destroy
|
||||
module.exports = CustomStream.Readable
|
||||
|
||||
// Explicit export naming is needed for ESM
|
||||
module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
|
||||
module.exports._isUint8Array = CustomStream._isUint8Array
|
||||
module.exports.isDisturbed = CustomStream.isDisturbed
|
||||
module.exports.isErrored = CustomStream.isErrored
|
||||
module.exports.isReadable = CustomStream.isReadable
|
||||
module.exports.Readable = CustomStream.Readable
|
||||
module.exports.Writable = CustomStream.Writable
|
||||
module.exports.Duplex = CustomStream.Duplex
|
||||
module.exports.Transform = CustomStream.Transform
|
||||
module.exports.PassThrough = CustomStream.PassThrough
|
||||
module.exports.addAbortSignal = CustomStream.addAbortSignal
|
||||
module.exports.finished = CustomStream.finished
|
||||
module.exports.destroy = CustomStream.destroy
|
||||
module.exports.destroy = originalDestroy
|
||||
module.exports.pipeline = CustomStream.pipeline
|
||||
module.exports.compose = CustomStream.compose
|
||||
Object.defineProperty(CustomStream, 'promises', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises
|
||||
}
|
||||
})
|
||||
module.exports.Stream = CustomStream.Stream
|
||||
}
|
||||
|
||||
// Allow default importing
|
||||
module.exports.default = module.exports
|
||||
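The `index.js` just shown picks between Node's built-in `stream` and the bundled implementation based on the `READABLE_STREAM` environment variable. A small sketch of that switch (the variable has to be set before the first `require`, since the branch is taken at module load time):

```js
// Sketch: opting out of the bundled implementation and re-exporting Node core streams.
process.env.READABLE_STREAM = 'disable' // must happen before readable-stream is first required
const rs = require('readable-stream')

console.log(rs.Readable === require('stream').Readable) // true - core classes are re-exported
```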
124
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/ours/primordials.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
'use strict'
|
||||
|
||||
/*
|
||||
This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at
|
||||
|
||||
https://github.com/nodejs/node/blob/main/lib/internal/per_context/primordials.js
|
||||
|
||||
Don't try to replace with the original file and keep it up to date with the upstream file.
|
||||
*/
|
||||
|
||||
// This is a simplified version of AggregateError
|
||||
class AggregateError extends Error {
|
||||
constructor(errors) {
|
||||
if (!Array.isArray(errors)) {
|
||||
throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)
|
||||
}
|
||||
let message = ''
|
||||
for (let i = 0; i < errors.length; i++) {
|
||||
message += ` ${errors[i].stack}\n`
|
||||
}
|
||||
super(message)
|
||||
this.name = 'AggregateError'
|
||||
this.errors = errors
|
||||
}
|
||||
}
|
||||
module.exports = {
|
||||
AggregateError,
|
||||
ArrayIsArray(self) {
|
||||
return Array.isArray(self)
|
||||
},
|
||||
ArrayPrototypeIncludes(self, el) {
|
||||
return self.includes(el)
|
||||
},
|
||||
ArrayPrototypeIndexOf(self, el) {
|
||||
return self.indexOf(el)
|
||||
},
|
||||
ArrayPrototypeJoin(self, sep) {
|
||||
return self.join(sep)
|
||||
},
|
||||
ArrayPrototypeMap(self, fn) {
|
||||
return self.map(fn)
|
||||
},
|
||||
ArrayPrototypePop(self, el) {
|
||||
return self.pop(el)
|
||||
},
|
||||
ArrayPrototypePush(self, el) {
|
||||
return self.push(el)
|
||||
},
|
||||
ArrayPrototypeSlice(self, start, end) {
|
||||
return self.slice(start, end)
|
||||
},
|
||||
Error,
|
||||
FunctionPrototypeCall(fn, thisArgs, ...args) {
|
||||
return fn.call(thisArgs, ...args)
|
||||
},
|
||||
FunctionPrototypeSymbolHasInstance(self, instance) {
|
||||
return Function.prototype[Symbol.hasInstance].call(self, instance)
|
||||
},
|
||||
MathFloor: Math.floor,
|
||||
Number,
|
||||
NumberIsInteger: Number.isInteger,
|
||||
NumberIsNaN: Number.isNaN,
|
||||
NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,
|
||||
NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,
|
||||
NumberParseInt: Number.parseInt,
|
||||
ObjectDefineProperties(self, props) {
|
||||
return Object.defineProperties(self, props)
|
||||
},
|
||||
ObjectDefineProperty(self, name, prop) {
|
||||
return Object.defineProperty(self, name, prop)
|
||||
},
|
||||
ObjectGetOwnPropertyDescriptor(self, name) {
|
||||
return Object.getOwnPropertyDescriptor(self, name)
|
||||
},
|
||||
ObjectKeys(obj) {
|
||||
return Object.keys(obj)
|
||||
},
|
||||
ObjectSetPrototypeOf(target, proto) {
|
||||
return Object.setPrototypeOf(target, proto)
|
||||
},
|
||||
Promise,
|
||||
PromisePrototypeCatch(self, fn) {
|
||||
return self.catch(fn)
|
||||
},
|
||||
PromisePrototypeThen(self, thenFn, catchFn) {
|
||||
return self.then(thenFn, catchFn)
|
||||
},
|
||||
PromiseReject(err) {
|
||||
return Promise.reject(err)
|
||||
},
|
||||
PromiseResolve(val) {
|
||||
return Promise.resolve(val)
|
||||
},
|
||||
ReflectApply: Reflect.apply,
|
||||
RegExpPrototypeTest(self, value) {
|
||||
return self.test(value)
|
||||
},
|
||||
SafeSet: Set,
|
||||
String,
|
||||
StringPrototypeSlice(self, start, end) {
|
||||
return self.slice(start, end)
|
||||
},
|
||||
StringPrototypeToLowerCase(self) {
|
||||
return self.toLowerCase()
|
||||
},
|
||||
StringPrototypeToUpperCase(self) {
|
||||
return self.toUpperCase()
|
||||
},
|
||||
StringPrototypeTrim(self) {
|
||||
return self.trim()
|
||||
},
|
||||
Symbol,
|
||||
SymbolFor: Symbol.for,
|
||||
SymbolAsyncIterator: Symbol.asyncIterator,
|
||||
SymbolHasInstance: Symbol.hasInstance,
|
||||
SymbolIterator: Symbol.iterator,
|
||||
SymbolDispose: Symbol.dispose || Symbol('Symbol.dispose'),
|
||||
SymbolAsyncDispose: Symbol.asyncDispose || Symbol('Symbol.asyncDispose'),
|
||||
TypedArrayPrototypeSet(self, buf, len) {
|
||||
return self.set(buf, len)
|
||||
},
|
||||
Boolean,
|
||||
Uint8Array
|
||||
}
|
||||
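The `primordials` shims above simply forward to the ordinary prototype methods; they exist so the code transplanted from Node core keeps its call shape without Node's frozen primordials. A tiny sketch, same lib-path assumption as earlier examples:

```js
// Sketch: each shim is a plain wrapper over the corresponding prototype method.
const P = require('readable-stream/lib/ours/primordials')

console.log(P.ArrayPrototypeJoin(['a', 'b', 'c'], '-'))      // 'a-b-c'
console.log(P.StringPrototypeToUpperCase('sigterm'))         // 'SIGTERM'
console.log(P.FunctionPrototypeSymbolHasInstance(Array, [])) // true (i.e. [] instanceof Array)
console.log(P.NumberParseInt('777', 8))                      // 511
```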
148
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/ours/util.js
generated
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
'use strict'
|
||||
|
||||
const bufferModule = require('buffer')
|
||||
const { format, inspect } = require('./util/inspect')
|
||||
const {
|
||||
codes: { ERR_INVALID_ARG_TYPE }
|
||||
} = require('./errors')
|
||||
const { kResistStopPropagation, AggregateError, SymbolDispose } = require('./primordials')
|
||||
const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor
|
||||
const Blob = globalThis.Blob || bufferModule.Blob
|
||||
/* eslint-disable indent */
|
||||
const isBlob =
|
||||
typeof Blob !== 'undefined'
|
||||
? function isBlob(b) {
|
||||
// eslint-disable-next-line indent
|
||||
return b instanceof Blob
|
||||
}
|
||||
: function isBlob(b) {
|
||||
return false
|
||||
}
|
||||
/* eslint-enable indent */
|
||||
|
||||
const validateAbortSignal = (signal, name) => {
|
||||
if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
|
||||
}
|
||||
}
|
||||
const validateFunction = (value, name) => {
|
||||
if (typeof value !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
|
||||
}
|
||||
}
|
||||
module.exports = {
|
||||
AggregateError,
|
||||
kEmptyObject: Object.freeze({}),
|
||||
once(callback) {
|
||||
let called = false
|
||||
return function (...args) {
|
||||
if (called) {
|
||||
return
|
||||
}
|
||||
called = true
|
||||
callback.apply(this, args)
|
||||
}
|
||||
},
|
||||
createDeferredPromise: function () {
|
||||
let resolve
|
||||
let reject
|
||||
|
||||
// eslint-disable-next-line promise/param-names
|
||||
const promise = new Promise((res, rej) => {
|
||||
resolve = res
|
||||
reject = rej
|
||||
})
|
||||
return {
|
||||
promise,
|
||||
resolve,
|
||||
reject
|
||||
}
|
||||
},
|
||||
promisify(fn) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fn((err, ...args) => {
|
||||
if (err) {
|
||||
return reject(err)
|
||||
}
|
||||
return resolve(...args)
|
||||
})
|
||||
})
|
||||
},
|
||||
debuglog() {
|
||||
return function () {}
|
||||
},
|
||||
format,
|
||||
inspect,
|
||||
types: {
|
||||
isAsyncFunction(fn) {
|
||||
return fn instanceof AsyncFunction
|
||||
},
|
||||
isArrayBufferView(arr) {
|
||||
return ArrayBuffer.isView(arr)
|
||||
}
|
||||
},
|
||||
isBlob,
|
||||
deprecate(fn, message) {
|
||||
return fn
|
||||
},
|
||||
addAbortListener:
|
||||
require('events').addAbortListener ||
|
||||
function addAbortListener(signal, listener) {
|
||||
if (signal === undefined) {
|
||||
throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal)
|
||||
}
|
||||
validateAbortSignal(signal, 'signal')
|
||||
validateFunction(listener, 'listener')
|
||||
let removeEventListener
|
||||
if (signal.aborted) {
|
||||
queueMicrotask(() => listener())
|
||||
} else {
|
||||
signal.addEventListener('abort', listener, {
|
||||
__proto__: null,
|
||||
once: true,
|
||||
[kResistStopPropagation]: true
|
||||
})
|
||||
removeEventListener = () => {
|
||||
signal.removeEventListener('abort', listener)
|
||||
}
|
||||
}
|
||||
return {
|
||||
__proto__: null,
|
||||
[SymbolDispose]() {
|
||||
var _removeEventListener
|
||||
;(_removeEventListener = removeEventListener) === null || _removeEventListener === undefined
|
||||
? undefined
|
||||
: _removeEventListener()
|
||||
}
|
||||
}
|
||||
},
|
||||
AbortSignalAny:
|
||||
AbortSignal.any ||
|
||||
function AbortSignalAny(signals) {
|
||||
// Fast path if there is only one signal.
|
||||
if (signals.length === 1) {
|
||||
return signals[0]
|
||||
}
|
||||
const ac = new AbortController()
|
||||
const abort = () => ac.abort()
|
||||
signals.forEach((signal) => {
|
||||
validateAbortSignal(signal, 'signals')
|
||||
signal.addEventListener('abort', abort, {
|
||||
once: true
|
||||
})
|
||||
})
|
||||
ac.signal.addEventListener(
|
||||
'abort',
|
||||
() => {
|
||||
signals.forEach((signal) => signal.removeEventListener('abort', abort))
|
||||
},
|
||||
{
|
||||
once: true
|
||||
}
|
||||
)
|
||||
return ac.signal
|
||||
}
|
||||
}
|
||||
module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')
|
||||
55
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/ours/util/inspect.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict'
|
||||
|
||||
/*
|
||||
This file is a reduced and adapted version of the main lib/internal/util/inspect.js file defined at
|
||||
|
||||
https://github.com/nodejs/node/blob/main/lib/internal/util/inspect.js
|
||||
|
||||
Don't try to replace with the original file and keep it up to date with the upstream file.
|
||||
*/
|
||||
module.exports = {
|
||||
format(format, ...args) {
|
||||
// Simplified version of https://nodejs.org/api/util.html#utilformatformat-args
|
||||
return format.replace(/%([sdifj])/g, function (...[_unused, type]) {
|
||||
const replacement = args.shift()
|
||||
if (type === 'f') {
|
||||
return replacement.toFixed(6)
|
||||
} else if (type === 'j') {
|
||||
return JSON.stringify(replacement)
|
||||
} else if (type === 's' && typeof replacement === 'object') {
|
||||
const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''
|
||||
return `${ctor} {}`.trim()
|
||||
} else {
|
||||
return replacement.toString()
|
||||
}
|
||||
})
|
||||
},
|
||||
inspect(value) {
|
||||
// Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options
|
||||
switch (typeof value) {
|
||||
case 'string':
|
||||
if (value.includes("'")) {
|
||||
if (!value.includes('"')) {
|
||||
return `"${value}"`
|
||||
} else if (!value.includes('`') && !value.includes('${')) {
|
||||
return `\`${value}\``
|
||||
}
|
||||
}
|
||||
return `'${value}'`
|
||||
case 'number':
|
||||
if (isNaN(value)) {
|
||||
return 'NaN'
|
||||
} else if (Object.is(value, -0)) {
|
||||
return String(value)
|
||||
}
|
||||
return value
|
||||
case 'bigint':
|
||||
return `${String(value)}n`
|
||||
case 'boolean':
|
||||
case 'undefined':
|
||||
return String(value)
|
||||
case 'object':
|
||||
return '{}'
|
||||
}
|
||||
}
|
||||
}
|
||||
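The reduced `format`/`inspect` pair above only supports the specifiers and value shapes the library's error messages actually need. A short sketch of the behaviour, under the same lib-path assumption as earlier examples:

```js
// Sketch: the cut-down format() and inspect() helpers in action.
const { format, inspect } = require('readable-stream/lib/ours/util/inspect')

console.log(format('expected %s, got %j', 'a string', { a: 1 })) // expected a string, got {"a":1}
console.log(inspect("don't"))  // "don't"  - switches quote style when the value contains a quote
console.log(inspect(123n))     // 123n
console.log(inspect({ a: 1 })) // {}       - objects are deliberately not expanded
```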
143
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/stream.js
generated
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
'use strict'
|
||||
|
||||
/* replacement start */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primordials')
|
||||
const {
|
||||
promisify: { custom: customPromisify }
|
||||
} = require('./ours/util')
|
||||
const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators')
|
||||
const {
|
||||
codes: { ERR_ILLEGAL_CONSTRUCTOR }
|
||||
} = require('./ours/errors')
|
||||
const compose = require('./internal/streams/compose')
|
||||
const { setDefaultHighWaterMark, getDefaultHighWaterMark } = require('./internal/streams/state')
|
||||
const { pipeline } = require('./internal/streams/pipeline')
|
||||
const { destroyer } = require('./internal/streams/destroy')
|
||||
const eos = require('./internal/streams/end-of-stream')
|
||||
const internalBuffer = {}
|
||||
const promises = require('./stream/promises')
|
||||
const utils = require('./internal/streams/utils')
|
||||
const Stream = (module.exports = require('./internal/streams/legacy').Stream)
|
||||
Stream.isDestroyed = utils.isDestroyed
|
||||
Stream.isDisturbed = utils.isDisturbed
|
||||
Stream.isErrored = utils.isErrored
|
||||
Stream.isReadable = utils.isReadable
|
||||
Stream.isWritable = utils.isWritable
|
||||
Stream.Readable = require('./internal/streams/readable')
|
||||
for (const key of ObjectKeys(streamReturningOperators)) {
|
||||
const op = streamReturningOperators[key]
|
||||
function fn(...args) {
|
||||
if (new.target) {
|
||||
throw ERR_ILLEGAL_CONSTRUCTOR()
|
||||
}
|
||||
return Stream.Readable.from(ReflectApply(op, this, args))
|
||||
}
|
||||
ObjectDefineProperty(fn, 'name', {
|
||||
__proto__: null,
|
||||
value: op.name
|
||||
})
|
||||
ObjectDefineProperty(fn, 'length', {
|
||||
__proto__: null,
|
||||
value: op.length
|
||||
})
|
||||
ObjectDefineProperty(Stream.Readable.prototype, key, {
|
||||
__proto__: null,
|
||||
value: fn,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
for (const key of ObjectKeys(promiseReturningOperators)) {
|
||||
const op = promiseReturningOperators[key]
|
||||
function fn(...args) {
|
||||
if (new.target) {
|
||||
throw ERR_ILLEGAL_CONSTRUCTOR()
|
||||
}
|
||||
return ReflectApply(op, this, args)
|
||||
}
|
||||
ObjectDefineProperty(fn, 'name', {
|
||||
__proto__: null,
|
||||
value: op.name
|
||||
})
|
||||
ObjectDefineProperty(fn, 'length', {
|
||||
__proto__: null,
|
||||
value: op.length
|
||||
})
|
||||
ObjectDefineProperty(Stream.Readable.prototype, key, {
|
||||
__proto__: null,
|
||||
value: fn,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
Stream.Writable = require('./internal/streams/writable')
|
||||
Stream.Duplex = require('./internal/streams/duplex')
|
||||
Stream.Transform = require('./internal/streams/transform')
|
||||
Stream.PassThrough = require('./internal/streams/passthrough')
|
||||
Stream.pipeline = pipeline
|
||||
const { addAbortSignal } = require('./internal/streams/add-abort-signal')
|
||||
Stream.addAbortSignal = addAbortSignal
|
||||
Stream.finished = eos
|
||||
Stream.destroy = destroyer
|
||||
Stream.compose = compose
|
||||
Stream.setDefaultHighWaterMark = setDefaultHighWaterMark
|
||||
Stream.getDefaultHighWaterMark = getDefaultHighWaterMark
|
||||
ObjectDefineProperty(Stream, 'promises', {
|
||||
__proto__: null,
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises
|
||||
}
|
||||
})
|
||||
ObjectDefineProperty(pipeline, customPromisify, {
|
||||
__proto__: null,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises.pipeline
|
||||
}
|
||||
})
|
||||
ObjectDefineProperty(eos, customPromisify, {
|
||||
__proto__: null,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises.finished
|
||||
}
|
||||
})
|
||||
|
||||
// Backwards-compat with node 0.4.x
|
||||
Stream.Stream = Stream
|
||||
Stream._isUint8Array = function isUint8Array(value) {
|
||||
return value instanceof Uint8Array
|
||||
}
|
||||
Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) {
|
||||
return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
}
|
||||
43
node_modules/readable-web-to-node-stream/node_modules/readable-stream/lib/stream/promises.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
'use strict'
|
||||
|
||||
const { ArrayPrototypePop, Promise } = require('../ours/primordials')
|
||||
const { isIterable, isNodeStream, isWebStream } = require('../internal/streams/utils')
|
||||
const { pipelineImpl: pl } = require('../internal/streams/pipeline')
|
||||
const { finished } = require('../internal/streams/end-of-stream')
|
||||
require('../../lib/stream.js')
|
||||
function pipeline(...streams) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let signal
|
||||
let end
|
||||
const lastArg = streams[streams.length - 1]
|
||||
if (
|
||||
lastArg &&
|
||||
typeof lastArg === 'object' &&
|
||||
!isNodeStream(lastArg) &&
|
||||
!isIterable(lastArg) &&
|
||||
!isWebStream(lastArg)
|
||||
) {
|
||||
const options = ArrayPrototypePop(streams)
|
||||
signal = options.signal
|
||||
end = options.end
|
||||
}
|
||||
pl(
|
||||
streams,
|
||||
(err, value) => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve(value)
|
||||
}
|
||||
},
|
||||
{
|
||||
signal,
|
||||
end
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
module.exports = {
|
||||
finished,
|
||||
pipeline
|
||||
}
|
||||
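`stream/promises.js` above wraps the callback-style `pipelineImpl` and `finished` into promise-returning variants, mirroring Node's `stream/promises`. A brief usage sketch, assuming the file is requireable at the lib path shown in this diff:

```js
// Sketch: promise-flavoured pipeline as exported by lib/stream/promises.js.
const { pipeline } = require('readable-stream/lib/stream/promises')
const { Readable, Writable } = require('readable-stream')

pipeline(
  Readable.from(['a', 'b', 'c']),
  new Writable({ write(chunk, encoding, callback) { callback() } })
).then(() => console.log('pipeline finished'))
```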
134
node_modules/readable-web-to-node-stream/node_modules/readable-stream/package.json
generated
vendored
@@ -1,68 +1,88 @@
{
  "name": "readable-stream",
  "version": "3.6.2",
  "description": "Streams3, a user-land copy of the stream library from Node.js",
  "main": "readable.js",
  "engines": {
    "node": ">= 6"
  },
  "dependencies": {
    "inherits": "^2.0.3",
    "string_decoder": "^1.1.1",
    "util-deprecate": "^1.0.1"
  },
  "devDependencies": {
    "@babel/cli": "^7.2.0",
    "@babel/core": "^7.2.0",
    "@babel/polyfill": "^7.0.0",
    "@babel/preset-env": "^7.2.0",
    "airtap": "0.0.9",
    "assert": "^1.4.0",
    "bl": "^2.0.0",
    "deep-strict-equal": "^0.2.0",
    "events.once": "^2.0.2",
    "glob": "^7.1.2",
    "gunzip-maybe": "^1.4.1",
    "hyperquest": "^2.1.3",
    "lolex": "^2.6.0",
    "nyc": "^11.0.0",
    "pump": "^3.0.0",
    "rimraf": "^2.6.2",
    "tap": "^12.0.0",
    "tape": "^4.9.0",
    "tar-fs": "^1.16.2",
    "util-promisify": "^2.1.0"
  },
  "scripts": {
    "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js",
    "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap",
    "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js",
    "test-browser-local": "airtap --open --local -- test/browser.js",
    "cover": "nyc npm test",
    "report": "nyc report --reporter=lcov",
    "update-browser-errors": "babel -o errors-browser.js errors.js"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/nodejs/readable-stream"
  },
  "version": "4.7.0",
  "description": "Node.js Streams, a user-land copy of the stream library from Node.js",
  "homepage": "https://github.com/nodejs/readable-stream",
  "license": "MIT",
  "licenses": [
    {
      "type": "MIT",
      "url": "https://choosealicense.com/licenses/mit/"
    }
  ],
  "keywords": [
    "readable",
    "stream",
    "pipe"
  ],
  "repository": {
    "type": "git",
    "url": "git://github.com/nodejs/readable-stream"
  },
  "bugs": {
    "url": "https://github.com/nodejs/readable-stream/issues"
  },
  "main": "lib/ours/index.js",
  "files": [
    "lib",
    "LICENSE",
    "README.md"
  ],
  "browser": {
    "util": false,
    "worker_threads": false,
    "./errors": "./errors-browser.js",
    "./readable.js": "./readable-browser.js",
    "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js",
    "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
    "util": "./lib/ours/util.js",
    "./lib/ours/index.js": "./lib/ours/browser.js"
  },
  "nyc": {
    "include": [
      "lib/**.js"
    ]
  "scripts": {
    "build": "node build/build.mjs 18.19.0",
    "postbuild": "prettier -w lib test",
    "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
    "test:prepare": "node test/browser/runner-prepare.mjs",
    "test:browsers": "node test/browser/runner-browser.mjs",
    "test:bundlers": "node test/browser/runner-node.mjs",
    "test:readable-stream-only": "node readable-stream-test/runner-prepare.mjs",
    "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
    "format": "prettier -w src lib test",
    "test:format": "prettier -c src lib test",
    "lint": "eslint src"
  },
  "license": "MIT"
  "dependencies": {
    "abort-controller": "^3.0.0",
    "buffer": "^6.0.3",
    "events": "^3.3.0",
    "process": "^0.11.10",
    "string_decoder": "^1.3.0"
  },
  "devDependencies": {
    "@babel/core": "^7.17.10",
    "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7",
    "@babel/plugin-proposal-optional-chaining": "^7.16.7",
    "@eslint/eslintrc": "^3.2.0",
    "@rollup/plugin-commonjs": "^22.0.0",
    "@rollup/plugin-inject": "^4.0.4",
    "@rollup/plugin-node-resolve": "^13.3.0",
    "@sinonjs/fake-timers": "^9.1.2",
    "browserify": "^17.0.0",
    "c8": "^7.11.2",
    "esbuild": "^0.19.9",
    "esbuild-plugin-alias": "^0.2.1",
    "eslint": "^8.15.0",
    "eslint-config-standard": "^17.0.0",
    "eslint-plugin-import": "^2.26.0",
    "eslint-plugin-n": "^15.2.0",
    "eslint-plugin-promise": "^6.0.0",
    "playwright": "^1.21.1",
    "prettier": "^2.6.2",
    "rollup": "^2.72.1",
    "rollup-plugin-polyfill-node": "^0.9.0",
    "tap": "^16.2.0",
    "tap-mocha-reporter": "^5.0.3",
    "tape": "^5.5.3",
    "tar": "^6.1.11",
    "undici": "^5.1.1",
    "webpack": "^5.72.1",
    "webpack-cli": "^4.9.2"
  },
  "engines": {
    "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
  }
}
@@ -1,9 +0,0 @@
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
exports.finished = require('./lib/internal/streams/end-of-stream.js');
exports.pipeline = require('./lib/internal/streams/pipeline.js');
16
node_modules/readable-web-to-node-stream/node_modules/readable-stream/readable.js
generated
vendored
@@ -1,16 +0,0 @@
var Stream = require('stream');
if (process.env.READABLE_STREAM === 'disable' && Stream) {
  module.exports = Stream.Readable;
  Object.assign(module.exports, Stream);
  module.exports.Stream = Stream;
} else {
  exports = module.exports = require('./lib/_stream_readable.js');
  exports.Stream = Stream || exports;
  exports.Readable = exports;
  exports.Writable = require('./lib/_stream_writable.js');
  exports.Duplex = require('./lib/_stream_duplex.js');
  exports.Transform = require('./lib/_stream_transform.js');
  exports.PassThrough = require('./lib/_stream_passthrough.js');
  exports.finished = require('./lib/internal/streams/end-of-stream.js');
  exports.pipeline = require('./lib/internal/streams/pipeline.js');
}
48
node_modules/readable-web-to-node-stream/node_modules/string_decoder/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,48 @@
Node.js is licensed for use as follows:

"""
Copyright Node.js contributors. All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""

This license applies to parts of Node.js originating from the
https://github.com/joyent/node repository:

"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""

47
node_modules/readable-web-to-node-stream/node_modules/string_decoder/README.md
generated
vendored
Normal file
@@ -0,0 +1,47 @@
# string_decoder

***Node-core v8.9.4 string_decoder for userland***


[](https://nodei.co/npm/string_decoder/)
[](https://nodei.co/npm/string_decoder/)


```bash
npm install --save string_decoder
```

***Node-core string_decoder for userland***

This package is a mirror of the string_decoder implementation in Node-core.

Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/).

As of version 1.0.0 **string_decoder** uses semantic versioning.
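
A short usage sketch (not part of the upstream README) of the behaviour this package provides: bytes of a multi-byte character are buffered across `write()` calls until the character is complete.

```js
const { StringDecoder } = require('string_decoder');
const decoder = new StringDecoder('utf8');

// '€' is the three-byte UTF-8 sequence 0xE2 0x82 0xAC; split it across writes.
console.log(JSON.stringify(decoder.write(Buffer.from([0xe2, 0x82])))); // ""
console.log(JSON.stringify(decoder.write(Buffer.from([0xac]))));       // "€"

// end() flushes any remainder; a dangling partial character becomes U+FFFD.
console.log(JSON.stringify(decoder.end()));                            // ""
```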

## Previous versions

Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10.

## Update

The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version.

## Streams Working Group

`string_decoder` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:

* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
  project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
  notice of changes.

See [readable-stream](https://github.com/nodejs/readable-stream) for
more details.
296
node_modules/readable-web-to-node-stream/node_modules/string_decoder/lib/string_decoder.js
generated
vendored
Normal file
@@ -0,0 +1,296 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

'use strict';

/*<replacement>*/

var Buffer = require('safe-buffer').Buffer;
/*</replacement>*/

var isEncoding = Buffer.isEncoding || function (encoding) {
  encoding = '' + encoding;
  switch (encoding && encoding.toLowerCase()) {
    case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
      return true;
    default:
      return false;
  }
};

function _normalizeEncoding(enc) {
  if (!enc) return 'utf8';
  var retried;
  while (true) {
    switch (enc) {
      case 'utf8':
      case 'utf-8':
        return 'utf8';
      case 'ucs2':
      case 'ucs-2':
      case 'utf16le':
      case 'utf-16le':
        return 'utf16le';
      case 'latin1':
      case 'binary':
        return 'latin1';
      case 'base64':
      case 'ascii':
      case 'hex':
        return enc;
      default:
        if (retried) return; // undefined
        enc = ('' + enc).toLowerCase();
        retried = true;
    }
  }
};

// Do not cache `Buffer.isEncoding` when checking encoding names as some
// modules monkey-patch it to support additional encodings
function normalizeEncoding(enc) {
  var nenc = _normalizeEncoding(enc);
  if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
  return nenc || enc;
}

// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters.
exports.StringDecoder = StringDecoder;
function StringDecoder(encoding) {
  this.encoding = normalizeEncoding(encoding);
  var nb;
  switch (this.encoding) {
    case 'utf16le':
      this.text = utf16Text;
      this.end = utf16End;
      nb = 4;
      break;
    case 'utf8':
      this.fillLast = utf8FillLast;
      nb = 4;
      break;
    case 'base64':
      this.text = base64Text;
      this.end = base64End;
      nb = 3;
      break;
    default:
      this.write = simpleWrite;
      this.end = simpleEnd;
      return;
  }
  this.lastNeed = 0;
  this.lastTotal = 0;
  this.lastChar = Buffer.allocUnsafe(nb);
}

StringDecoder.prototype.write = function (buf) {
  if (buf.length === 0) return '';
  var r;
  var i;
  if (this.lastNeed) {
    r = this.fillLast(buf);
    if (r === undefined) return '';
    i = this.lastNeed;
    this.lastNeed = 0;
  } else {
    i = 0;
  }
  if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
  return r || '';
};

StringDecoder.prototype.end = utf8End;

// Returns only complete characters in a Buffer
StringDecoder.prototype.text = utf8Text;

// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
StringDecoder.prototype.fillLast = function (buf) {
  if (this.lastNeed <= buf.length) {
    buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
  this.lastNeed -= buf.length;
};

// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
// continuation byte. If an invalid byte is detected, -2 is returned.
function utf8CheckByte(byte) {
  if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
  return byte >> 6 === 0x02 ? -1 : -2;
}

// Checks at most 3 bytes at the end of a Buffer in order to detect an
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
// needed to complete the UTF-8 character (if applicable) are returned.
function utf8CheckIncomplete(self, buf, i) {
  var j = buf.length - 1;
  if (j < i) return 0;
  var nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) self.lastNeed = nb - 1;
    return nb;
  }
  if (--j < i || nb === -2) return 0;
  nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) self.lastNeed = nb - 2;
    return nb;
  }
  if (--j < i || nb === -2) return 0;
  nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) {
      if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
    }
    return nb;
  }
  return 0;
}

// Validates as many continuation bytes for a multi-byte UTF-8 character as
// needed or are available. If we see a non-continuation byte where we expect
// one, we "replace" the validated continuation bytes we've seen so far with
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
// behavior. The continuation byte check is included three times in the case
// where all of the continuation bytes for a character exist in the same buffer.
// It is also done this way as a slight performance increase instead of using a
// loop.
function utf8CheckExtraBytes(self, buf, p) {
  if ((buf[0] & 0xC0) !== 0x80) {
    self.lastNeed = 0;
    return '\ufffd';
  }
  if (self.lastNeed > 1 && buf.length > 1) {
    if ((buf[1] & 0xC0) !== 0x80) {
      self.lastNeed = 1;
      return '\ufffd';
    }
    if (self.lastNeed > 2 && buf.length > 2) {
      if ((buf[2] & 0xC0) !== 0x80) {
        self.lastNeed = 2;
        return '\ufffd';
      }
    }
  }
}

// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
function utf8FillLast(buf) {
  var p = this.lastTotal - this.lastNeed;
  var r = utf8CheckExtraBytes(this, buf, p);
  if (r !== undefined) return r;
  if (this.lastNeed <= buf.length) {
    buf.copy(this.lastChar, p, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  buf.copy(this.lastChar, p, 0, buf.length);
  this.lastNeed -= buf.length;
}

// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
// partial character, the character's bytes are buffered until the required
// number of bytes are available.
function utf8Text(buf, i) {
  var total = utf8CheckIncomplete(this, buf, i);
  if (!this.lastNeed) return buf.toString('utf8', i);
  this.lastTotal = total;
  var end = buf.length - (total - this.lastNeed);
  buf.copy(this.lastChar, 0, end);
  return buf.toString('utf8', i, end);
}

// For UTF-8, a replacement character is added when ending on a partial
// character.
function utf8End(buf) {
  var r = buf && buf.length ? this.write(buf) : '';
  if (this.lastNeed) return r + '\ufffd';
  return r;
}

// UTF-16LE typically needs two bytes per character, but even if we have an even
// number of bytes available, we need to check if we end on a leading/high
// surrogate. In that case, we need to wait for the next two bytes in order to
// decode the last character properly.
function utf16Text(buf, i) {
  if ((buf.length - i) % 2 === 0) {
    var r = buf.toString('utf16le', i);
    if (r) {
      var c = r.charCodeAt(r.length - 1);
      if (c >= 0xD800 && c <= 0xDBFF) {
        this.lastNeed = 2;
        this.lastTotal = 4;
        this.lastChar[0] = buf[buf.length - 2];
        this.lastChar[1] = buf[buf.length - 1];
        return r.slice(0, -1);
      }
    }
    return r;
  }
  this.lastNeed = 1;
  this.lastTotal = 2;
  this.lastChar[0] = buf[buf.length - 1];
  return buf.toString('utf16le', i, buf.length - 1);
}

// For UTF-16LE we do not explicitly append special replacement characters if we
// end on a partial character, we simply let v8 handle that.
function utf16End(buf) {
  var r = buf && buf.length ? this.write(buf) : '';
  if (this.lastNeed) {
    var end = this.lastTotal - this.lastNeed;
    return r + this.lastChar.toString('utf16le', 0, end);
  }
  return r;
}

function base64Text(buf, i) {
  var n = (buf.length - i) % 3;
  if (n === 0) return buf.toString('base64', i);
  this.lastNeed = 3 - n;
  this.lastTotal = 3;
  if (n === 1) {
    this.lastChar[0] = buf[buf.length - 1];
  } else {
    this.lastChar[0] = buf[buf.length - 2];
    this.lastChar[1] = buf[buf.length - 1];
  }
  return buf.toString('base64', i, buf.length - n);
}

function base64End(buf) {
  var r = buf && buf.length ? this.write(buf) : '';
  if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
  return r;
}

// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
function simpleWrite(buf) {
  return buf.toString(this.encoding);
}

function simpleEnd(buf) {
  return buf && buf.length ? this.write(buf) : '';
}
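As a stand-alone sanity check of the lead-byte classification that `utf8CheckByte` implements above, here is an illustrative snippet (not part of the vendored file) that mirrors its table:

```js
// Mirrors utf8CheckByte above: 0 = ASCII, 2/3/4 = lead byte of an n-byte
// sequence, -1 = continuation byte, -2 = invalid.
function classify(byte) {
  if (byte <= 0x7f) return 0;
  if (byte >> 5 === 0x06) return 2; // 110xxxxx
  if (byte >> 4 === 0x0e) return 3; // 1110xxxx
  if (byte >> 3 === 0x1e) return 4; // 11110xxx
  return byte >> 6 === 0x02 ? -1 : -2;
}

// '€' (0xE2 0x82 0xAC): the lead byte announces a three-byte sequence,
// the remaining bytes are continuations; plain ASCII stands alone.
console.log(classify(0xe2)); // 3
console.log(classify(0x82)); // -1
console.log(classify(0x41)); // 0
```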
34
node_modules/readable-web-to-node-stream/node_modules/string_decoder/package.json
generated
vendored
Normal file
@@ -0,0 +1,34 @@
{
  "name": "string_decoder",
  "version": "1.3.0",
  "description": "The string_decoder module from Node core",
  "main": "lib/string_decoder.js",
  "files": [
    "lib"
  ],
  "dependencies": {
    "safe-buffer": "~5.2.0"
  },
  "devDependencies": {
    "babel-polyfill": "^6.23.0",
    "core-util-is": "^1.0.2",
    "inherits": "^2.0.3",
    "tap": "~0.4.8"
  },
  "scripts": {
    "test": "tap test/parallel/*.js && node test/verify-dependencies",
    "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/nodejs/string_decoder.git"
  },
  "homepage": "https://github.com/nodejs/string_decoder",
  "keywords": [
    "string",
    "decoder",
    "browser",
    "browserify"
  ],
  "license": "MIT"
}
168
node_modules/readable-web-to-node-stream/package.json
generated
vendored
@@ -1,84 +1,84 @@
{
  "name": "readable-web-to-node-stream",
  "version": "3.0.2",
  "description": "Converts a Web-API readable-stream into a Node readable-stream.",
  "main": "lib/index.js",
  "files": [
    "lib/**/*.js",
    "lib/**/*.d.ts"
  ],
  "engines": {
    "node": ">=8"
  },
  "types": "lib/index.d.ts",
  "scripts": {
    "clean": "del-cli lib/**/*.js lib/**/*.js.map lib/**/*.d.ts coverage",
    "compile-lib": "tsc -p lib/tsconfig.json",
    "compile-test": "tsc -p lib/tsconfig.spec.json",
    "prepublishOnly": "yarn run build",
    "build": "npm run compile-lib && npm run compile-test",
    "tslint": "tslint 'lib/**/*.ts' --exclude 'lib/**/*.d.ts'",
    "eslint": "eslint karma.conf.js",
    "lint": "npm run tslint && npm run eslint",
    "test": "karma start --single-run",
    "karma": "karma start",
    "karma-firefox": "karma start --browsers Firefox",
    "karma-once": "karma start --browsers Chrome --single-run",
    "travis-karma": "karma start --browsers Firefox --single-run --reporters coverage-istanbul,spec",
    "browserstack": "karma start --browsers bs_win_chrome,bs_win_firefox,bs_osx_safari --single-run --reporters coverage-istanbul,spec",
    "travis-karma-browserstack": "karma start --browsers bs_win_chrome,bs_win_firefox,bs_osx_safari --single-run --reporters coverage-istanbul,spec,BrowserStack",
    "post-coveralls": "coveralls < coverage/lcov.info",
    "post-codacy": " codacy-coverage < coverage/lcov.info"
  },
  "keywords": [
    "stream.readable",
    "web",
    "node",
    "browser",
    "stream",
    "covert",
    "coverter",
    "readable",
    "readablestream"
  ],
  "repository": "https://github.com/Borewit/readable-web-to-node-stream.git",
  "author": {
    "name": "Borewit",
    "url": "https://github.com/Borewit"
  },
  "funding": {
    "type": "github",
    "url": "https://github.com/sponsors/Borewit"
  },
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/Borewit/readable-web-to-node-stream/issues"
  },
  "dependencies": {
    "readable-stream": "^3.6.0"
  },
  "devDependencies": {
    "@types/jasmine": "^3.8.1",
    "@types/node": "^16.3.1",
    "@types/readable-stream": "^2.3.9",
    "coveralls": "^3.1.0",
    "del-cli": "^3.0.1",
    "eslint": "^7.18.0",
    "istanbul-instrumenter-loader": "^3.0.1",
    "jasmine-core": "^3.8.0",
    "karma": "^6.3.4",
    "karma-browserstack-launcher": "^1.6.0",
    "karma-chrome-launcher": "^3.1.0",
    "karma-coverage-istanbul-reporter": "^3.0.3",
    "karma-firefox-launcher": "^2.1.0",
    "karma-jasmine": "^4.0.1",
    "karma-jasmine-html-reporter": "^1.7.0",
    "karma-spec-reporter": "^0.0.32",
    "karma-webpack": "^5.0.0",
    "music-metadata-browser": "^2.2.7",
    "ts-loader": "^8.0.14",
    "tslint": "^6.1.3",
    "typescript": "^4.3.5",
    "webpack": "^4.46.0"
  }
}
{
  "name": "readable-web-to-node-stream",
  "version": "3.0.4",
  "description": "Converts a Web-API readable-stream into a Node readable-stream.",
  "type": "commonjs",
  "main": "lib/index.js",
  "types": "lib/index.d.ts",
  "files": [
    "lib/index.js",
    "lib/index.d.ts"
  ],
  "engines": {
    "node": ">=8"
  },
  "scripts": {
    "clean": "del-cli 'lib/**/*.js' 'lib/**/*.js.map' 'lib/**/*.d.ts' 'coverage'",
    "compile-lib": "tsc -p lib/tsconfig.json",
    "compile-test": "tsc -p lib/tsconfig.spec.json",
    "prepublishOnly": "yarn run build",
    "build": "yarn run compile-lib && npm run compile-test",
    "lint-ts": "biome check",
    "lint": "yarn run biome check",
    "test": "karma start --single-run",
    "karma-headless": "karma start --single-run --reporters coverage-istanbul,spec,progress",
    "karma": "karma start",
    "karma-firefox": "karma start --browsers Firefox",
    "karma-once": "karma start --browsers Chrome --single-run",
    "post-coveralls": "coveralls < coverage/lcov.info",
    "post-codacy": " codacy-coverage < coverage/lcov.info"
  },
  "keywords": [
    "stream.readable",
    "web",
    "node",
    "browser",
    "stream",
    "covert",
    "coverter",
    "readable",
    "readablestream"
  ],
  "repository": "https://github.com/Borewit/readable-web-to-node-stream.git",
  "author": {
    "name": "Borewit",
    "url": "https://github.com/Borewit"
  },
  "funding": {
    "type": "github",
    "url": "https://github.com/sponsors/Borewit"
  },
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/Borewit/readable-web-to-node-stream/issues"
  },
  "dependencies": {
    "readable-stream": "^4.7.0"
  },
  "devDependencies": {
    "@biomejs/biome": "^1.9.4",
    "@types/jasmine": "^5.1.6",
    "@types/node": "^22.13.4",
    "@types/readable-stream": "^4.0.18",
    "coveralls": "^3.1.1",
    "del-cli": "^6.0.0",
    "istanbul-instrumenter-loader": "^3.0.1",
    "jasmine-core": "^5.6.0",
    "karma": "^6.4.4",
    "karma-chrome-launcher": "^3.2.0",
    "karma-coverage-istanbul-reporter": "^3.0.3",
    "karma-edge-launcher": "^0.4.2",
    "karma-firefox-launcher": "^2.1.3",
    "karma-jasmine": "^5.1.0",
    "karma-jasmine-html-reporter": "^2.1.0",
    "karma-safari-launcher": "^1.0.0",
    "karma-spec-reporter": "^0.0.36",
    "karma-webpack": "^5.0.1",
    "music-metadata-browser": "^2.5.11",
    "ts-loader": "^9.5.2",
    "typescript": "^5.7.3",
    "webpack": "^5.98.0",
    "webpack-cli": "^6.0.1"
  },
  "packageManager": "yarn@4.6.0"
}