update, text, response

This commit is contained in:
2025-11-02 11:09:14 +01:00
parent 14776c86b0
commit eed8a4ddcf
2794 changed files with 156786 additions and 129204 deletions

View File

@@ -8,10 +8,12 @@ jobs:
strategy:
matrix:
node-version: ['0.10', '0.12', 4.x, 6.x, 8.x]
node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x]
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
persist-credentials: false
- name: Use Node.js
uses: actions/setup-node@v1
@@ -31,13 +33,15 @@ jobs:
strategy:
matrix:
node-version: [10.x, 12.x, 13.x, 14.x, 15.x, 16.x]
node-version: [18.x, 20.x, 22.x]
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
persist-credentials: false
- name: Use Node.js
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
@@ -48,3 +52,24 @@ jobs:
- name: Run tests
run: |
npm run test
types:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
persist-credentials: false
- name: Use Node.js
uses: actions/setup-node@v3
with:
node-version: 16
- name: Install
run: |
npm install
- name: Run types tests
run: |
npm run typescript

9
node_modules/fastq/README.md generated vendored
View File

@@ -2,7 +2,6 @@
![ci][ci-url]
[![npm version][npm-badge]][npm-url]
[![Dependency Status][david-badge]][david-url]
Fast, in-memory work queue.
@@ -234,6 +233,12 @@ each time a task is completed, `err` will be not null if the task has thrown an
Property that returns the number of concurrent tasks that could be executed in
parallel. It can be altered at runtime.
-------------------------------------------------------
<a name="paused"></a>
### queue.paused
Property (Read-Only) that returns `true` when the queue is in a paused state.
-------------------------------------------------------
<a name="drain"></a>
### queue.drain
@@ -305,5 +310,3 @@ ISC
[ci-url]: https://github.com/mcollina/fastq/workflows/ci/badge.svg
[npm-badge]: https://badge.fury.io/js/fastq.svg
[npm-url]: https://badge.fury.io/js/fastq
[david-badge]: https://david-dm.org/mcollina/fastq.svg
[david-url]: https://david-dm.org/mcollina/fastq

15
node_modules/fastq/SECURITY.md generated vendored Normal file
View File

@@ -0,0 +1,15 @@
# Security Policy
## Supported Versions
Use this section to tell people about which versions of your project are
currently being supported with security updates.
| Version | Supported |
| ------- | ------------------ |
| 1.x | :white_check_mark: |
| < 1.0 | :x: |
## Reporting a Vulnerability
Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security).

20
node_modules/fastq/index.d.ts generated vendored
View File

@@ -8,25 +8,45 @@ declare namespace fastq {
type errorHandler<T = any> = (err: Error, task: T) => void
interface queue<T = any, R = any> {
/** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */
push(task: T, done?: done<R>): void
/** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */
unshift(task: T, done?: done<R>): void
/** Pause the processing of tasks. Currently worked tasks are not stopped. */
pause(): any
/** Resume the processing of tasks. */
resume(): any
/** Returns the number of tasks currently being processed by workers. */
running(): number
/** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */
idle(): boolean
/** Returns the number of tasks waiting to be processed (in the queue). */
length(): number
/** Returns all tasks waiting to be processed (in the queue). Returns an empty array when there are no tasks. */
getQueue(): T[]
/** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */
kill(): any
/** Same as `kill`, but the `drain` function will be called before the reset to empty. */
killAndDrain(): any
/** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will not be null if the task has thrown an error. */
error(handler: errorHandler<T>): void
/** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */
concurrency: number
/** Property (Read-Only) that returns `true` when the queue is in a paused state. */
readonly paused: boolean
/** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */
drain(): any
/** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */
empty: () => void
/** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */
saturated: () => void
}
interface queueAsPromised<T = any, R = any> extends queue<T, R> {
/** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
push(task: T): Promise<R>
/** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */
unshift(task: T): Promise<R>
/** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. Resolves asynchronously (on a later tick) if the queue is already idle. */
drained(): Promise<void>
}

11
node_modules/fastq/package.json generated vendored
View File

@@ -1,6 +1,6 @@
{
"name": "fastq",
"version": "1.15.0",
"version": "1.19.1",
"description": "Fast, in memory work queue",
"main": "queue.js",
"scripts": {
@@ -8,12 +8,13 @@
"unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test/test.js test/promise.js",
"coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js test/promise.js",
"test:report": "npm run lint && npm run unit:report",
"test": "npm run lint && npm run unit && npm run typescript",
"test": "npm run lint && npm run unit",
"typescript": "tsc --project ./test/tsconfig.json",
"legacy": "tape test/test.js"
},
"pre-commit": [
"test"
"test",
"typescript"
],
"repository": {
"type": "git",
@@ -34,12 +35,12 @@
"devDependencies": {
"async": "^3.1.0",
"neo-async": "^2.6.1",
"nyc": "^15.0.0",
"nyc": "^17.0.0",
"pre-commit": "^1.2.2",
"snazzy": "^9.0.0",
"standard": "^16.0.0",
"tape": "^5.0.0",
"typescript": "^4.0.2"
"typescript": "^5.0.4"
},
"dependencies": {
"reusify": "^1.0.4"

70
node_modules/fastq/queue.js generated vendored
View File

@@ -4,15 +4,15 @@
var reusify = require('reusify')
function fastqueue (context, worker, concurrency) {
function fastqueue (context, worker, _concurrency) {
if (typeof context === 'function') {
concurrency = worker
_concurrency = worker
worker = context
context = null
}
if (concurrency < 1) {
throw new Error('fastqueue concurrency must be greater than 1')
if (!(_concurrency >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
}
var cache = reusify(Task)
@@ -27,7 +27,23 @@ function fastqueue (context, worker, concurrency) {
saturated: noop,
pause: pause,
paused: false,
concurrency: concurrency,
get concurrency () {
return _concurrency
},
set concurrency (value) {
if (!(value >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
}
_concurrency = value
if (self.paused) return
for (; queueHead && _running < _concurrency;) {
_running++
release()
}
},
running: running,
resume: resume,
idle: idle,
@@ -77,7 +93,12 @@ function fastqueue (context, worker, concurrency) {
function resume () {
if (!self.paused) return
self.paused = false
for (var i = 0; i < self.concurrency; i++) {
if (queueHead === null) {
_running++
release()
return
}
for (; queueHead && _running < _concurrency;) {
_running++
release()
}
@@ -96,7 +117,7 @@ function fastqueue (context, worker, concurrency) {
current.callback = done || noop
current.errorHandler = errorHandler
if (_running === self.concurrency || self.paused) {
if (_running >= _concurrency || self.paused) {
if (queueTail) {
queueTail.next = current
queueTail = current
@@ -118,8 +139,9 @@ function fastqueue (context, worker, concurrency) {
current.release = release
current.value = value
current.callback = done || noop
current.errorHandler = errorHandler
if (_running === self.concurrency || self.paused) {
if (_running >= _concurrency || self.paused) {
if (queueHead) {
current.next = queueHead
queueHead = current
@@ -139,7 +161,7 @@ function fastqueue (context, worker, concurrency) {
cache.release(holder)
}
var next = queueHead
if (next) {
if (next && _running <= _concurrency) {
if (!self.paused) {
if (queueTail === queueHead) {
queueTail = null
@@ -202,9 +224,9 @@ function Task () {
}
}
function queueAsPromised (context, worker, concurrency) {
function queueAsPromised (context, worker, _concurrency) {
if (typeof context === 'function') {
concurrency = worker
_concurrency = worker
worker = context
context = null
}
@@ -216,7 +238,7 @@ function queueAsPromised (context, worker, concurrency) {
}, cb)
}
var queue = fastqueue(context, asyncWrapper, concurrency)
var queue = fastqueue(context, asyncWrapper, _concurrency)
var pushCb = queue.push
var unshiftCb = queue.unshift
@@ -266,19 +288,19 @@ function queueAsPromised (context, worker, concurrency) {
}
function drained () {
if (queue.idle()) {
return new Promise(function (resolve) {
resolve()
})
}
var previousDrain = queue.drain
var p = new Promise(function (resolve) {
queue.drain = function () {
previousDrain()
resolve()
}
process.nextTick(function () {
if (queue.idle()) {
resolve()
} else {
var previousDrain = queue.drain
queue.drain = function () {
if (typeof previousDrain === 'function') previousDrain()
resolve()
queue.drain = previousDrain
}
}
})
})
return p

2
node_modules/fastq/test/example.ts generated vendored
View File

@@ -32,6 +32,8 @@ queue.pause()
queue.resume()
queue.running()
queue.saturated = () => undefined
queue.unshift('world', (err, result) => {

43
node_modules/fastq/test/promise.js generated vendored
View File

@@ -246,3 +246,46 @@ test('no unhandledRejection (unshift)', async function (t) {
await immediate()
process.removeListener('unhandledRejection', handleRejection)
})
// Verifies ordering: `drained()` must resolve only after every queued async
// task has finished AND the user-supplied `drain` callback has already run.
test('drained should resolve after async tasks complete', async function (t) {
const logs = []
async function processTask () {
await new Promise(resolve => setTimeout(resolve, 0))
logs.push('processed')
}
const queue = buildQueue(processTask, 1)
queue.drain = () => logs.push('called drain')
queue.drained().then(() => logs.push('drained promise resolved'))
await Promise.all([
queue.push(),
queue.push(),
queue.push()
])
t.deepEqual(logs, [
'processed',
'processed',
'processed',
'called drain',
'drained promise resolved'
], 'events happened in correct order')
})
// Verifies that `drained()` does not throw when the user has explicitly set
// `queue.drain` to undefined — the wrapper must guard the callback invocation.
test('drained should handle undefined drain function', async function (t) {
const queue = buildQueue(worker, 1)
async function worker (arg) {
await sleep(10)
return arg
}
queue.drain = undefined
queue.push(1)
await queue.drained()
t.pass('drained resolved successfully with undefined drain')
})

119
node_modules/fastq/test/test.js generated vendored
View File

@@ -6,10 +6,22 @@ var test = require('tape')
var buildQueue = require('../')
test('concurrency', function (t) {
t.plan(2)
t.plan(6)
t.throws(buildQueue.bind(null, worker, 0))
t.throws(buildQueue.bind(null, worker, NaN))
t.doesNotThrow(buildQueue.bind(null, worker, 1))
var queue = buildQueue(worker, 1)
t.throws(function () {
queue.concurrency = 0
})
t.throws(function () {
queue.concurrency = NaN
})
t.doesNotThrow(function () {
queue.concurrency = 2
})
function worker (arg, cb) {
cb(null, true)
}
@@ -137,10 +149,11 @@ test('drain', function (t) {
})
test('pause && resume', function (t) {
t.plan(7)
t.plan(13)
var queue = buildQueue(worker, 1)
var worked = false
var expected = [42, 24]
t.notOk(queue.paused, 'it should not be paused')
@@ -151,26 +164,33 @@ test('pause && resume', function (t) {
t.equal(result, true, 'result matches')
})
queue.push(24, function (err, result) {
t.error(err, 'no error')
t.equal(result, true, 'result matches')
})
t.notOk(worked, 'it should be paused')
t.ok(queue.paused, 'it should be paused')
queue.resume()
queue.pause()
queue.resume()
queue.resume() // second resume is a no-op
t.notOk(queue.paused, 'it should not be paused')
function worker (arg, cb) {
t.equal(arg, 42)
t.notOk(queue.paused, 'it should not be paused')
t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency')
t.equal(arg, expected.shift())
worked = true
cb(null, true)
process.nextTick(function () { cb(null, true) })
}
})
test('pause in flight && resume', function (t) {
t.plan(9)
t.plan(16)
var queue = buildQueue(worker, 1)
var expected = [42, 24]
var expected = [42, 24, 12]
t.notOk(queue.paused, 'it should not be paused')
@@ -178,7 +198,11 @@ test('pause in flight && resume', function (t) {
t.error(err, 'no error')
t.equal(result, true, 'result matches')
t.ok(queue.paused, 'it should be paused')
process.nextTick(function () { queue.resume() })
process.nextTick(function () {
queue.resume()
queue.pause()
queue.resume()
})
})
queue.push(24, function (err, result) {
@@ -187,40 +211,60 @@ test('pause in flight && resume', function (t) {
t.notOk(queue.paused, 'it should not be paused')
})
queue.push(12, function (err, result) {
t.error(err, 'no error')
t.equal(result, true, 'result matches')
t.notOk(queue.paused, 'it should not be paused')
})
queue.pause()
function worker (arg, cb) {
t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency')
t.equal(arg, expected.shift())
process.nextTick(function () { cb(null, true) })
}
})
test('altering concurrency', function (t) {
t.plan(7)
t.plan(24)
var queue = buildQueue(worker, 1)
var count = 0
queue.push(24, workDone)
queue.push(24, workDone)
queue.push(24, workDone)
queue.pause()
queue.push(24, workDone)
queue.push(24, workDone)
queue.concurrency = 3 // concurrency changes are ignored while paused
queue.concurrency = 2
queue.resume()
t.equal(queue.running(), 2, '2 jobs running')
queue.concurrency = 3
t.equal(queue.running(), 3, '3 jobs running')
queue.concurrency = 1
t.equal(queue.running(), 3, '3 jobs running') // running jobs can't be killed
queue.push(24, workDone)
queue.push(24, workDone)
queue.push(24, workDone)
queue.push(24, workDone)
function workDone (err, result) {
t.error(err, 'no error')
t.equal(result, true, 'result matches')
}
function worker (arg, cb) {
t.equal(0, count, 'works in parallel')
t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency')
setImmediate(function () {
count++
cb(null, true)
})
}
@@ -564,3 +608,46 @@ test('push with worker throwing error', function (t) {
t.match(err.message, /test error/, 'error message should be "test error"')
})
})
// `unshift` must route a worker error both to the global `error` handler and
// to the per-task callback, mirroring the behavior already tested for `push`.
test('unshift with worker throwing error', function (t) {
t.plan(5)
var q = buildQueue(function (task, cb) {
cb(new Error('test error'), null)
}, 1)
q.error(function (err, task) {
t.ok(err instanceof Error, 'global error handler should catch the error')
t.match(err.message, /test error/, 'error message should be "test error"')
t.equal(task, 42, 'The task executed should be passed')
})
q.unshift(42, function (err) {
t.ok(err instanceof Error, 'unshift callback should catch the error')
t.match(err.message, /test error/, 'error message should be "test error"')
})
})
// Resuming a paused queue with no pending tasks must still fire the `drain`
// callback (the queue is immediately empty after resume).
test('pause/resume should trigger drain event', function (t) {
t.plan(1)
var queue = buildQueue(worker, 1)
queue.pause()
queue.drain = function () {
t.pass('drain should be called')
}
function worker (arg, cb) {
cb(null, true)
}
queue.resume()
})
// The read-only `paused` flag must reflect pause() transitions.
test('paused flag', function (t) {
t.plan(2)
var queue = buildQueue(function (arg, cb) {
cb(null)
}, 1)
t.equal(queue.paused, false)
queue.pause()
t.equal(queue.paused, true)
})