update, text, response
This commit is contained in:
375
node_modules/sass/LICENSE
generated
vendored
375
node_modules/sass/LICENSE
generated
vendored
@@ -182,8 +182,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
async, cli_util, collection, mime, source_map_stack_trace, stream_channel and
|
||||
typed_data license:
|
||||
async, cli_util, collection, mime, stream_channel and typed_data license:
|
||||
|
||||
Copyright 2015, the Dart project authors.
|
||||
|
||||
@@ -251,32 +250,32 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
charcode license:
|
||||
|
||||
Copyright 2014, the Dart project authors. All rights reserved.
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
Copyright 2014, the Dart project authors. All rights reserved.
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
@@ -311,6 +310,38 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
cli_config license:
|
||||
|
||||
Copyright 2023, the Dart project authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
cli_pkg license:
|
||||
@@ -550,7 +581,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
convert, crypto, shelf_static and vm_service license:
|
||||
convert, crypto, shelf_static, source_map_stack_trace and vm_service license:
|
||||
|
||||
Copyright 2015, the Dart project authors.
|
||||
|
||||
@@ -615,6 +646,65 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
dart_mappable and type_plus license:
|
||||
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021 Kilian Schulte
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
ffi and package_config license:
|
||||
|
||||
Copyright 2019, the Dart project authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
file license:
|
||||
@@ -940,6 +1030,72 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
native_stack_traces license:
|
||||
|
||||
Copyright 2020, the Dart project authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
native_synchronization license:
|
||||
|
||||
Copyright 2023, the Dart project authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
node_interop license:
|
||||
@@ -1024,46 +1180,13 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
package_config license:
|
||||
|
||||
Copyright 2019, the Dart project authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
petitparser license:
|
||||
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2006-2023 Lukas Renggli.
|
||||
Copyright (c) 2006-2024 Lukas Renggli.
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
@@ -1085,32 +1208,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
pointycastle license:
|
||||
|
||||
|
||||
Copyright (c) 2000 - 2019 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
pub_api_client license:
|
||||
@@ -1138,36 +1235,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
pubspec license:
|
||||
|
||||
Copyright (c) 2015, Anders Holmgren.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the <organization> nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
pubspec_parse license:
|
||||
@@ -1203,7 +1270,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
quiver and retry license:
|
||||
retry license:
|
||||
|
||||
|
||||
Apache License
|
||||
@@ -1509,36 +1576,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
tuple license:
|
||||
web license:
|
||||
|
||||
Copyright (c) 2014, the tuple project authors.
|
||||
All rights reserved.
|
||||
Copyright 2023, the Dart project authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
uri license:
|
||||
|
||||
Copyright 2013, the Dart project authors. All rights reserved.
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
@@ -1548,9 +1589,43 @@ met:
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
web_socket license:
|
||||
|
||||
Copyright 2024, the Dart project authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived
|
||||
from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
@@ -1570,7 +1645,7 @@ xml license:
|
||||
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2006-2022 Lukas Renggli.
|
||||
Copyright (c) 2006-2025 Lukas Renggli.
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
|
||||
21
node_modules/sass/node_modules/chokidar/LICENSE
generated
vendored
Normal file
21
node_modules/sass/node_modules/chokidar/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2012 Paul Miller (https://paulmillr.com), Elan Shanker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the “Software”), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
305
node_modules/sass/node_modules/chokidar/README.md
generated
vendored
Normal file
305
node_modules/sass/node_modules/chokidar/README.md
generated
vendored
Normal file
@@ -0,0 +1,305 @@
|
||||
# Chokidar [](https://github.com/paulmillr/chokidar)
|
||||
|
||||
> Minimal and efficient cross-platform file watching library
|
||||
|
||||
## Why?
|
||||
|
||||
There are many reasons to prefer Chokidar to raw fs.watch / fs.watchFile in 2024:
|
||||
|
||||
- Events are properly reported
|
||||
- macOS events report filenames
|
||||
- events are not reported twice
|
||||
- changes are reported as add / change / unlink instead of useless `rename`
|
||||
- Atomic writes are supported, using `atomic` option
|
||||
- Some file editors use them
|
||||
- Chunked writes are supported, using `awaitWriteFinish` option
|
||||
- Large files are commonly written in chunks
|
||||
- File / dir filtering is supported
|
||||
- Symbolic links are supported
|
||||
- Recursive watching is always supported, instead of partial when using raw events
|
||||
- Includes a way to limit recursion depth
|
||||
|
||||
Chokidar relies on the Node.js core `fs` module, but when using
|
||||
`fs.watch` and `fs.watchFile` for watching, it normalizes the events it
|
||||
receives, often checking for truth by getting file stats and/or dir contents.
|
||||
The `fs.watch`-based implementation is the default, which
|
||||
avoids polling and keeps CPU usage down. Be advised that chokidar will initiate
|
||||
watchers recursively for everything within scope of the paths that have been
|
||||
specified, so be judicious about not wasting system resources by watching much
|
||||
more than needed. For some cases, `fs.watchFile`, which utilizes polling and uses more resources, is used.
|
||||
|
||||
Made for [Brunch](https://brunch.io/) in 2012,
|
||||
it is now used in [~30 million repositories](https://www.npmjs.com/browse/depended/chokidar) and
|
||||
has proven itself in production environments.
|
||||
|
||||
**Sep 2024 update:** v4 is out! It decreases dependency count from 13 to 1, removes
|
||||
support for globs, adds support for ESM / Common.js modules, and bumps minimum node.js version from v8 to v14.
|
||||
Check out [upgrading](#upgrading).
|
||||
|
||||
## Getting started
|
||||
|
||||
Install with npm:
|
||||
|
||||
```sh
|
||||
npm install chokidar
|
||||
```
|
||||
|
||||
Use it in your code:
|
||||
|
||||
```javascript
|
||||
import chokidar from 'chokidar';
|
||||
|
||||
// One-liner for current directory
|
||||
chokidar.watch('.').on('all', (event, path) => {
|
||||
console.log(event, path);
|
||||
});
|
||||
|
||||
|
||||
// Extended options
|
||||
// ----------------
|
||||
|
||||
// Initialize watcher.
|
||||
const watcher = chokidar.watch('file, dir, or array', {
|
||||
ignored: (path, stats) => stats?.isFile() && !path.endsWith('.js'), // only watch js files
|
||||
persistent: true
|
||||
});
|
||||
|
||||
// Something to use when events are received.
|
||||
const log = console.log.bind(console);
|
||||
// Add event listeners.
|
||||
watcher
|
||||
.on('add', path => log(`File ${path} has been added`))
|
||||
.on('change', path => log(`File ${path} has been changed`))
|
||||
.on('unlink', path => log(`File ${path} has been removed`));
|
||||
|
||||
// More possible events.
|
||||
watcher
|
||||
.on('addDir', path => log(`Directory ${path} has been added`))
|
||||
.on('unlinkDir', path => log(`Directory ${path} has been removed`))
|
||||
.on('error', error => log(`Watcher error: ${error}`))
|
||||
.on('ready', () => log('Initial scan complete. Ready for changes'))
|
||||
.on('raw', (event, path, details) => { // internal
|
||||
log('Raw event info:', event, path, details);
|
||||
});
|
||||
|
||||
// 'add', 'addDir' and 'change' events also receive stat() results as second
|
||||
// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats
|
||||
watcher.on('change', (path, stats) => {
|
||||
if (stats) console.log(`File ${path} changed size to ${stats.size}`);
|
||||
});
|
||||
|
||||
// Watch new files.
|
||||
watcher.add('new-file');
|
||||
watcher.add(['new-file-2', 'new-file-3']);
|
||||
|
||||
// Get list of actual paths being watched on the filesystem
|
||||
let watchedPaths = watcher.getWatched();
|
||||
|
||||
// Un-watch some files.
|
||||
await watcher.unwatch('new-file');
|
||||
|
||||
// Stop watching. The method is async!
|
||||
await watcher.close().then(() => console.log('closed'));
|
||||
|
||||
// Full list of options. See below for descriptions.
|
||||
// Do not use this example!
|
||||
chokidar.watch('file', {
|
||||
persistent: true,
|
||||
|
||||
// ignore .txt files
|
||||
ignored: (file) => file.endsWith('.txt'),
|
||||
// watch only .txt files
|
||||
// ignored: (file, _stats) => _stats?.isFile() && !file.endsWith('.txt'),
|
||||
|
||||
awaitWriteFinish: true, // emit single event when chunked writes are completed
|
||||
atomic: true, // emit proper events when "atomic writes" (mv _tmp file) are used
|
||||
|
||||
// The options also allow specifying custom intervals in ms
|
||||
// awaitWriteFinish: {
|
||||
// stabilityThreshold: 2000,
|
||||
// pollInterval: 100
|
||||
// },
|
||||
// atomic: 100,
|
||||
|
||||
interval: 100,
|
||||
binaryInterval: 300,
|
||||
|
||||
cwd: '.',
|
||||
depth: 99,
|
||||
|
||||
followSymlinks: true,
|
||||
ignoreInitial: false,
|
||||
ignorePermissionErrors: false,
|
||||
usePolling: false,
|
||||
alwaysStat: false,
|
||||
});
|
||||
|
||||
```
|
||||
|
||||
`chokidar.watch(paths, [options])`
|
||||
|
||||
* `paths` (string or array of strings). Paths to files, dirs to be watched
|
||||
recursively.
|
||||
* `options` (object) Options object as defined below:
|
||||
|
||||
#### Persistence
|
||||
|
||||
* `persistent` (default: `true`). Indicates whether the process
|
||||
should continue to run as long as files are being watched.
|
||||
|
||||
#### Path filtering
|
||||
|
||||
* `ignored` function, regex, or path. Defines files/paths to be ignored.
|
||||
The whole relative or absolute path is tested, not just filename. If a function with two arguments
|
||||
is provided, it gets called twice per path - once with a single argument (the path), second
|
||||
time with two arguments (the path and the
|
||||
[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
|
||||
object of that path).
|
||||
* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while
|
||||
instantiating the watching as chokidar discovers these file paths (before the `ready` event).
|
||||
* `followSymlinks` (default: `true`). When `false`, only the
|
||||
symlinks themselves will be watched for changes instead of following
|
||||
the link references and bubbling events through the link's path.
|
||||
* `cwd` (no default). The base directory from which watch `paths` are to be
|
||||
derived. Paths emitted with events will be relative to this.
|
||||
|
||||
#### Performance
|
||||
|
||||
* `usePolling` (default: `false`).
|
||||
Whether to use fs.watchFile (backed by polling), or fs.watch. If polling
|
||||
leads to high CPU utilization, consider setting this to `false`. It is
|
||||
typically necessary to **set this to `true` to successfully watch files over
|
||||
a network**, and it may be necessary to successfully watch files in other
|
||||
non-standard situations. Setting to `true` explicitly on MacOS overrides the
|
||||
`useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable
|
||||
to true (1) or false (0) in order to override this option.
|
||||
* _Polling-specific settings_ (effective when `usePolling: true`)
|
||||
* `interval` (default: `100`). Interval of file system polling, in milliseconds. You may also
|
||||
set the CHOKIDAR_INTERVAL env variable to override this option.
|
||||
* `binaryInterval` (default: `300`). Interval of file system
|
||||
polling for binary files.
|
||||
([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json))
|
||||
* `alwaysStat` (default: `false`). If relying upon the
|
||||
[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
|
||||
object that may get passed with `add`, `addDir`, and `change` events, set
|
||||
this to `true` to ensure it is provided even in cases where it wasn't
|
||||
already available from the underlying watch events.
|
||||
* `depth` (default: `undefined`). If set, limits how many levels of
|
||||
subdirectories will be traversed.
|
||||
* `awaitWriteFinish` (default: `false`).
|
||||
By default, the `add` event will fire when a file first appears on disk, before
|
||||
the entire file has been written. Furthermore, in some cases some `change`
|
||||
events will be emitted while the file is being written. In some cases,
|
||||
especially when watching for large files there will be a need to wait for the
|
||||
write operation to finish before responding to a file creation or modification.
|
||||
Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size,
|
||||
holding its `add` and `change` events until the size does not change for a
|
||||
configurable amount of time. The appropriate duration setting is heavily
|
||||
dependent on the OS and hardware. For accurate detection this parameter should
|
||||
be relatively high, making file watching much less responsive.
|
||||
Use with caution.
|
||||
* *`options.awaitWriteFinish` can be set to an object in order to adjust
|
||||
timing params:*
|
||||
* `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in
|
||||
milliseconds for a file size to remain constant before emitting its event.
|
||||
* `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds.
|
||||
|
||||
#### Errors
|
||||
|
||||
* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files
|
||||
that don't have read permissions if possible. If watching fails due to `EPERM`
|
||||
or `EACCES` with this set to `true`, the errors will be suppressed silently.
|
||||
* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`).
|
||||
Automatically filters out artifacts that occur when using editors that use
|
||||
"atomic writes" instead of writing directly to the source file. If a file is
|
||||
re-added within 100 ms of being deleted, Chokidar emits a `change` event
|
||||
rather than `unlink` then `add`. If the default of 100 ms does not work well
|
||||
for you, you can override it by setting `atomic` to a custom value, in
|
||||
milliseconds.
|
||||
|
||||
### Methods & Events
|
||||
|
||||
`chokidar.watch()` produces an instance of `FSWatcher`. Methods of `FSWatcher`:
|
||||
|
||||
* `.add(path / paths)`: Add files, directories for tracking.
|
||||
Takes an array of strings or just one string.
|
||||
* `.on(event, callback)`: Listen for an FS event.
|
||||
Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`,
|
||||
`raw`, `error`.
|
||||
Additionally `all` is available which gets emitted with the underlying event
|
||||
name and path for every event other than `ready`, `raw`, and `error`. `raw` is internal, use it carefully.
|
||||
* `.unwatch(path / paths)`: Stop watching files or directories.
|
||||
Takes an array of strings or just one string.
|
||||
* `.close()`: **async** Removes all listeners from watched files. Asynchronous, returns Promise. Use with `await` to ensure bugs don't happen.
|
||||
* `.getWatched()`: Returns an object representing all the paths on the file
|
||||
system being watched by this `FSWatcher` instance. The object's keys are all the
|
||||
directories (using absolute paths unless the `cwd` option was used), and the
|
||||
values are arrays of the names of the items contained in each directory.
|
||||
|
||||
### CLI
|
||||
|
||||
Check out third party [chokidar-cli](https://github.com/open-cli-tools/chokidar-cli),
|
||||
which allows to execute a command on each change, or get a stdio stream of change events.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
Sometimes, Chokidar runs out of file handles, causing `EMFILE` and `ENOSP` errors:
|
||||
|
||||
* `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell`
|
||||
* `Error: watch /home/ ENOSPC`
|
||||
|
||||
There are two things that can cause it.
|
||||
|
||||
1. Exhausted file handles for generic fs operations
|
||||
- Can be solved by using [graceful-fs](https://www.npmjs.com/package/graceful-fs),
|
||||
which can monkey-patch native `fs` module used by chokidar: `let fs = require('fs'); let grfs = require('graceful-fs'); grfs.gracefulify(fs);`
|
||||
- Can also be solved by tuning OS: `echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p`.
|
||||
2. Exhausted file handles for `fs.watch`
|
||||
- Can't seem to be solved by graceful-fs or OS tuning
|
||||
- It's possible to start using `usePolling: true`, which will switch backend to resource-intensive `fs.watchFile`
|
||||
|
||||
All fsevents-related issues (`WARN optional dep failed`, `fsevents is not a constructor`) are solved by upgrading to v4+.
|
||||
|
||||
## Changelog
|
||||
|
||||
- **v4 (Sep 2024):** remove glob support and bundled fsevents. Decrease dependency count from 13 to 1. Rewrite in typescript. Bumps minimum node.js requirement to v14+
|
||||
- **v3 (Apr 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16+.
|
||||
- **v2 (Dec 2017):** globs are now posix-style-only. Tons of bugfixes.
|
||||
- **v1 (Apr 2015):** glob support, symlink support, tons of bugfixes. Node 0.8+ is supported
|
||||
- **v0.1 (Apr 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66)
|
||||
|
||||
### Upgrading
|
||||
|
||||
If you've used globs before and want do replicate the functionality with v4:
|
||||
|
||||
```js
|
||||
// v3
|
||||
chok.watch('**/*.js');
|
||||
chok.watch("./directory/**/*");
|
||||
|
||||
// v4
|
||||
chok.watch('.', {
|
||||
ignored: (path, stats) => stats?.isFile() && !path.endsWith('.js'), // only watch js files
|
||||
});
|
||||
chok.watch('./directory');
|
||||
|
||||
// other way
|
||||
import { glob } from 'node:fs/promises';
|
||||
const watcher = watch(await Array.fromAsync(glob('**/*.js')));
|
||||
|
||||
// unwatching
|
||||
// v3
|
||||
chok.unwatch('**/*.js');
|
||||
// v4
|
||||
chok.unwatch(await glob('**/*.js'));
|
||||
```
|
||||
|
||||
## Also
|
||||
|
||||
Why was chokidar named this way? What's the meaning behind it?
|
||||
|
||||
>Chowkidar is a transliteration of a Hindi word meaning 'watchman, gatekeeper', चौकीदार. This ultimately comes from Sanskrit _ चतुष्क_ (crossway, quadrangle, consisting-of-four). This word is also used in other languages like Urdu as (چوکیدار) which is widely used in Pakistan and India.
|
||||
|
||||
## License
|
||||
|
||||
MIT (c) Paul Miller (<https://paulmillr.com>), see [LICENSE](LICENSE) file.
|
||||
90
node_modules/sass/node_modules/chokidar/esm/handler.d.ts
generated
vendored
Normal file
90
node_modules/sass/node_modules/chokidar/esm/handler.d.ts
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
import type { WatchEventType, Stats, FSWatcher as NativeFsWatcher } from 'fs';
|
||||
import type { FSWatcher, WatchHelper, Throttler } from './index.js';
|
||||
import type { EntryInfo } from 'readdirp';
|
||||
export type Path = string;
|
||||
export declare const STR_DATA = "data";
|
||||
export declare const STR_END = "end";
|
||||
export declare const STR_CLOSE = "close";
|
||||
export declare const EMPTY_FN: () => void;
|
||||
export declare const IDENTITY_FN: (val: unknown) => unknown;
|
||||
export declare const isWindows: boolean;
|
||||
export declare const isMacos: boolean;
|
||||
export declare const isLinux: boolean;
|
||||
export declare const isFreeBSD: boolean;
|
||||
export declare const isIBMi: boolean;
|
||||
export declare const EVENTS: {
|
||||
readonly ALL: "all";
|
||||
readonly READY: "ready";
|
||||
readonly ADD: "add";
|
||||
readonly CHANGE: "change";
|
||||
readonly ADD_DIR: "addDir";
|
||||
readonly UNLINK: "unlink";
|
||||
readonly UNLINK_DIR: "unlinkDir";
|
||||
readonly RAW: "raw";
|
||||
readonly ERROR: "error";
|
||||
};
|
||||
export type EventName = (typeof EVENTS)[keyof typeof EVENTS];
|
||||
export type FsWatchContainer = {
|
||||
listeners: (path: string) => void | Set<any>;
|
||||
errHandlers: (err: unknown) => void | Set<any>;
|
||||
rawEmitters: (ev: WatchEventType, path: string, opts: unknown) => void | Set<any>;
|
||||
watcher: NativeFsWatcher;
|
||||
watcherUnusable?: boolean;
|
||||
};
|
||||
export interface WatchHandlers {
|
||||
listener: (path: string) => void;
|
||||
errHandler: (err: unknown) => void;
|
||||
rawEmitter: (ev: WatchEventType, path: string, opts: unknown) => void;
|
||||
}
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
export declare class NodeFsHandler {
|
||||
fsw: FSWatcher;
|
||||
_boundHandleError: (error: unknown) => void;
|
||||
constructor(fsW: FSWatcher);
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param path to file or dir
|
||||
* @param listener on fs change
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path: string, listener: (path: string, newStats?: any) => void | Promise<void>): (() => void) | undefined;
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file: Path, stats: Stats, initialAdd: boolean): (() => void) | undefined;
|
||||
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param entry returned by readdirp
|
||||
* @param directory path of dir being read
|
||||
* @param path of this item
|
||||
* @param item basename of this item
|
||||
* @returns true if no more processing is needed for this entry.
|
||||
*/
|
||||
_handleSymlink(entry: EntryInfo, directory: string, path: Path, item: string): Promise<boolean | undefined>;
|
||||
_handleRead(directory: string, initialAdd: boolean, wh: WatchHelper, target: Path, dir: Path, depth: number, throttler: Throttler): Promise<unknown> | undefined;
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param dir fs path
|
||||
* @param stats
|
||||
* @param initialAdd
|
||||
* @param depth relative to user-supplied path
|
||||
* @param target child path targeted for watch
|
||||
* @param wh Common watch helpers for this path
|
||||
* @param realpath
|
||||
* @returns closer for the watcher instance.
|
||||
*/
|
||||
_handleDir(dir: string, stats: Stats, initialAdd: boolean, depth: number, target: string, wh: WatchHelper, realpath: string): Promise<(() => void) | undefined>;
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param path to file or ir
|
||||
* @param initialAdd was the file added at watch instantiation?
|
||||
* @param priorWh depth relative to user-supplied path
|
||||
* @param depth Child path actually targeted for watch
|
||||
* @param target Child path actually targeted for watch
|
||||
*/
|
||||
_addToNodeFs(path: string, initialAdd: boolean, priorWh: WatchHelper | undefined, depth: number, target?: string): Promise<string | false | undefined>;
|
||||
}
|
||||
629
node_modules/sass/node_modules/chokidar/esm/handler.js
generated
vendored
Normal file
629
node_modules/sass/node_modules/chokidar/esm/handler.js
generated
vendored
Normal file
@@ -0,0 +1,629 @@
|
||||
import { watchFile, unwatchFile, watch as fs_watch } from 'fs';
|
||||
import { open, stat, lstat, realpath as fsrealpath } from 'fs/promises';
|
||||
import * as sysPath from 'path';
|
||||
import { type as osType } from 'os';
|
||||
export const STR_DATA = 'data';
|
||||
export const STR_END = 'end';
|
||||
export const STR_CLOSE = 'close';
|
||||
export const EMPTY_FN = () => { };
|
||||
export const IDENTITY_FN = (val) => val;
|
||||
const pl = process.platform;
|
||||
export const isWindows = pl === 'win32';
|
||||
export const isMacos = pl === 'darwin';
|
||||
export const isLinux = pl === 'linux';
|
||||
export const isFreeBSD = pl === 'freebsd';
|
||||
export const isIBMi = osType() === 'OS400';
|
||||
export const EVENTS = {
|
||||
ALL: 'all',
|
||||
READY: 'ready',
|
||||
ADD: 'add',
|
||||
CHANGE: 'change',
|
||||
ADD_DIR: 'addDir',
|
||||
UNLINK: 'unlink',
|
||||
UNLINK_DIR: 'unlinkDir',
|
||||
RAW: 'raw',
|
||||
ERROR: 'error',
|
||||
};
|
||||
const EV = EVENTS;
|
||||
const THROTTLE_MODE_WATCH = 'watch';
|
||||
const statMethods = { lstat, stat };
|
||||
const KEY_LISTENERS = 'listeners';
|
||||
const KEY_ERR = 'errHandlers';
|
||||
const KEY_RAW = 'rawEmitters';
|
||||
const HANDLER_KEYS = [KEY_LISTENERS, KEY_ERR, KEY_RAW];
|
||||
// prettier-ignore
|
||||
const binaryExtensions = new Set([
|
||||
'3dm', '3ds', '3g2', '3gp', '7z', 'a', 'aac', 'adp', 'afdesign', 'afphoto', 'afpub', 'ai',
|
||||
'aif', 'aiff', 'alz', 'ape', 'apk', 'appimage', 'ar', 'arj', 'asf', 'au', 'avi',
|
||||
'bak', 'baml', 'bh', 'bin', 'bk', 'bmp', 'btif', 'bz2', 'bzip2',
|
||||
'cab', 'caf', 'cgm', 'class', 'cmx', 'cpio', 'cr2', 'cur', 'dat', 'dcm', 'deb', 'dex', 'djvu',
|
||||
'dll', 'dmg', 'dng', 'doc', 'docm', 'docx', 'dot', 'dotm', 'dra', 'DS_Store', 'dsk', 'dts',
|
||||
'dtshd', 'dvb', 'dwg', 'dxf',
|
||||
'ecelp4800', 'ecelp7470', 'ecelp9600', 'egg', 'eol', 'eot', 'epub', 'exe',
|
||||
'f4v', 'fbs', 'fh', 'fla', 'flac', 'flatpak', 'fli', 'flv', 'fpx', 'fst', 'fvt',
|
||||
'g3', 'gh', 'gif', 'graffle', 'gz', 'gzip',
|
||||
'h261', 'h263', 'h264', 'icns', 'ico', 'ief', 'img', 'ipa', 'iso',
|
||||
'jar', 'jpeg', 'jpg', 'jpgv', 'jpm', 'jxr', 'key', 'ktx',
|
||||
'lha', 'lib', 'lvp', 'lz', 'lzh', 'lzma', 'lzo',
|
||||
'm3u', 'm4a', 'm4v', 'mar', 'mdi', 'mht', 'mid', 'midi', 'mj2', 'mka', 'mkv', 'mmr', 'mng',
|
||||
'mobi', 'mov', 'movie', 'mp3',
|
||||
'mp4', 'mp4a', 'mpeg', 'mpg', 'mpga', 'mxu',
|
||||
'nef', 'npx', 'numbers', 'nupkg',
|
||||
'o', 'odp', 'ods', 'odt', 'oga', 'ogg', 'ogv', 'otf', 'ott',
|
||||
'pages', 'pbm', 'pcx', 'pdb', 'pdf', 'pea', 'pgm', 'pic', 'png', 'pnm', 'pot', 'potm',
|
||||
'potx', 'ppa', 'ppam',
|
||||
'ppm', 'pps', 'ppsm', 'ppsx', 'ppt', 'pptm', 'pptx', 'psd', 'pya', 'pyc', 'pyo', 'pyv',
|
||||
'qt',
|
||||
'rar', 'ras', 'raw', 'resources', 'rgb', 'rip', 'rlc', 'rmf', 'rmvb', 'rpm', 'rtf', 'rz',
|
||||
's3m', 's7z', 'scpt', 'sgi', 'shar', 'snap', 'sil', 'sketch', 'slk', 'smv', 'snk', 'so',
|
||||
'stl', 'suo', 'sub', 'swf',
|
||||
'tar', 'tbz', 'tbz2', 'tga', 'tgz', 'thmx', 'tif', 'tiff', 'tlz', 'ttc', 'ttf', 'txz',
|
||||
'udf', 'uvh', 'uvi', 'uvm', 'uvp', 'uvs', 'uvu',
|
||||
'viv', 'vob',
|
||||
'war', 'wav', 'wax', 'wbmp', 'wdp', 'weba', 'webm', 'webp', 'whl', 'wim', 'wm', 'wma',
|
||||
'wmv', 'wmx', 'woff', 'woff2', 'wrm', 'wvx',
|
||||
'xbm', 'xif', 'xla', 'xlam', 'xls', 'xlsb', 'xlsm', 'xlsx', 'xlt', 'xltm', 'xltx', 'xm',
|
||||
'xmind', 'xpi', 'xpm', 'xwd', 'xz',
|
||||
'z', 'zip', 'zipx',
|
||||
]);
|
||||
const isBinaryPath = (filePath) => binaryExtensions.has(sysPath.extname(filePath).slice(1).toLowerCase());
|
||||
// TODO: emit errors properly. Example: EMFILE on Macos.
|
||||
const foreach = (val, fn) => {
|
||||
if (val instanceof Set) {
|
||||
val.forEach(fn);
|
||||
}
|
||||
else {
|
||||
fn(val);
|
||||
}
|
||||
};
|
||||
const addAndConvert = (main, prop, item) => {
|
||||
let container = main[prop];
|
||||
if (!(container instanceof Set)) {
|
||||
main[prop] = container = new Set([container]);
|
||||
}
|
||||
container.add(item);
|
||||
};
|
||||
const clearItem = (cont) => (key) => {
|
||||
const set = cont[key];
|
||||
if (set instanceof Set) {
|
||||
set.clear();
|
||||
}
|
||||
else {
|
||||
delete cont[key];
|
||||
}
|
||||
};
|
||||
const delFromSet = (main, prop, item) => {
|
||||
const container = main[prop];
|
||||
if (container instanceof Set) {
|
||||
container.delete(item);
|
||||
}
|
||||
else if (container === item) {
|
||||
delete main[prop];
|
||||
}
|
||||
};
|
||||
const isEmptySet = (val) => (val instanceof Set ? val.size === 0 : !val);
|
||||
const FsWatchInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watch interface
|
||||
* @param path to be watched
|
||||
* @param options to be passed to fs_watch
|
||||
* @param listener main event handler
|
||||
* @param errHandler emits info about errors
|
||||
* @param emitRaw emits raw event data
|
||||
* @returns {NativeFsWatcher}
|
||||
*/
|
||||
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
|
||||
const handleEvent = (rawEvent, evPath) => {
|
||||
listener(path);
|
||||
emitRaw(rawEvent, evPath, { watchedPath: path });
|
||||
// emit based on events occurring for files from a directory's watcher in
|
||||
// case the file's watcher misses it (and rely on throttling to de-dupe)
|
||||
if (evPath && path !== evPath) {
|
||||
fsWatchBroadcast(sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath));
|
||||
}
|
||||
};
|
||||
try {
|
||||
return fs_watch(path, {
|
||||
persistent: options.persistent,
|
||||
}, handleEvent);
|
||||
}
|
||||
catch (error) {
|
||||
errHandler(error);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Helper for passing fs_watch event data to a collection of listeners
|
||||
* @param fullPath absolute path bound to fs_watch instance
|
||||
*/
|
||||
const fsWatchBroadcast = (fullPath, listenerType, val1, val2, val3) => {
|
||||
const cont = FsWatchInstances.get(fullPath);
|
||||
if (!cont)
|
||||
return;
|
||||
foreach(cont[listenerType], (listener) => {
|
||||
listener(val1, val2, val3);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Instantiates the fs_watch interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path
|
||||
* @param fullPath absolute path
|
||||
* @param options to be passed to fs_watch
|
||||
* @param handlers container for event listener functions
|
||||
*/
|
||||
const setFsWatchListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, errHandler, rawEmitter } = handlers;
|
||||
let cont = FsWatchInstances.get(fullPath);
|
||||
let watcher;
|
||||
if (!options.persistent) {
|
||||
watcher = createFsWatchInstance(path, options, listener, errHandler, rawEmitter);
|
||||
if (!watcher)
|
||||
return;
|
||||
return watcher.close.bind(watcher);
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_ERR, errHandler);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
watcher = createFsWatchInstance(path, options, fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), errHandler, // no need to use broadcast here
|
||||
fsWatchBroadcast.bind(null, fullPath, KEY_RAW));
|
||||
if (!watcher)
|
||||
return;
|
||||
watcher.on(EV.ERROR, async (error) => {
|
||||
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
|
||||
if (cont)
|
||||
cont.watcherUnusable = true; // documented since Node 10.4.1
|
||||
// Workaround for https://github.com/joyent/node/issues/4337
|
||||
if (isWindows && error.code === 'EPERM') {
|
||||
try {
|
||||
const fd = await open(path, 'r');
|
||||
await fd.close();
|
||||
broadcastErr(error);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
}
|
||||
else {
|
||||
broadcastErr(error);
|
||||
}
|
||||
});
|
||||
cont = {
|
||||
listeners: listener,
|
||||
errHandlers: errHandler,
|
||||
rawEmitters: rawEmitter,
|
||||
watcher,
|
||||
};
|
||||
FsWatchInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// removes this instance's listeners and closes the underlying fs_watch
|
||||
// instance if there are no more listeners left
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_ERR, errHandler);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
// Check to protect against issue gh-730.
|
||||
// if (cont.watcherUnusable) {
|
||||
cont.watcher.close();
|
||||
// }
|
||||
FsWatchInstances.delete(fullPath);
|
||||
HANDLER_KEYS.forEach(clearItem(cont));
|
||||
// @ts-ignore
|
||||
cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
// fs_watchFile helpers
|
||||
// object to hold per-process fs_watchFile instances
|
||||
// (may be shared across chokidar FSWatcher instances)
|
||||
const FsWatchFileInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watchFile interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path to be watched
|
||||
* @param fullPath absolute path
|
||||
* @param options options to be passed to fs_watchFile
|
||||
* @param handlers container for event listener functions
|
||||
* @returns closer
|
||||
*/
|
||||
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, rawEmitter } = handlers;
|
||||
let cont = FsWatchFileInstances.get(fullPath);
|
||||
// let listeners = new Set();
|
||||
// let rawEmitters = new Set();
|
||||
const copts = cont && cont.options;
|
||||
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
|
||||
// "Upgrade" the watcher to persistence or a quicker interval.
|
||||
// This creates some unlikely edge case issues if the user mixes
|
||||
// settings in a very weird way, but solving for those cases
|
||||
// doesn't seem worthwhile for the added complexity.
|
||||
// listeners = cont.listeners;
|
||||
// rawEmitters = cont.rawEmitters;
|
||||
unwatchFile(fullPath);
|
||||
cont = undefined;
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
// TODO
|
||||
// listeners.add(listener);
|
||||
// rawEmitters.add(rawEmitter);
|
||||
cont = {
|
||||
listeners: listener,
|
||||
rawEmitters: rawEmitter,
|
||||
options,
|
||||
watcher: watchFile(fullPath, options, (curr, prev) => {
|
||||
foreach(cont.rawEmitters, (rawEmitter) => {
|
||||
rawEmitter(EV.CHANGE, fullPath, { curr, prev });
|
||||
});
|
||||
const currmtime = curr.mtimeMs;
|
||||
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
|
||||
foreach(cont.listeners, (listener) => listener(path, curr));
|
||||
}
|
||||
}),
|
||||
};
|
||||
FsWatchFileInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// Removes this instance's listeners and closes the underlying fs_watchFile
|
||||
// instance if there are no more listeners left.
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
FsWatchFileInstances.delete(fullPath);
|
||||
unwatchFile(fullPath);
|
||||
cont.options = cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
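// --- Illustrative sketch (not part of chokidar) ---------------------------
// The two helpers above share one native watcher per path and hand each
// caller a closer that removes only that caller's listeners, closing the
// native watcher once the last one is gone. A minimal standalone version of
// that pattern, with hypothetical names (`sharedWatch`, `instances`), could
// look roughly like this:
import { watch as nativeWatch } from 'fs';

const instances = new Map(); // fullPath -> { watcher, listeners: Set }

function sharedWatch(fullPath, listener) {
    let entry = instances.get(fullPath);
    if (!entry) {
        entry = { listeners: new Set(), watcher: undefined };
        // one fs.watch instance per path, fanned out to every listener
        entry.watcher = nativeWatch(fullPath, (event, filename) => {
            entry.listeners.forEach((fn) => fn(event, filename));
        });
        instances.set(fullPath, entry);
    }
    entry.listeners.add(listener);
    // closer: detach this caller only; tear down when nobody is left
    return () => {
        entry.listeners.delete(listener);
        if (entry.listeners.size === 0) {
            entry.watcher.close();
            instances.delete(fullPath);
        }
    };
}
// --------------------------------------------------------------------------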
/**
|
||||
* @mixin
|
||||
*/
|
||||
export class NodeFsHandler {
|
||||
constructor(fsW) {
|
||||
this.fsw = fsW;
|
||||
this._boundHandleError = (error) => fsW._handleError(error);
|
||||
}
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param path to file or dir
|
||||
* @param listener on fs change
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path, listener) {
|
||||
const opts = this.fsw.options;
|
||||
const directory = sysPath.dirname(path);
|
||||
const basename = sysPath.basename(path);
|
||||
const parent = this.fsw._getWatchedDir(directory);
|
||||
parent.add(basename);
|
||||
const absolutePath = sysPath.resolve(path);
|
||||
const options = {
|
||||
persistent: opts.persistent,
|
||||
};
|
||||
if (!listener)
|
||||
listener = EMPTY_FN;
|
||||
let closer;
|
||||
if (opts.usePolling) {
|
||||
const enableBin = opts.interval !== opts.binaryInterval;
|
||||
options.interval = enableBin && isBinaryPath(basename) ? opts.binaryInterval : opts.interval;
|
||||
closer = setFsWatchFileListener(path, absolutePath, options, {
|
||||
listener,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
else {
|
||||
closer = setFsWatchListener(path, absolutePath, options, {
|
||||
listener,
|
||||
errHandler: this._boundHandleError,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file, stats, initialAdd) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const dirname = sysPath.dirname(file);
|
||||
const basename = sysPath.basename(file);
|
||||
const parent = this.fsw._getWatchedDir(dirname);
|
||||
// stats is always present
|
||||
let prevStats = stats;
|
||||
// if the file is already being watched, do nothing
|
||||
if (parent.has(basename))
|
||||
return;
|
||||
const listener = async (path, newStats) => {
|
||||
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5))
|
||||
return;
|
||||
if (!newStats || newStats.mtimeMs === 0) {
|
||||
try {
|
||||
const newStats = await stat(file);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// Check that the change event was not fired only because the access time changed.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
if ((isMacos || isLinux || isFreeBSD) && prevStats.ino !== newStats.ino) {
|
||||
this.fsw._closeFile(path);
|
||||
prevStats = newStats;
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
}
|
||||
else {
|
||||
prevStats = newStats;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// Fix issues where mtime is null but file is still present
|
||||
this.fsw._remove(dirname, basename);
|
||||
}
|
||||
// an `add` event is about to be emitted if the file is not already tracked in the parent
|
||||
}
|
||||
else if (parent.has(basename)) {
|
||||
// Check that the change event was not fired only because the access time changed.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
prevStats = newStats;
|
||||
}
|
||||
};
|
||||
// kick off the watcher
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
// emit an add event if we're supposed to
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
|
||||
if (!this.fsw._throttle(EV.ADD, file, 0))
|
||||
return;
|
||||
this.fsw._emit(EV.ADD, file, stats);
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
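// A small illustrative extraction of the guard used in _handleFile's listener
// above: emit `change` unless the only thing that moved was the access time.
// The helper name `shouldEmitChange` is ours, not chokidar's.
function shouldEmitChange(prevStats, newStats) {
    const at = newStats.atimeMs;
    const mt = newStats.mtimeMs;
    // no atime, atime not newer than mtime, or mtime actually moved => emit
    return !at || at <= mt || mt !== prevStats.mtimeMs;
}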
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param entry returned by readdirp
|
||||
* @param directory path of dir being read
|
||||
* @param path of this item
|
||||
* @param item basename of this item
|
||||
* @returns true if no more processing is needed for this entry.
|
||||
*/
|
||||
async _handleSymlink(entry, directory, path, item) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const full = entry.fullPath;
|
||||
const dir = this.fsw._getWatchedDir(directory);
|
||||
if (!this.fsw.options.followSymlinks) {
|
||||
// watch symlink directly (don't follow) and detect changes
|
||||
this.fsw._incrReadyCount();
|
||||
let linkPath;
|
||||
try {
|
||||
linkPath = await fsrealpath(path);
|
||||
}
|
||||
catch (e) {
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (dir.has(item)) {
|
||||
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.CHANGE, path, entry.stats);
|
||||
}
|
||||
}
|
||||
else {
|
||||
dir.add(item);
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.ADD, path, entry.stats);
|
||||
}
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
// don't follow the same symlink more than once
|
||||
if (this.fsw._symlinkPaths.has(full)) {
|
||||
return true;
|
||||
}
|
||||
this.fsw._symlinkPaths.set(full, true);
|
||||
}
|
||||
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
|
||||
// Normalize the directory name on Windows
|
||||
directory = sysPath.join(directory, '');
|
||||
throttler = this.fsw._throttle('readdir', directory, 1000);
|
||||
if (!throttler)
|
||||
return;
|
||||
const previous = this.fsw._getWatchedDir(wh.path);
|
||||
const current = new Set();
|
||||
let stream = this.fsw._readdirp(directory, {
|
||||
fileFilter: (entry) => wh.filterPath(entry),
|
||||
directoryFilter: (entry) => wh.filterDir(entry),
|
||||
});
|
||||
if (!stream)
|
||||
return;
|
||||
stream
|
||||
.on(STR_DATA, async (entry) => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const item = entry.path;
|
||||
let path = sysPath.join(directory, item);
|
||||
current.add(item);
|
||||
if (entry.stats.isSymbolicLink() &&
|
||||
(await this._handleSymlink(entry, directory, path, item))) {
|
||||
return;
|
||||
}
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
// Files present in the current directory snapshot
// but absent in the previous one are added to the watch list
// and an `add` event is emitted.
|
||||
if (item === target || (!target && !previous.has(item))) {
|
||||
this.fsw._incrReadyCount();
|
||||
// ensure relativeness of path is preserved in case of watcher reuse
|
||||
path = sysPath.join(dir, sysPath.relative(dir, path));
|
||||
this._addToNodeFs(path, initialAdd, wh, depth + 1);
|
||||
}
|
||||
})
|
||||
.on(EV.ERROR, this._boundHandleError);
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!stream)
|
||||
return reject();
|
||||
stream.once(STR_END, () => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const wasThrottled = throttler ? throttler.clear() : false;
|
||||
resolve(undefined);
|
||||
// Files absent from the current directory snapshot
// but present in the previous one emit a `remove` event
// and are removed from @watched[directory].
|
||||
previous
|
||||
.getChildren()
|
||||
.filter((item) => {
|
||||
return item !== directory && !current.has(item);
|
||||
})
|
||||
.forEach((item) => {
|
||||
this.fsw._remove(directory, item);
|
||||
});
|
||||
stream = undefined;
|
||||
// one more time for any missed in case changes came in extremely quickly
|
||||
if (wasThrottled)
|
||||
this._handleRead(directory, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param dir fs path
|
||||
* @param stats
|
||||
* @param initialAdd
|
||||
* @param depth relative to user-supplied path
|
||||
* @param target child path targeted for watch
|
||||
* @param wh Common watch helpers for this path
|
||||
* @param realpath
|
||||
* @returns closer for the watcher instance.
|
||||
*/
|
||||
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
|
||||
const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir));
|
||||
const tracked = parentDir.has(sysPath.basename(dir));
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
|
||||
this.fsw._emit(EV.ADD_DIR, dir, stats);
|
||||
}
|
||||
// ensure dir is tracked (harmless if redundant)
|
||||
parentDir.add(sysPath.basename(dir));
|
||||
this.fsw._getWatchedDir(dir);
|
||||
let throttler;
|
||||
let closer;
|
||||
const oDepth = this.fsw.options.depth;
|
||||
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
|
||||
if (!target) {
|
||||
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
}
|
||||
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
|
||||
// if current directory is removed, do nothing
|
||||
if (stats && stats.mtimeMs === 0)
|
||||
return;
|
||||
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param path to file or dir
|
||||
* @param initialAdd was the file added at watch instantiation?
|
||||
* @param priorWh prior watch helpers whose path filters are reused
* @param depth relative to user-supplied path
* @param target child path actually targeted for watch
|
||||
*/
|
||||
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
|
||||
const ready = this.fsw._emitReady;
|
||||
if (this.fsw._isIgnored(path) || this.fsw.closed) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const wh = this.fsw._getWatchHelpers(path);
|
||||
if (priorWh) {
|
||||
wh.filterPath = (entry) => priorWh.filterPath(entry);
|
||||
wh.filterDir = (entry) => priorWh.filterDir(entry);
|
||||
}
|
||||
// evaluate what is at the path we're being asked to watch
|
||||
try {
|
||||
const stats = await statMethods[wh.statMethod](wh.watchPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (this.fsw._isIgnored(wh.watchPath, stats)) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const follow = this.fsw.options.followSymlinks;
|
||||
let closer;
|
||||
if (stats.isDirectory()) {
|
||||
const absPath = sysPath.resolve(path);
|
||||
const targetPath = follow ? await fsrealpath(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (absPath !== targetPath && targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(absPath, targetPath);
|
||||
}
|
||||
}
|
||||
else if (stats.isSymbolicLink()) {
|
||||
const targetPath = follow ? await fsrealpath(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
const parent = sysPath.dirname(wh.watchPath);
|
||||
this.fsw._getWatchedDir(parent).add(wh.watchPath);
|
||||
this.fsw._emit(EV.ADD, wh.watchPath, stats);
|
||||
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath);
|
||||
}
|
||||
}
|
||||
else {
|
||||
closer = this._handleFile(wh.watchPath, stats, initialAdd);
|
||||
}
|
||||
ready();
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
return false;
|
||||
}
|
||||
catch (error) {
|
||||
if (this.fsw._handleError(error)) {
|
||||
ready();
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
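// Rough call flow for the class above (illustrative summary, simplified):
//   FSWatcher#add(path)
//     -> NodeFsHandler#_addToNodeFs(path, initialAdd, undefined, 0)
//          -> stat or lstat, depending on options.followSymlinks
//          -> _handleDir / _handleFile / symlink branch
//          -> returns a closer, registered via fsw._addPathCloser(path, closer)
//   FSWatcher#close() later invokes every registered closer.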
215
node_modules/sass/node_modules/chokidar/esm/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,215 @@
|
||||
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
import { Stats } from 'fs';
|
||||
import { EventEmitter } from 'events';
|
||||
import { ReaddirpStream, ReaddirpOptions, EntryInfo } from 'readdirp';
|
||||
import { NodeFsHandler, EventName, Path, EVENTS as EV, WatchHandlers } from './handler.js';
|
||||
type AWF = {
|
||||
stabilityThreshold: number;
|
||||
pollInterval: number;
|
||||
};
|
||||
type BasicOpts = {
|
||||
persistent: boolean;
|
||||
ignoreInitial: boolean;
|
||||
followSymlinks: boolean;
|
||||
cwd?: string;
|
||||
usePolling: boolean;
|
||||
interval: number;
|
||||
binaryInterval: number;
|
||||
alwaysStat?: boolean;
|
||||
depth?: number;
|
||||
ignorePermissionErrors: boolean;
|
||||
atomic: boolean | number;
|
||||
};
|
||||
export type Throttler = {
|
||||
timeoutObject: NodeJS.Timeout;
|
||||
clear: () => void;
|
||||
count: number;
|
||||
};
|
||||
export type ChokidarOptions = Partial<BasicOpts & {
|
||||
ignored: Matcher | Matcher[];
|
||||
awaitWriteFinish: boolean | Partial<AWF>;
|
||||
}>;
|
||||
export type FSWInstanceOptions = BasicOpts & {
|
||||
ignored: Matcher[];
|
||||
awaitWriteFinish: false | AWF;
|
||||
};
|
||||
export type ThrottleType = 'readdir' | 'watch' | 'add' | 'remove' | 'change';
|
||||
export type EmitArgs = [path: Path, stats?: Stats];
|
||||
export type EmitErrorArgs = [error: Error, stats?: Stats];
|
||||
export type EmitArgsWithName = [event: EventName, ...EmitArgs];
|
||||
export type MatchFunction = (val: string, stats?: Stats) => boolean;
|
||||
export interface MatcherObject {
|
||||
path: string;
|
||||
recursive?: boolean;
|
||||
}
|
||||
export type Matcher = string | RegExp | MatchFunction | MatcherObject;
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
declare class DirEntry {
|
||||
path: Path;
|
||||
_removeWatcher: (dir: string, base: string) => void;
|
||||
items: Set<Path>;
|
||||
constructor(dir: Path, removeWatcher: (dir: string, base: string) => void);
|
||||
add(item: string): void;
|
||||
remove(item: string): Promise<void>;
|
||||
has(item: string): boolean | undefined;
|
||||
getChildren(): string[];
|
||||
dispose(): void;
|
||||
}
|
||||
export declare class WatchHelper {
|
||||
fsw: FSWatcher;
|
||||
path: string;
|
||||
watchPath: string;
|
||||
fullWatchPath: string;
|
||||
dirParts: string[][];
|
||||
followSymlinks: boolean;
|
||||
statMethod: 'stat' | 'lstat';
|
||||
constructor(path: string, follow: boolean, fsw: FSWatcher);
|
||||
entryPath(entry: EntryInfo): Path;
|
||||
filterPath(entry: EntryInfo): boolean;
|
||||
filterDir(entry: EntryInfo): boolean;
|
||||
}
|
||||
export interface FSWatcherKnownEventMap {
|
||||
[EV.READY]: [];
|
||||
[EV.RAW]: Parameters<WatchHandlers['rawEmitter']>;
|
||||
[EV.ERROR]: Parameters<WatchHandlers['errHandler']>;
|
||||
[EV.ALL]: [event: EventName, ...EmitArgs];
|
||||
}
|
||||
export type FSWatcherEventMap = FSWatcherKnownEventMap & {
|
||||
[k in Exclude<EventName, keyof FSWatcherKnownEventMap>]: EmitArgs;
|
||||
};
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
export declare class FSWatcher extends EventEmitter<FSWatcherEventMap> {
|
||||
closed: boolean;
|
||||
options: FSWInstanceOptions;
|
||||
_closers: Map<string, Array<any>>;
|
||||
_ignoredPaths: Set<Matcher>;
|
||||
_throttled: Map<ThrottleType, Map<any, any>>;
|
||||
_streams: Set<ReaddirpStream>;
|
||||
_symlinkPaths: Map<Path, string | boolean>;
|
||||
_watched: Map<string, DirEntry>;
|
||||
_pendingWrites: Map<string, any>;
|
||||
_pendingUnlinks: Map<string, EmitArgsWithName>;
|
||||
_readyCount: number;
|
||||
_emitReady: () => void;
|
||||
_closePromise?: Promise<void>;
|
||||
_userIgnored?: MatchFunction;
|
||||
_readyEmitted: boolean;
|
||||
_emitRaw: WatchHandlers['rawEmitter'];
|
||||
_boundRemove: (dir: string, item: string) => void;
|
||||
_nodeFsHandler: NodeFsHandler;
|
||||
constructor(_opts?: ChokidarOptions);
|
||||
_addIgnoredPath(matcher: Matcher): void;
|
||||
_removeIgnoredPath(matcher: Matcher): void;
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance.
|
||||
* @param paths_ file or file list. Other arguments are unused
|
||||
*/
|
||||
add(paths_: Path | Path[], _origAdd?: string, _internal?: boolean): FSWatcher;
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
*/
|
||||
unwatch(paths_: Path | Path[]): FSWatcher;
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
*/
|
||||
close(): Promise<void>;
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns object mapping watched directory paths to arrays of contained item names
|
||||
*/
|
||||
getWatched(): Record<string, string[]>;
|
||||
emitWithAll(event: EventName, args: EmitArgs): void;
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param event Type of event
|
||||
* @param path File or directory path
|
||||
* @param stats arguments to be passed with event
|
||||
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_emit(event: EventName, path: Path, stats?: Stats): Promise<this | undefined>;
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error: Error): Error | boolean;
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param actionType type being throttled
|
||||
* @param path being acted upon
|
||||
* @param timeout duration of time to suppress duplicate actions
|
||||
* @returns tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType: ThrottleType, path: Path, timeout: number): Throttler | false;
|
||||
_incrReadyCount(): number;
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When the file size does not change for 'threshold' milliseconds, the callback is called.
|
||||
* @param path being acted upon
|
||||
* @param threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
|
||||
* @param event
|
||||
* @param awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path: Path, threshold: number, event: EventName, awfEmit: (err?: Error, stat?: Stats) => void): void;
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
*/
|
||||
_isIgnored(path: Path, stats?: Stats): boolean;
|
||||
_isntIgnored(path: Path, stat?: Stats): boolean;
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink handling.
|
||||
* @param path file or directory pattern being watched
|
||||
*/
|
||||
_getWatchHelpers(path: Path): WatchHelper;
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param directory path of the directory
|
||||
*/
|
||||
_getWatchedDir(directory: string): DirEntry;
|
||||
/**
|
||||
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
|
||||
*/
|
||||
_hasReadPermissions(stats: Stats): boolean;
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param directory within which the following item is located
|
||||
* @param item base path of item/directory
|
||||
*/
|
||||
_remove(directory: string, item: string, isDirectory?: boolean): void;
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
*/
|
||||
_closePath(path: Path): void;
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
*/
|
||||
_closeFile(path: Path): void;
|
||||
_addPathCloser(path: Path, closer: () => void): void;
|
||||
_readdirp(root: Path, opts?: Partial<ReaddirpOptions>): ReaddirpStream | undefined;
|
||||
}
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param paths file / directory paths
|
||||
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
* @example
|
||||
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
|
||||
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
|
||||
*/
|
||||
export declare function watch(paths: string | string[], options?: ChokidarOptions): FSWatcher;
|
||||
declare const _default: {
|
||||
watch: typeof watch;
|
||||
FSWatcher: typeof FSWatcher;
|
||||
};
|
||||
export default _default;
|
||||
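A hedged usage sketch of the API declared above (illustrative only; it assumes the package is importable as 'chokidar' and that the option values shown suit the project, neither of which this file guarantees):

import { watch } from 'chokidar';

const watcher = watch('src', {
  ignoreInitial: true,
  awaitWriteFinish: { stabilityThreshold: 500, pollInterval: 50 },
  ignored: /(^|[/\\])\../, // skip dotfiles
});

watcher
  .on('add', (path, stats) => console.log('added', path, stats ? stats.size : undefined))
  .on('change', (path) => console.log('changed', path))
  .on('unlink', (path) => console.log('removed', path));

// Later, to stop watching and release resources:
// await watcher.close();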
798
node_modules/sass/node_modules/chokidar/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,798 @@
|
||||
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
import { stat as statcb } from 'fs';
|
||||
import { stat, readdir } from 'fs/promises';
|
||||
import { EventEmitter } from 'events';
|
||||
import * as sysPath from 'path';
|
||||
import { readdirp } from 'readdirp';
|
||||
import { NodeFsHandler, EVENTS as EV, isWindows, isIBMi, EMPTY_FN, STR_CLOSE, STR_END, } from './handler.js';
|
||||
const SLASH = '/';
|
||||
const SLASH_SLASH = '//';
|
||||
const ONE_DOT = '.';
|
||||
const TWO_DOTS = '..';
|
||||
const STRING_TYPE = 'string';
|
||||
const BACK_SLASH_RE = /\\/g;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
const DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
|
||||
const REPLACER_RE = /^\.[/\\]/;
|
||||
function arrify(item) {
|
||||
return Array.isArray(item) ? item : [item];
|
||||
}
|
||||
const isMatcherObject = (matcher) => typeof matcher === 'object' && matcher !== null && !(matcher instanceof RegExp);
|
||||
function createPattern(matcher) {
|
||||
if (typeof matcher === 'function')
|
||||
return matcher;
|
||||
if (typeof matcher === 'string')
|
||||
return (string) => matcher === string;
|
||||
if (matcher instanceof RegExp)
|
||||
return (string) => matcher.test(string);
|
||||
if (typeof matcher === 'object' && matcher !== null) {
|
||||
return (string) => {
|
||||
if (matcher.path === string)
|
||||
return true;
|
||||
if (matcher.recursive) {
|
||||
const relative = sysPath.relative(matcher.path, string);
|
||||
if (!relative) {
|
||||
return false;
|
||||
}
|
||||
return !relative.startsWith('..') && !sysPath.isAbsolute(relative);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
}
|
||||
return () => false;
|
||||
}
|
||||
function normalizePath(path) {
|
||||
if (typeof path !== 'string')
|
||||
throw new Error('string expected');
|
||||
path = sysPath.normalize(path);
|
||||
path = path.replace(/\\/g, '/');
|
||||
let prepend = false;
|
||||
if (path.startsWith('//'))
|
||||
prepend = true;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
while (path.match(DOUBLE_SLASH_RE))
|
||||
path = path.replace(DOUBLE_SLASH_RE, '/');
|
||||
if (prepend)
|
||||
path = '/' + path;
|
||||
return path;
|
||||
}
|
||||
function matchPatterns(patterns, testString, stats) {
|
||||
const path = normalizePath(testString);
|
||||
for (let index = 0; index < patterns.length; index++) {
|
||||
const pattern = patterns[index];
|
||||
if (pattern(path, stats)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function anymatch(matchers, testString) {
|
||||
if (matchers == null) {
|
||||
throw new TypeError('anymatch: specify first argument');
|
||||
}
|
||||
// Early cache for matchers.
|
||||
const matchersArray = arrify(matchers);
|
||||
const patterns = matchersArray.map((matcher) => createPattern(matcher));
|
||||
if (testString == null) {
|
||||
return (testString, stats) => {
|
||||
return matchPatterns(patterns, testString, stats);
|
||||
};
|
||||
}
|
||||
return matchPatterns(patterns, testString);
|
||||
}
|
||||
const unifyPaths = (paths_) => {
|
||||
const paths = arrify(paths_).flat();
|
||||
if (!paths.every((p) => typeof p === STRING_TYPE)) {
|
||||
throw new TypeError(`Non-string provided as watch path: ${paths}`);
|
||||
}
|
||||
return paths.map(normalizePathToUnix);
|
||||
};
|
||||
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
|
||||
// because "//StoragePC/DrivePool/Movies" is a valid network path
|
||||
const toUnix = (string) => {
|
||||
let str = string.replace(BACK_SLASH_RE, SLASH);
|
||||
let prepend = false;
|
||||
if (str.startsWith(SLASH_SLASH)) {
|
||||
prepend = true;
|
||||
}
|
||||
while (str.match(DOUBLE_SLASH_RE)) {
|
||||
str = str.replace(DOUBLE_SLASH_RE, SLASH);
|
||||
}
|
||||
if (prepend) {
|
||||
str = SLASH + str;
|
||||
}
|
||||
return str;
|
||||
};
|
||||
// Our version of upath.normalize
|
||||
// TODO: this is not equal to path-normalize module - investigate why
|
||||
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
|
||||
// TODO: refactor
|
||||
const normalizeIgnored = (cwd = '') => (path) => {
|
||||
if (typeof path === 'string') {
|
||||
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
|
||||
}
|
||||
else {
|
||||
return path;
|
||||
}
|
||||
};
|
||||
const getAbsolutePath = (path, cwd) => {
|
||||
if (sysPath.isAbsolute(path)) {
|
||||
return path;
|
||||
}
|
||||
return sysPath.join(cwd, path);
|
||||
};
|
||||
const EMPTY_SET = Object.freeze(new Set());
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
class DirEntry {
|
||||
constructor(dir, removeWatcher) {
|
||||
this.path = dir;
|
||||
this._removeWatcher = removeWatcher;
|
||||
this.items = new Set();
|
||||
}
|
||||
add(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
if (item !== ONE_DOT && item !== TWO_DOTS)
|
||||
items.add(item);
|
||||
}
|
||||
async remove(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
items.delete(item);
|
||||
if (items.size > 0)
|
||||
return;
|
||||
const dir = this.path;
|
||||
try {
|
||||
await readdir(dir);
|
||||
}
|
||||
catch (err) {
|
||||
if (this._removeWatcher) {
|
||||
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
|
||||
}
|
||||
}
|
||||
}
|
||||
has(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
return items.has(item);
|
||||
}
|
||||
getChildren() {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return [];
|
||||
return [...items.values()];
|
||||
}
|
||||
dispose() {
|
||||
this.items.clear();
|
||||
this.path = '';
|
||||
this._removeWatcher = EMPTY_FN;
|
||||
this.items = EMPTY_SET;
|
||||
Object.freeze(this);
|
||||
}
|
||||
}
|
||||
const STAT_METHOD_F = 'stat';
|
||||
const STAT_METHOD_L = 'lstat';
|
||||
export class WatchHelper {
|
||||
constructor(path, follow, fsw) {
|
||||
this.fsw = fsw;
|
||||
const watchPath = path;
|
||||
this.path = path = path.replace(REPLACER_RE, '');
|
||||
this.watchPath = watchPath;
|
||||
this.fullWatchPath = sysPath.resolve(watchPath);
|
||||
this.dirParts = [];
|
||||
this.dirParts.forEach((parts) => {
|
||||
if (parts.length > 1)
|
||||
parts.pop();
|
||||
});
|
||||
this.followSymlinks = follow;
|
||||
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
|
||||
}
|
||||
entryPath(entry) {
|
||||
return sysPath.join(this.watchPath, sysPath.relative(this.watchPath, entry.fullPath));
|
||||
}
|
||||
filterPath(entry) {
|
||||
const { stats } = entry;
|
||||
if (stats && stats.isSymbolicLink())
|
||||
return this.filterDir(entry);
|
||||
const resolvedPath = this.entryPath(entry);
|
||||
// TODO: what if stats is undefined? remove !
|
||||
return this.fsw._isntIgnored(resolvedPath, stats) && this.fsw._hasReadPermissions(stats);
|
||||
}
|
||||
filterDir(entry) {
|
||||
return this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
export class FSWatcher extends EventEmitter {
|
||||
// Not indenting methods for history's sake; for now.
|
||||
constructor(_opts = {}) {
|
||||
super();
|
||||
this.closed = false;
|
||||
this._closers = new Map();
|
||||
this._ignoredPaths = new Set();
|
||||
this._throttled = new Map();
|
||||
this._streams = new Set();
|
||||
this._symlinkPaths = new Map();
|
||||
this._watched = new Map();
|
||||
this._pendingWrites = new Map();
|
||||
this._pendingUnlinks = new Map();
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
const awf = _opts.awaitWriteFinish;
|
||||
const DEF_AWF = { stabilityThreshold: 2000, pollInterval: 100 };
|
||||
const opts = {
|
||||
// Defaults
|
||||
persistent: true,
|
||||
ignoreInitial: false,
|
||||
ignorePermissionErrors: false,
|
||||
interval: 100,
|
||||
binaryInterval: 300,
|
||||
followSymlinks: true,
|
||||
usePolling: false,
|
||||
// useAsync: false,
|
||||
atomic: true, // NOTE: overwritten later (depends on usePolling)
|
||||
..._opts,
|
||||
// Change format
|
||||
ignored: _opts.ignored ? arrify(_opts.ignored) : arrify([]),
|
||||
awaitWriteFinish: awf === true ? DEF_AWF : typeof awf === 'object' ? { ...DEF_AWF, ...awf } : false,
|
||||
};
|
||||
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
|
||||
if (isIBMi)
|
||||
opts.usePolling = true;
|
||||
// Editor atomic write normalization enabled by default with fs.watch
|
||||
if (opts.atomic === undefined)
|
||||
opts.atomic = !opts.usePolling;
|
||||
// opts.atomic = typeof _opts.atomic === 'number' ? _opts.atomic : 100;
|
||||
// Global override. Useful for developers who need to force polling for all
// instances of chokidar, regardless of usage / dependency depth
|
||||
const envPoll = process.env.CHOKIDAR_USEPOLLING;
|
||||
if (envPoll !== undefined) {
|
||||
const envLower = envPoll.toLowerCase();
|
||||
if (envLower === 'false' || envLower === '0')
|
||||
opts.usePolling = false;
|
||||
else if (envLower === 'true' || envLower === '1')
|
||||
opts.usePolling = true;
|
||||
else
|
||||
opts.usePolling = !!envLower;
|
||||
}
|
||||
const envInterval = process.env.CHOKIDAR_INTERVAL;
|
||||
if (envInterval)
|
||||
opts.interval = Number.parseInt(envInterval, 10);
|
||||
// This is done to emit ready only once, but each 'add' will increase that?
|
||||
let readyCalls = 0;
|
||||
this._emitReady = () => {
|
||||
readyCalls++;
|
||||
if (readyCalls >= this._readyCount) {
|
||||
this._emitReady = EMPTY_FN;
|
||||
this._readyEmitted = true;
|
||||
// use process.nextTick to allow time for listener to be bound
|
||||
process.nextTick(() => this.emit(EV.READY));
|
||||
}
|
||||
};
|
||||
this._emitRaw = (...args) => this.emit(EV.RAW, ...args);
|
||||
this._boundRemove = this._remove.bind(this);
|
||||
this.options = opts;
|
||||
this._nodeFsHandler = new NodeFsHandler(this);
|
||||
// You’re frozen when your heart’s not open.
|
||||
Object.freeze(opts);
|
||||
}
|
||||
_addIgnoredPath(matcher) {
|
||||
if (isMatcherObject(matcher)) {
|
||||
// return early if we already have a deeply equal matcher object
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
if (isMatcherObject(ignored) &&
|
||||
ignored.path === matcher.path &&
|
||||
ignored.recursive === matcher.recursive) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
this._ignoredPaths.add(matcher);
|
||||
}
|
||||
_removeIgnoredPath(matcher) {
|
||||
this._ignoredPaths.delete(matcher);
|
||||
// now find any matcher objects with the matcher as path
|
||||
if (typeof matcher === 'string') {
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
// TODO (43081j): make this more efficient.
|
||||
// probably just make a `this._ignoredDirectories` or some
|
||||
// such thing.
|
||||
if (isMatcherObject(ignored) && ignored.path === matcher) {
|
||||
this._ignoredPaths.delete(ignored);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Public methods
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance.
|
||||
* @param paths_ file or file list. Other arguments are unused
|
||||
*/
|
||||
add(paths_, _origAdd, _internal) {
|
||||
const { cwd } = this.options;
|
||||
this.closed = false;
|
||||
this._closePromise = undefined;
|
||||
let paths = unifyPaths(paths_);
|
||||
if (cwd) {
|
||||
paths = paths.map((path) => {
|
||||
const absPath = getAbsolutePath(path, cwd);
|
||||
// Check `path` instead of `absPath` because the cwd portion can't be a glob
|
||||
return absPath;
|
||||
});
|
||||
}
|
||||
paths.forEach((path) => {
|
||||
this._removeIgnoredPath(path);
|
||||
});
|
||||
this._userIgnored = undefined;
|
||||
if (!this._readyCount)
|
||||
this._readyCount = 0;
|
||||
this._readyCount += paths.length;
|
||||
Promise.all(paths.map(async (path) => {
|
||||
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, undefined, 0, _origAdd);
|
||||
if (res)
|
||||
this._emitReady();
|
||||
return res;
|
||||
})).then((results) => {
|
||||
if (this.closed)
|
||||
return;
|
||||
results.forEach((item) => {
|
||||
if (item)
|
||||
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
|
||||
});
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
*/
|
||||
unwatch(paths_) {
|
||||
if (this.closed)
|
||||
return this;
|
||||
const paths = unifyPaths(paths_);
|
||||
const { cwd } = this.options;
|
||||
paths.forEach((path) => {
|
||||
// convert to absolute path unless relative path already matches
|
||||
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
|
||||
if (cwd)
|
||||
path = sysPath.join(cwd, path);
|
||||
path = sysPath.resolve(path);
|
||||
}
|
||||
this._closePath(path);
|
||||
this._addIgnoredPath(path);
|
||||
if (this._watched.has(path)) {
|
||||
this._addIgnoredPath({
|
||||
path,
|
||||
recursive: true,
|
||||
});
|
||||
}
|
||||
// reset the cached userIgnored anymatch fn
|
||||
// to make ignoredPaths changes effective
|
||||
this._userIgnored = undefined;
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
*/
|
||||
close() {
|
||||
if (this._closePromise) {
|
||||
return this._closePromise;
|
||||
}
|
||||
this.closed = true;
|
||||
// Memory management.
|
||||
this.removeAllListeners();
|
||||
const closers = [];
|
||||
this._closers.forEach((closerList) => closerList.forEach((closer) => {
|
||||
const promise = closer();
|
||||
if (promise instanceof Promise)
|
||||
closers.push(promise);
|
||||
}));
|
||||
this._streams.forEach((stream) => stream.destroy());
|
||||
this._userIgnored = undefined;
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
this._watched.forEach((dirent) => dirent.dispose());
|
||||
this._closers.clear();
|
||||
this._watched.clear();
|
||||
this._streams.clear();
|
||||
this._symlinkPaths.clear();
|
||||
this._throttled.clear();
|
||||
this._closePromise = closers.length
|
||||
? Promise.all(closers).then(() => undefined)
|
||||
: Promise.resolve();
|
||||
return this._closePromise;
|
||||
}
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns object mapping watched directory paths to arrays of contained item names
|
||||
*/
|
||||
getWatched() {
|
||||
const watchList = {};
|
||||
this._watched.forEach((entry, dir) => {
|
||||
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
|
||||
const index = key || ONE_DOT;
|
||||
watchList[index] = entry.getChildren().sort();
|
||||
});
|
||||
return watchList;
|
||||
}
|
||||
emitWithAll(event, args) {
|
||||
this.emit(event, ...args);
|
||||
if (event !== EV.ERROR)
|
||||
this.emit(EV.ALL, event, ...args);
|
||||
}
|
||||
// Common helpers
|
||||
// --------------
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param event Type of event
|
||||
* @param path File or directory path
|
||||
* @param stats arguments to be passed with event
|
||||
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
async _emit(event, path, stats) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const opts = this.options;
|
||||
if (isWindows)
|
||||
path = sysPath.normalize(path);
|
||||
if (opts.cwd)
|
||||
path = sysPath.relative(opts.cwd, path);
|
||||
const args = [path];
|
||||
if (stats != null)
|
||||
args.push(stats);
|
||||
const awf = opts.awaitWriteFinish;
|
||||
let pw;
|
||||
if (awf && (pw = this._pendingWrites.get(path))) {
|
||||
pw.lastChange = new Date();
|
||||
return this;
|
||||
}
|
||||
if (opts.atomic) {
|
||||
if (event === EV.UNLINK) {
|
||||
this._pendingUnlinks.set(path, [event, ...args]);
|
||||
setTimeout(() => {
|
||||
this._pendingUnlinks.forEach((entry, path) => {
|
||||
this.emit(...entry);
|
||||
this.emit(EV.ALL, ...entry);
|
||||
this._pendingUnlinks.delete(path);
|
||||
});
|
||||
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
|
||||
return this;
|
||||
}
|
||||
if (event === EV.ADD && this._pendingUnlinks.has(path)) {
|
||||
event = EV.CHANGE;
|
||||
this._pendingUnlinks.delete(path);
|
||||
}
|
||||
}
|
||||
if (awf && (event === EV.ADD || event === EV.CHANGE) && this._readyEmitted) {
|
||||
const awfEmit = (err, stats) => {
|
||||
if (err) {
|
||||
event = EV.ERROR;
|
||||
args[0] = err;
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
else if (stats) {
|
||||
// if stats doesn't exist the file must have been deleted
|
||||
if (args.length > 1) {
|
||||
args[1] = stats;
|
||||
}
|
||||
else {
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
};
|
||||
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
|
||||
return this;
|
||||
}
|
||||
if (event === EV.CHANGE) {
|
||||
const isThrottled = !this._throttle(EV.CHANGE, path, 50);
|
||||
if (isThrottled)
|
||||
return this;
|
||||
}
|
||||
if (opts.alwaysStat &&
|
||||
stats === undefined &&
|
||||
(event === EV.ADD || event === EV.ADD_DIR || event === EV.CHANGE)) {
|
||||
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
|
||||
let stats;
|
||||
try {
|
||||
stats = await stat(fullPath);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
|
||||
if (!stats || this.closed)
|
||||
return;
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error) {
|
||||
const code = error && error.code;
|
||||
if (error &&
|
||||
code !== 'ENOENT' &&
|
||||
code !== 'ENOTDIR' &&
|
||||
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))) {
|
||||
this.emit(EV.ERROR, error);
|
||||
}
|
||||
return error || this.closed;
|
||||
}
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param actionType type being throttled
|
||||
* @param path being acted upon
|
||||
* @param timeout duration of time to suppress duplicate actions
|
||||
* @returns tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType, path, timeout) {
|
||||
if (!this._throttled.has(actionType)) {
|
||||
this._throttled.set(actionType, new Map());
|
||||
}
|
||||
const action = this._throttled.get(actionType);
|
||||
if (!action)
|
||||
throw new Error('invalid throttle');
|
||||
const actionPath = action.get(path);
|
||||
if (actionPath) {
|
||||
actionPath.count++;
|
||||
return false;
|
||||
}
|
||||
// eslint-disable-next-line prefer-const
|
||||
let timeoutObject;
|
||||
const clear = () => {
|
||||
const item = action.get(path);
|
||||
const count = item ? item.count : 0;
|
||||
action.delete(path);
|
||||
clearTimeout(timeoutObject);
|
||||
if (item)
|
||||
clearTimeout(item.timeoutObject);
|
||||
return count;
|
||||
};
|
||||
timeoutObject = setTimeout(clear, timeout);
|
||||
const thr = { timeoutObject, clear, count: 0 };
|
||||
action.set(path, thr);
|
||||
return thr;
|
||||
}
|
||||
_incrReadyCount() {
|
||||
return this._readyCount++;
|
||||
}
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When the file size does not change for 'threshold' milliseconds, the callback is called.
|
||||
* @param path being acted upon
|
||||
* @param threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
|
||||
* @param event
|
||||
* @param awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path, threshold, event, awfEmit) {
|
||||
const awf = this.options.awaitWriteFinish;
|
||||
if (typeof awf !== 'object')
|
||||
return;
|
||||
const pollInterval = awf.pollInterval;
|
||||
let timeoutHandler;
|
||||
let fullPath = path;
|
||||
if (this.options.cwd && !sysPath.isAbsolute(path)) {
|
||||
fullPath = sysPath.join(this.options.cwd, path);
|
||||
}
|
||||
const now = new Date();
|
||||
const writes = this._pendingWrites;
|
||||
function awaitWriteFinishFn(prevStat) {
|
||||
statcb(fullPath, (err, curStat) => {
|
||||
if (err || !writes.has(path)) {
|
||||
if (err && err.code !== 'ENOENT')
|
||||
awfEmit(err);
|
||||
return;
|
||||
}
|
||||
const now = Number(new Date());
|
||||
if (prevStat && curStat.size !== prevStat.size) {
|
||||
writes.get(path).lastChange = now;
|
||||
}
|
||||
const pw = writes.get(path);
|
||||
const df = now - pw.lastChange;
|
||||
if (df >= threshold) {
|
||||
writes.delete(path);
|
||||
awfEmit(undefined, curStat);
|
||||
}
|
||||
else {
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval, curStat);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (!writes.has(path)) {
|
||||
writes.set(path, {
|
||||
lastChange: now,
|
||||
cancelWait: () => {
|
||||
writes.delete(path);
|
||||
clearTimeout(timeoutHandler);
|
||||
return event;
|
||||
},
|
||||
});
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
*/
|
||||
_isIgnored(path, stats) {
|
||||
if (this.options.atomic && DOT_RE.test(path))
|
||||
return true;
|
||||
if (!this._userIgnored) {
|
||||
const { cwd } = this.options;
|
||||
const ign = this.options.ignored;
|
||||
const ignored = (ign || []).map(normalizeIgnored(cwd));
|
||||
const ignoredPaths = [...this._ignoredPaths];
|
||||
const list = [...ignoredPaths.map(normalizeIgnored(cwd)), ...ignored];
|
||||
this._userIgnored = anymatch(list, undefined);
|
||||
}
|
||||
return this._userIgnored(path, stats);
|
||||
}
|
||||
_isntIgnored(path, stat) {
|
||||
return !this._isIgnored(path, stat);
|
||||
}
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink handling.
|
||||
* @param path file or directory pattern being watched
|
||||
*/
|
||||
_getWatchHelpers(path) {
|
||||
return new WatchHelper(path, this.options.followSymlinks, this);
|
||||
}
|
||||
// Directory helpers
|
||||
// -----------------
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param directory path of the directory
|
||||
*/
|
||||
_getWatchedDir(directory) {
|
||||
const dir = sysPath.resolve(directory);
|
||||
if (!this._watched.has(dir))
|
||||
this._watched.set(dir, new DirEntry(dir, this._boundRemove));
|
||||
return this._watched.get(dir);
|
||||
}
|
||||
// File helpers
|
||||
// ------------
|
||||
/**
|
||||
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
|
||||
*/
|
||||
_hasReadPermissions(stats) {
|
||||
if (this.options.ignorePermissionErrors)
|
||||
return true;
|
||||
return Boolean(Number(stats.mode) & 0o400);
|
||||
}
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param directory within which the following item is located
|
||||
* @param item base path of item/directory
|
||||
*/
|
||||
_remove(directory, item, isDirectory) {
|
||||
// if what is being deleted is a directory, get that directory's paths
|
||||
// for recursive deleting and cleaning of watched object
|
||||
// if it is not a directory, nestedDirectoryChildren will be an empty array
|
||||
const path = sysPath.join(directory, item);
|
||||
const fullPath = sysPath.resolve(path);
|
||||
isDirectory =
|
||||
isDirectory != null ? isDirectory : this._watched.has(path) || this._watched.has(fullPath);
|
||||
// prevent duplicate handling in case of arriving here nearly simultaneously
|
||||
// via multiple paths (such as _handleFile and _handleDir)
|
||||
if (!this._throttle('remove', path, 100))
|
||||
return;
|
||||
// if the only watched file is removed, watch for its return
|
||||
if (!isDirectory && this._watched.size === 1) {
|
||||
this.add(directory, item, true);
|
||||
}
|
||||
// This will create a new entry in the watched object in either case
|
||||
// so we have to do the directory check beforehand
|
||||
const wp = this._getWatchedDir(path);
|
||||
const nestedDirectoryChildren = wp.getChildren();
|
||||
// Recursively remove children directories / files.
|
||||
nestedDirectoryChildren.forEach((nested) => this._remove(path, nested));
|
||||
// Check if item was on the watched list and remove it
|
||||
const parent = this._getWatchedDir(directory);
|
||||
const wasTracked = parent.has(item);
|
||||
parent.remove(item);
|
||||
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
|
||||
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
|
||||
// but never removed from the map in case the path was deleted.
|
||||
// This leads to an incorrect state if the path was recreated:
|
||||
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
|
||||
if (this._symlinkPaths.has(fullPath)) {
|
||||
this._symlinkPaths.delete(fullPath);
|
||||
}
|
||||
// If we are waiting for this file to be fully written, cancel the wait.
|
||||
let relPath = path;
|
||||
if (this.options.cwd)
|
||||
relPath = sysPath.relative(this.options.cwd, path);
|
||||
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
|
||||
const event = this._pendingWrites.get(relPath).cancelWait();
|
||||
if (event === EV.ADD)
|
||||
return;
|
||||
}
|
||||
// The Entry will either be a directory that just got removed
|
||||
// or a bogus entry to a file, in either case we have to remove it
|
||||
this._watched.delete(path);
|
||||
this._watched.delete(fullPath);
|
||||
const eventName = isDirectory ? EV.UNLINK_DIR : EV.UNLINK;
|
||||
if (wasTracked && !this._isIgnored(path))
|
||||
this._emit(eventName, path);
|
||||
// Avoid conflicts if we later create another file with the same name
|
||||
this._closePath(path);
|
||||
}
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
*/
|
||||
_closePath(path) {
|
||||
this._closeFile(path);
|
||||
const dir = sysPath.dirname(path);
|
||||
this._getWatchedDir(dir).remove(sysPath.basename(path));
|
||||
}
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
*/
|
||||
_closeFile(path) {
|
||||
const closers = this._closers.get(path);
|
||||
if (!closers)
|
||||
return;
|
||||
closers.forEach((closer) => closer());
|
||||
this._closers.delete(path);
|
||||
}
|
||||
_addPathCloser(path, closer) {
|
||||
if (!closer)
|
||||
return;
|
||||
let list = this._closers.get(path);
|
||||
if (!list) {
|
||||
list = [];
|
||||
this._closers.set(path, list);
|
||||
}
|
||||
list.push(closer);
|
||||
}
|
||||
_readdirp(root, opts) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const options = { type: EV.ALL, alwaysStat: true, lstat: true, ...opts, depth: 0 };
|
||||
let stream = readdirp(root, options);
|
||||
this._streams.add(stream);
|
||||
stream.once(STR_CLOSE, () => {
|
||||
stream = undefined;
|
||||
});
|
||||
stream.once(STR_END, () => {
|
||||
if (stream) {
|
||||
this._streams.delete(stream);
|
||||
stream = undefined;
|
||||
}
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param paths file / directory paths
|
||||
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
* @example
|
||||
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
|
||||
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
|
||||
*/
|
||||
export function watch(paths, options = {}) {
|
||||
const watcher = new FSWatcher(options);
|
||||
watcher.add(paths);
|
||||
return watcher;
|
||||
}
|
||||
export default { watch, FSWatcher };
|
||||
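Another small sketch of behaviour implemented above (again assuming the package is importable as 'chokidar'): the constructor honours the CHOKIDAR_USEPOLLING environment variable, getWatched() maps each watched directory to its child entry names, and unwatch() starts ignoring the given paths.

import { watch } from 'chokidar';

// Force polling for every chokidar instance in this process,
// as read by the FSWatcher constructor above.
process.env.CHOKIDAR_USEPOLLING = 'true';

const watcher = watch(['docs', 'README.md']);

watcher.on('ready', () => {
  // Maps each watched directory to its child entry names.
  console.log(watcher.getWatched());
  // Close watchers for 'docs' and start ignoring events from it.
  watcher.unwatch('docs');
});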
1
node_modules/sass/node_modules/chokidar/esm/package.json
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{ "type": "module", "sideEffects": false }
|
||||
90
node_modules/sass/node_modules/chokidar/handler.d.ts
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
import type { WatchEventType, Stats, FSWatcher as NativeFsWatcher } from 'fs';
|
||||
import type { FSWatcher, WatchHelper, Throttler } from './index.js';
|
||||
import type { EntryInfo } from 'readdirp';
|
||||
export type Path = string;
|
||||
export declare const STR_DATA = "data";
|
||||
export declare const STR_END = "end";
|
||||
export declare const STR_CLOSE = "close";
|
||||
export declare const EMPTY_FN: () => void;
|
||||
export declare const IDENTITY_FN: (val: unknown) => unknown;
|
||||
export declare const isWindows: boolean;
|
||||
export declare const isMacos: boolean;
|
||||
export declare const isLinux: boolean;
|
||||
export declare const isFreeBSD: boolean;
|
||||
export declare const isIBMi: boolean;
|
||||
export declare const EVENTS: {
|
||||
readonly ALL: "all";
|
||||
readonly READY: "ready";
|
||||
readonly ADD: "add";
|
||||
readonly CHANGE: "change";
|
||||
readonly ADD_DIR: "addDir";
|
||||
readonly UNLINK: "unlink";
|
||||
readonly UNLINK_DIR: "unlinkDir";
|
||||
readonly RAW: "raw";
|
||||
readonly ERROR: "error";
|
||||
};
|
||||
export type EventName = (typeof EVENTS)[keyof typeof EVENTS];
|
||||
export type FsWatchContainer = {
|
||||
listeners: (path: string) => void | Set<any>;
|
||||
errHandlers: (err: unknown) => void | Set<any>;
|
||||
rawEmitters: (ev: WatchEventType, path: string, opts: unknown) => void | Set<any>;
|
||||
watcher: NativeFsWatcher;
|
||||
watcherUnusable?: boolean;
|
||||
};
|
||||
export interface WatchHandlers {
|
||||
listener: (path: string) => void;
|
||||
errHandler: (err: unknown) => void;
|
||||
rawEmitter: (ev: WatchEventType, path: string, opts: unknown) => void;
|
||||
}
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
export declare class NodeFsHandler {
|
||||
fsw: FSWatcher;
|
||||
_boundHandleError: (error: unknown) => void;
|
||||
constructor(fsW: FSWatcher);
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param path to file or dir
|
||||
* @param listener on fs change
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path: string, listener: (path: string, newStats?: any) => void | Promise<void>): (() => void) | undefined;
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file: Path, stats: Stats, initialAdd: boolean): (() => void) | undefined;
|
||||
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param entry returned by readdirp
|
||||
* @param directory path of dir being read
|
||||
* @param path of this item
|
||||
* @param item basename of this item
|
||||
* @returns true if no more processing is needed for this entry.
|
||||
*/
|
||||
_handleSymlink(entry: EntryInfo, directory: string, path: Path, item: string): Promise<boolean | undefined>;
|
||||
_handleRead(directory: string, initialAdd: boolean, wh: WatchHelper, target: Path, dir: Path, depth: number, throttler: Throttler): Promise<unknown> | undefined;
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param dir fs path
|
||||
* @param stats
|
||||
* @param initialAdd
|
||||
* @param depth relative to user-supplied path
|
||||
* @param target child path targeted for watch
|
||||
* @param wh Common watch helpers for this path
|
||||
* @param realpath
|
||||
* @returns closer for the watcher instance.
|
||||
*/
|
||||
_handleDir(dir: string, stats: Stats, initialAdd: boolean, depth: number, target: string, wh: WatchHelper, realpath: string): Promise<(() => void) | undefined>;
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param path to file or dir
|
||||
* @param initialAdd was the file added at watch instantiation?
|
||||
* @param priorWh prior watch helpers whose path filters are reused
* @param depth relative to user-supplied path
* @param target child path actually targeted for watch
|
||||
*/
|
||||
_addToNodeFs(path: string, initialAdd: boolean, priorWh: WatchHelper | undefined, depth: number, target?: string): Promise<string | false | undefined>;
|
||||
}
|
||||
635
node_modules/sass/node_modules/chokidar/handler.js
generated
vendored
Normal file
@@ -0,0 +1,635 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.NodeFsHandler = exports.EVENTS = exports.isIBMi = exports.isFreeBSD = exports.isLinux = exports.isMacos = exports.isWindows = exports.IDENTITY_FN = exports.EMPTY_FN = exports.STR_CLOSE = exports.STR_END = exports.STR_DATA = void 0;
|
||||
const fs_1 = require("fs");
|
||||
const promises_1 = require("fs/promises");
|
||||
const sysPath = require("path");
|
||||
const os_1 = require("os");
|
||||
exports.STR_DATA = 'data';
|
||||
exports.STR_END = 'end';
|
||||
exports.STR_CLOSE = 'close';
|
||||
const EMPTY_FN = () => { };
|
||||
exports.EMPTY_FN = EMPTY_FN;
|
||||
const IDENTITY_FN = (val) => val;
|
||||
exports.IDENTITY_FN = IDENTITY_FN;
|
||||
const pl = process.platform;
|
||||
exports.isWindows = pl === 'win32';
|
||||
exports.isMacos = pl === 'darwin';
|
||||
exports.isLinux = pl === 'linux';
|
||||
exports.isFreeBSD = pl === 'freebsd';
|
||||
exports.isIBMi = (0, os_1.type)() === 'OS400';
|
||||
exports.EVENTS = {
|
||||
ALL: 'all',
|
||||
READY: 'ready',
|
||||
ADD: 'add',
|
||||
CHANGE: 'change',
|
||||
ADD_DIR: 'addDir',
|
||||
UNLINK: 'unlink',
|
||||
UNLINK_DIR: 'unlinkDir',
|
||||
RAW: 'raw',
|
||||
ERROR: 'error',
|
||||
};
|
||||
const EV = exports.EVENTS;
|
||||
const THROTTLE_MODE_WATCH = 'watch';
|
||||
const statMethods = { lstat: promises_1.lstat, stat: promises_1.stat };
|
||||
const KEY_LISTENERS = 'listeners';
|
||||
const KEY_ERR = 'errHandlers';
|
||||
const KEY_RAW = 'rawEmitters';
|
||||
const HANDLER_KEYS = [KEY_LISTENERS, KEY_ERR, KEY_RAW];
|
||||
// prettier-ignore
|
||||
const binaryExtensions = new Set([
|
||||
'3dm', '3ds', '3g2', '3gp', '7z', 'a', 'aac', 'adp', 'afdesign', 'afphoto', 'afpub', 'ai',
|
||||
'aif', 'aiff', 'alz', 'ape', 'apk', 'appimage', 'ar', 'arj', 'asf', 'au', 'avi',
|
||||
'bak', 'baml', 'bh', 'bin', 'bk', 'bmp', 'btif', 'bz2', 'bzip2',
|
||||
'cab', 'caf', 'cgm', 'class', 'cmx', 'cpio', 'cr2', 'cur', 'dat', 'dcm', 'deb', 'dex', 'djvu',
|
||||
'dll', 'dmg', 'dng', 'doc', 'docm', 'docx', 'dot', 'dotm', 'dra', 'DS_Store', 'dsk', 'dts',
|
||||
'dtshd', 'dvb', 'dwg', 'dxf',
|
||||
'ecelp4800', 'ecelp7470', 'ecelp9600', 'egg', 'eol', 'eot', 'epub', 'exe',
|
||||
'f4v', 'fbs', 'fh', 'fla', 'flac', 'flatpak', 'fli', 'flv', 'fpx', 'fst', 'fvt',
|
||||
'g3', 'gh', 'gif', 'graffle', 'gz', 'gzip',
|
||||
'h261', 'h263', 'h264', 'icns', 'ico', 'ief', 'img', 'ipa', 'iso',
|
||||
'jar', 'jpeg', 'jpg', 'jpgv', 'jpm', 'jxr', 'key', 'ktx',
|
||||
'lha', 'lib', 'lvp', 'lz', 'lzh', 'lzma', 'lzo',
|
||||
'm3u', 'm4a', 'm4v', 'mar', 'mdi', 'mht', 'mid', 'midi', 'mj2', 'mka', 'mkv', 'mmr', 'mng',
|
||||
'mobi', 'mov', 'movie', 'mp3',
|
||||
'mp4', 'mp4a', 'mpeg', 'mpg', 'mpga', 'mxu',
|
||||
'nef', 'npx', 'numbers', 'nupkg',
|
||||
'o', 'odp', 'ods', 'odt', 'oga', 'ogg', 'ogv', 'otf', 'ott',
|
||||
'pages', 'pbm', 'pcx', 'pdb', 'pdf', 'pea', 'pgm', 'pic', 'png', 'pnm', 'pot', 'potm',
|
||||
'potx', 'ppa', 'ppam',
|
||||
'ppm', 'pps', 'ppsm', 'ppsx', 'ppt', 'pptm', 'pptx', 'psd', 'pya', 'pyc', 'pyo', 'pyv',
|
||||
'qt',
|
||||
'rar', 'ras', 'raw', 'resources', 'rgb', 'rip', 'rlc', 'rmf', 'rmvb', 'rpm', 'rtf', 'rz',
|
||||
's3m', 's7z', 'scpt', 'sgi', 'shar', 'snap', 'sil', 'sketch', 'slk', 'smv', 'snk', 'so',
|
||||
'stl', 'suo', 'sub', 'swf',
|
||||
'tar', 'tbz', 'tbz2', 'tga', 'tgz', 'thmx', 'tif', 'tiff', 'tlz', 'ttc', 'ttf', 'txz',
|
||||
'udf', 'uvh', 'uvi', 'uvm', 'uvp', 'uvs', 'uvu',
|
||||
'viv', 'vob',
|
||||
'war', 'wav', 'wax', 'wbmp', 'wdp', 'weba', 'webm', 'webp', 'whl', 'wim', 'wm', 'wma',
|
||||
'wmv', 'wmx', 'woff', 'woff2', 'wrm', 'wvx',
|
||||
'xbm', 'xif', 'xla', 'xlam', 'xls', 'xlsb', 'xlsm', 'xlsx', 'xlt', 'xltm', 'xltx', 'xm',
|
||||
'xmind', 'xpi', 'xpm', 'xwd', 'xz',
|
||||
'z', 'zip', 'zipx',
|
||||
]);
|
||||
const isBinaryPath = (filePath) => binaryExtensions.has(sysPath.extname(filePath).slice(1).toLowerCase());
|
||||
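// Illustrative note (comment added for clarity, not upstream chokidar):
// isBinaryPath lower-cases the extension, so isBinaryPath('photo.JPG') is true
// ('jpg' is in the set above) while isBinaryPath('notes.txt') is false; when
// usePolling is enabled, binary paths are polled at opts.binaryInterval
// instead of opts.interval (see _watchWithNodeFs below).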
// TODO: emit errors properly. Example: EMFILE on Macos.
|
||||
const foreach = (val, fn) => {
|
||||
if (val instanceof Set) {
|
||||
val.forEach(fn);
|
||||
}
|
||||
else {
|
||||
fn(val);
|
||||
}
|
||||
};
|
||||
const addAndConvert = (main, prop, item) => {
|
||||
let container = main[prop];
|
||||
if (!(container instanceof Set)) {
|
||||
main[prop] = container = new Set([container]);
|
||||
}
|
||||
container.add(item);
|
||||
};
|
||||
const clearItem = (cont) => (key) => {
|
||||
const set = cont[key];
|
||||
if (set instanceof Set) {
|
||||
set.clear();
|
||||
}
|
||||
else {
|
||||
delete cont[key];
|
||||
}
|
||||
};
|
||||
const delFromSet = (main, prop, item) => {
|
||||
const container = main[prop];
|
||||
if (container instanceof Set) {
|
||||
container.delete(item);
|
||||
}
|
||||
else if (container === item) {
|
||||
delete main[prop];
|
||||
}
|
||||
};
|
||||
const isEmptySet = (val) => (val instanceof Set ? val.size === 0 : !val);
|
||||
const FsWatchInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watch interface
|
||||
* @param path to be watched
|
||||
* @param options to be passed to fs_watch
|
||||
* @param listener main event handler
|
||||
* @param errHandler emits info about errors
|
||||
* @param emitRaw emits raw event data
|
||||
* @returns {NativeFsWatcher}
|
||||
*/
|
||||
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
|
||||
const handleEvent = (rawEvent, evPath) => {
|
||||
listener(path);
|
||||
emitRaw(rawEvent, evPath, { watchedPath: path });
|
||||
// emit based on events occurring for files from a directory's watcher in
|
||||
// case the file's watcher misses it (and rely on throttling to de-dupe)
|
||||
if (evPath && path !== evPath) {
|
||||
fsWatchBroadcast(sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath));
|
||||
}
|
||||
};
|
||||
try {
|
||||
return (0, fs_1.watch)(path, {
|
||||
persistent: options.persistent,
|
||||
}, handleEvent);
|
||||
}
|
||||
catch (error) {
|
||||
errHandler(error);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Helper for passing fs_watch event data to a collection of listeners
|
||||
* @param fullPath absolute path bound to fs_watch instance
|
||||
*/
|
||||
const fsWatchBroadcast = (fullPath, listenerType, val1, val2, val3) => {
|
||||
const cont = FsWatchInstances.get(fullPath);
|
||||
if (!cont)
|
||||
return;
|
||||
foreach(cont[listenerType], (listener) => {
|
||||
listener(val1, val2, val3);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Instantiates the fs_watch interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path
|
||||
* @param fullPath absolute path
|
||||
* @param options to be passed to fs_watch
|
||||
* @param handlers container for event listener functions
|
||||
*/
|
||||
const setFsWatchListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, errHandler, rawEmitter } = handlers;
|
||||
let cont = FsWatchInstances.get(fullPath);
|
||||
let watcher;
|
||||
if (!options.persistent) {
|
||||
watcher = createFsWatchInstance(path, options, listener, errHandler, rawEmitter);
|
||||
if (!watcher)
|
||||
return;
|
||||
return watcher.close.bind(watcher);
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_ERR, errHandler);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
watcher = createFsWatchInstance(path, options, fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), errHandler, // no need to use broadcast here
|
||||
fsWatchBroadcast.bind(null, fullPath, KEY_RAW));
|
||||
if (!watcher)
|
||||
return;
|
||||
watcher.on(EV.ERROR, async (error) => {
|
||||
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
|
||||
if (cont)
|
||||
cont.watcherUnusable = true; // documented since Node 10.4.1
|
||||
// Workaround for https://github.com/joyent/node/issues/4337
|
||||
if (exports.isWindows && error.code === 'EPERM') {
|
||||
try {
|
||||
const fd = await (0, promises_1.open)(path, 'r');
|
||||
await fd.close();
|
||||
broadcastErr(error);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
}
|
||||
else {
|
||||
broadcastErr(error);
|
||||
}
|
||||
});
|
||||
cont = {
|
||||
listeners: listener,
|
||||
errHandlers: errHandler,
|
||||
rawEmitters: rawEmitter,
|
||||
watcher,
|
||||
};
|
||||
FsWatchInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// removes this instance's listeners and closes the underlying fs_watch
|
||||
// instance if there are no more listeners left
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_ERR, errHandler);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
// Check to protect against issue gh-730.
|
||||
// if (cont.watcherUnusable) {
|
||||
cont.watcher.close();
|
||||
// }
|
||||
FsWatchInstances.delete(fullPath);
|
||||
HANDLER_KEYS.forEach(clearItem(cont));
|
||||
// @ts-ignore
|
||||
cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
// fs_watchFile helpers
|
||||
// object to hold per-process fs_watchFile instances
|
||||
// (may be shared across chokidar FSWatcher instances)
|
||||
const FsWatchFileInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watchFile interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path to be watched
|
||||
* @param fullPath absolute path
|
||||
* @param options options to be passed to fs_watchFile
|
||||
* @param handlers container for event listener functions
|
||||
* @returns closer
|
||||
*/
|
||||
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, rawEmitter } = handlers;
|
||||
let cont = FsWatchFileInstances.get(fullPath);
|
||||
// let listeners = new Set();
|
||||
// let rawEmitters = new Set();
|
||||
const copts = cont && cont.options;
|
||||
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
|
||||
// "Upgrade" the watcher to persistence or a quicker interval.
|
||||
// This creates some unlikely edge case issues if the user mixes
|
||||
// settings in a very weird way, but solving for those cases
|
||||
// doesn't seem worthwhile for the added complexity.
|
||||
// listeners = cont.listeners;
|
||||
// rawEmitters = cont.rawEmitters;
|
||||
(0, fs_1.unwatchFile)(fullPath);
|
||||
cont = undefined;
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
// TODO
|
||||
// listeners.add(listener);
|
||||
// rawEmitters.add(rawEmitter);
|
||||
cont = {
|
||||
listeners: listener,
|
||||
rawEmitters: rawEmitter,
|
||||
options,
|
||||
watcher: (0, fs_1.watchFile)(fullPath, options, (curr, prev) => {
|
||||
foreach(cont.rawEmitters, (rawEmitter) => {
|
||||
rawEmitter(EV.CHANGE, fullPath, { curr, prev });
|
||||
});
|
||||
const currmtime = curr.mtimeMs;
|
||||
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
|
||||
foreach(cont.listeners, (listener) => listener(path, curr));
|
||||
}
|
||||
}),
|
||||
};
|
||||
FsWatchFileInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// Removes this instance's listeners and closes the underlying fs_watchFile
|
||||
// instance if there are no more listeners left.
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
FsWatchFileInstances.delete(fullPath);
|
||||
(0, fs_1.unwatchFile)(fullPath);
|
||||
cont.options = cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
class NodeFsHandler {
|
||||
constructor(fsW) {
|
||||
this.fsw = fsW;
|
||||
this._boundHandleError = (error) => fsW._handleError(error);
|
||||
}
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param path to file or dir
|
||||
* @param listener on fs change
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path, listener) {
|
||||
const opts = this.fsw.options;
|
||||
const directory = sysPath.dirname(path);
|
||||
const basename = sysPath.basename(path);
|
||||
const parent = this.fsw._getWatchedDir(directory);
|
||||
parent.add(basename);
|
||||
const absolutePath = sysPath.resolve(path);
|
||||
const options = {
|
||||
persistent: opts.persistent,
|
||||
};
|
||||
if (!listener)
|
||||
listener = exports.EMPTY_FN;
|
||||
let closer;
|
||||
if (opts.usePolling) {
|
||||
const enableBin = opts.interval !== opts.binaryInterval;
|
||||
options.interval = enableBin && isBinaryPath(basename) ? opts.binaryInterval : opts.interval;
|
||||
closer = setFsWatchFileListener(path, absolutePath, options, {
|
||||
listener,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
else {
|
||||
closer = setFsWatchListener(path, absolutePath, options, {
|
||||
listener,
|
||||
errHandler: this._boundHandleError,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file, stats, initialAdd) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const dirname = sysPath.dirname(file);
|
||||
const basename = sysPath.basename(file);
|
||||
const parent = this.fsw._getWatchedDir(dirname);
|
||||
// stats is always present
|
||||
let prevStats = stats;
|
||||
// if the file is already being watched, do nothing
|
||||
if (parent.has(basename))
|
||||
return;
|
||||
const listener = async (path, newStats) => {
|
||||
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5))
|
||||
return;
|
||||
if (!newStats || newStats.mtimeMs === 0) {
|
||||
try {
|
||||
const newStats = await (0, promises_1.stat)(file);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// Check that the change event was not fired only because the access time changed.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
if ((exports.isMacos || exports.isLinux || exports.isFreeBSD) && prevStats.ino !== newStats.ino) {
|
||||
this.fsw._closeFile(path);
|
||||
prevStats = newStats;
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
}
|
||||
else {
|
||||
prevStats = newStats;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// Fix issues where mtime is null but file is still present
|
||||
this.fsw._remove(dirname, basename);
|
||||
}
|
||||
// add is about to be emitted if file not already tracked in parent
|
||||
}
|
||||
else if (parent.has(basename)) {
|
||||
// Check that the change event was not fired only because the access time changed.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
prevStats = newStats;
|
||||
}
|
||||
};
|
||||
// kick off the watcher
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
// emit an add event if we're supposed to
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
|
||||
if (!this.fsw._throttle(EV.ADD, file, 0))
|
||||
return;
|
||||
this.fsw._emit(EV.ADD, file, stats);
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param entry returned by readdirp
|
||||
* @param directory path of dir being read
|
||||
* @param path of this item
|
||||
* @param item basename of this item
|
||||
* @returns true if no more processing is needed for this entry.
|
||||
*/
|
||||
async _handleSymlink(entry, directory, path, item) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const full = entry.fullPath;
|
||||
const dir = this.fsw._getWatchedDir(directory);
|
||||
if (!this.fsw.options.followSymlinks) {
|
||||
// watch symlink directly (don't follow) and detect changes
|
||||
this.fsw._incrReadyCount();
|
||||
let linkPath;
|
||||
try {
|
||||
linkPath = await (0, promises_1.realpath)(path);
|
||||
}
|
||||
catch (e) {
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (dir.has(item)) {
|
||||
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.CHANGE, path, entry.stats);
|
||||
}
|
||||
}
|
||||
else {
|
||||
dir.add(item);
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.ADD, path, entry.stats);
|
||||
}
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
// don't follow the same symlink more than once
|
||||
if (this.fsw._symlinkPaths.has(full)) {
|
||||
return true;
|
||||
}
|
||||
this.fsw._symlinkPaths.set(full, true);
|
||||
}
|
||||
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
|
||||
// Normalize the directory name on Windows
|
||||
directory = sysPath.join(directory, '');
|
||||
throttler = this.fsw._throttle('readdir', directory, 1000);
|
||||
if (!throttler)
|
||||
return;
|
||||
const previous = this.fsw._getWatchedDir(wh.path);
|
||||
const current = new Set();
|
||||
let stream = this.fsw._readdirp(directory, {
|
||||
fileFilter: (entry) => wh.filterPath(entry),
|
||||
directoryFilter: (entry) => wh.filterDir(entry),
|
||||
});
|
||||
if (!stream)
|
||||
return;
|
||||
stream
|
||||
.on(exports.STR_DATA, async (entry) => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const item = entry.path;
|
||||
let path = sysPath.join(directory, item);
|
||||
current.add(item);
|
||||
if (entry.stats.isSymbolicLink() &&
|
||||
(await this._handleSymlink(entry, directory, path, item))) {
|
||||
return;
|
||||
}
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
// Files present in the current directory snapshot but absent from the
// previous one are added to the watch list and emit an `add` event.
|
||||
if (item === target || (!target && !previous.has(item))) {
|
||||
this.fsw._incrReadyCount();
|
||||
// ensure relativeness of path is preserved in case of watcher reuse
|
||||
path = sysPath.join(dir, sysPath.relative(dir, path));
|
||||
this._addToNodeFs(path, initialAdd, wh, depth + 1);
|
||||
}
|
||||
})
|
||||
.on(EV.ERROR, this._boundHandleError);
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!stream)
|
||||
return reject();
|
||||
stream.once(exports.STR_END, () => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const wasThrottled = throttler ? throttler.clear() : false;
|
||||
resolve(undefined);
|
||||
// Files absent from the current directory snapshot but present in the
// previous one emit a `remove` event and are removed from @watched[directory].
|
||||
previous
|
||||
.getChildren()
|
||||
.filter((item) => {
|
||||
return item !== directory && !current.has(item);
|
||||
})
|
||||
.forEach((item) => {
|
||||
this.fsw._remove(directory, item);
|
||||
});
|
||||
stream = undefined;
|
||||
// one more time for any missed in case changes came in extremely quickly
|
||||
if (wasThrottled)
|
||||
this._handleRead(directory, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param dir fs path
|
||||
* @param stats
|
||||
* @param initialAdd
|
||||
* @param depth relative to user-supplied path
|
||||
* @param target child path targeted for watch
|
||||
* @param wh Common watch helpers for this path
|
||||
* @param realpath
|
||||
* @returns closer for the watcher instance.
|
||||
*/
|
||||
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
|
||||
const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir));
|
||||
const tracked = parentDir.has(sysPath.basename(dir));
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
|
||||
this.fsw._emit(EV.ADD_DIR, dir, stats);
|
||||
}
|
||||
// ensure dir is tracked (harmless if redundant)
|
||||
parentDir.add(sysPath.basename(dir));
|
||||
this.fsw._getWatchedDir(dir);
|
||||
let throttler;
|
||||
let closer;
|
||||
const oDepth = this.fsw.options.depth;
|
||||
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
|
||||
if (!target) {
|
||||
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
}
|
||||
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
|
||||
// if current directory is removed, do nothing
|
||||
if (stats && stats.mtimeMs === 0)
|
||||
return;
|
||||
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param path to file or dir
* @param initialAdd was the file added at watch instantiation?
* @param priorWh prior watch helpers whose filters are reused for this path
* @param depth relative to user-supplied path
* @param target Child path actually targeted for watch
|
||||
*/
|
||||
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
|
||||
const ready = this.fsw._emitReady;
|
||||
if (this.fsw._isIgnored(path) || this.fsw.closed) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const wh = this.fsw._getWatchHelpers(path);
|
||||
if (priorWh) {
|
||||
wh.filterPath = (entry) => priorWh.filterPath(entry);
|
||||
wh.filterDir = (entry) => priorWh.filterDir(entry);
|
||||
}
|
||||
// evaluate what is at the path we're being asked to watch
|
||||
try {
|
||||
const stats = await statMethods[wh.statMethod](wh.watchPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (this.fsw._isIgnored(wh.watchPath, stats)) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const follow = this.fsw.options.followSymlinks;
|
||||
let closer;
|
||||
if (stats.isDirectory()) {
|
||||
const absPath = sysPath.resolve(path);
|
||||
const targetPath = follow ? await (0, promises_1.realpath)(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (absPath !== targetPath && targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(absPath, targetPath);
|
||||
}
|
||||
}
|
||||
else if (stats.isSymbolicLink()) {
|
||||
const targetPath = follow ? await (0, promises_1.realpath)(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
const parent = sysPath.dirname(wh.watchPath);
|
||||
this.fsw._getWatchedDir(parent).add(wh.watchPath);
|
||||
this.fsw._emit(EV.ADD, wh.watchPath, stats);
|
||||
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath);
|
||||
}
|
||||
}
|
||||
else {
|
||||
closer = this._handleFile(wh.watchPath, stats, initialAdd);
|
||||
}
|
||||
ready();
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
return false;
|
||||
}
|
||||
catch (error) {
|
||||
if (this.fsw._handleError(error)) {
|
||||
ready();
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.NodeFsHandler = NodeFsHandler;
|
||||
215
node_modules/sass/node_modules/chokidar/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,215 @@
|
||||
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
import { Stats } from 'fs';
|
||||
import { EventEmitter } from 'events';
|
||||
import { ReaddirpStream, ReaddirpOptions, EntryInfo } from 'readdirp';
|
||||
import { NodeFsHandler, EventName, Path, EVENTS as EV, WatchHandlers } from './handler.js';
|
||||
type AWF = {
|
||||
stabilityThreshold: number;
|
||||
pollInterval: number;
|
||||
};
|
||||
type BasicOpts = {
|
||||
persistent: boolean;
|
||||
ignoreInitial: boolean;
|
||||
followSymlinks: boolean;
|
||||
cwd?: string;
|
||||
usePolling: boolean;
|
||||
interval: number;
|
||||
binaryInterval: number;
|
||||
alwaysStat?: boolean;
|
||||
depth?: number;
|
||||
ignorePermissionErrors: boolean;
|
||||
atomic: boolean | number;
|
||||
};
|
||||
export type Throttler = {
|
||||
timeoutObject: NodeJS.Timeout;
|
||||
clear: () => void;
|
||||
count: number;
|
||||
};
|
||||
export type ChokidarOptions = Partial<BasicOpts & {
|
||||
ignored: Matcher | Matcher[];
|
||||
awaitWriteFinish: boolean | Partial<AWF>;
|
||||
}>;
|
||||
export type FSWInstanceOptions = BasicOpts & {
|
||||
ignored: Matcher[];
|
||||
awaitWriteFinish: false | AWF;
|
||||
};
|
||||
export type ThrottleType = 'readdir' | 'watch' | 'add' | 'remove' | 'change';
|
||||
export type EmitArgs = [path: Path, stats?: Stats];
|
||||
export type EmitErrorArgs = [error: Error, stats?: Stats];
|
||||
export type EmitArgsWithName = [event: EventName, ...EmitArgs];
|
||||
export type MatchFunction = (val: string, stats?: Stats) => boolean;
|
||||
export interface MatcherObject {
|
||||
path: string;
|
||||
recursive?: boolean;
|
||||
}
|
||||
export type Matcher = string | RegExp | MatchFunction | MatcherObject;
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
declare class DirEntry {
|
||||
path: Path;
|
||||
_removeWatcher: (dir: string, base: string) => void;
|
||||
items: Set<Path>;
|
||||
constructor(dir: Path, removeWatcher: (dir: string, base: string) => void);
|
||||
add(item: string): void;
|
||||
remove(item: string): Promise<void>;
|
||||
has(item: string): boolean | undefined;
|
||||
getChildren(): string[];
|
||||
dispose(): void;
|
||||
}
|
||||
export declare class WatchHelper {
|
||||
fsw: FSWatcher;
|
||||
path: string;
|
||||
watchPath: string;
|
||||
fullWatchPath: string;
|
||||
dirParts: string[][];
|
||||
followSymlinks: boolean;
|
||||
statMethod: 'stat' | 'lstat';
|
||||
constructor(path: string, follow: boolean, fsw: FSWatcher);
|
||||
entryPath(entry: EntryInfo): Path;
|
||||
filterPath(entry: EntryInfo): boolean;
|
||||
filterDir(entry: EntryInfo): boolean;
|
||||
}
|
||||
export interface FSWatcherKnownEventMap {
|
||||
[EV.READY]: [];
|
||||
[EV.RAW]: Parameters<WatchHandlers['rawEmitter']>;
|
||||
[EV.ERROR]: Parameters<WatchHandlers['errHandler']>;
|
||||
[EV.ALL]: [event: EventName, ...EmitArgs];
|
||||
}
|
||||
export type FSWatcherEventMap = FSWatcherKnownEventMap & {
|
||||
[k in Exclude<EventName, keyof FSWatcherKnownEventMap>]: EmitArgs;
|
||||
};
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
export declare class FSWatcher extends EventEmitter<FSWatcherEventMap> {
|
||||
closed: boolean;
|
||||
options: FSWInstanceOptions;
|
||||
_closers: Map<string, Array<any>>;
|
||||
_ignoredPaths: Set<Matcher>;
|
||||
_throttled: Map<ThrottleType, Map<any, any>>;
|
||||
_streams: Set<ReaddirpStream>;
|
||||
_symlinkPaths: Map<Path, string | boolean>;
|
||||
_watched: Map<string, DirEntry>;
|
||||
_pendingWrites: Map<string, any>;
|
||||
_pendingUnlinks: Map<string, EmitArgsWithName>;
|
||||
_readyCount: number;
|
||||
_emitReady: () => void;
|
||||
_closePromise?: Promise<void>;
|
||||
_userIgnored?: MatchFunction;
|
||||
_readyEmitted: boolean;
|
||||
_emitRaw: WatchHandlers['rawEmitter'];
|
||||
_boundRemove: (dir: string, item: string) => void;
|
||||
_nodeFsHandler: NodeFsHandler;
|
||||
constructor(_opts?: ChokidarOptions);
|
||||
_addIgnoredPath(matcher: Matcher): void;
|
||||
_removeIgnoredPath(matcher: Matcher): void;
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance.
|
||||
* @param paths_ file or file list. Other arguments are unused
|
||||
*/
|
||||
add(paths_: Path | Path[], _origAdd?: string, _internal?: boolean): FSWatcher;
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
*/
|
||||
unwatch(paths_: Path | Path[]): FSWatcher;
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
*/
|
||||
close(): Promise<void>;
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns map of watched directory paths to sorted arrays of the items they contain
|
||||
*/
|
||||
getWatched(): Record<string, string[]>;
|
||||
emitWithAll(event: EventName, args: EmitArgs): void;
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param event Type of event
|
||||
* @param path File or directory path
|
||||
* @param stats arguments to be passed with event
|
||||
* @returns the FSWatcher instance, or undefined if the watcher is closed
|
||||
*/
|
||||
_emit(event: EventName, path: Path, stats?: Stats): Promise<this | undefined>;
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error: Error): Error | boolean;
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param actionType type being throttled
|
||||
* @param path being acted upon
|
||||
* @param timeout duration of time to suppress duplicate actions
|
||||
* @returns tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType: ThrottleType, path: Path, timeout: number): Throttler | false;
|
||||
_incrReadyCount(): number;
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When the file size does not change for 'threshold' milliseconds, the callback is called.
* @param path being acted upon
* @param threshold Time in milliseconds a file's size must remain constant before the write operation is considered finished
|
||||
* @param event
|
||||
* @param awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path: Path, threshold: number, event: EventName, awfEmit: (err?: Error, stat?: Stats) => void): void;
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
*/
|
||||
_isIgnored(path: Path, stats?: Stats): boolean;
|
||||
_isntIgnored(path: Path, stat?: Stats): boolean;
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink handling.
|
||||
* @param path file or directory pattern being watched
|
||||
*/
|
||||
_getWatchHelpers(path: Path): WatchHelper;
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param directory path of the directory
|
||||
*/
|
||||
_getWatchedDir(directory: string): DirEntry;
|
||||
/**
|
||||
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
|
||||
*/
|
||||
_hasReadPermissions(stats: Stats): boolean;
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param directory within which the following item is located
|
||||
* @param item base path of item/directory
|
||||
*/
|
||||
_remove(directory: string, item: string, isDirectory?: boolean): void;
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
*/
|
||||
_closePath(path: Path): void;
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
*/
|
||||
_closeFile(path: Path): void;
|
||||
_addPathCloser(path: Path, closer: () => void): void;
|
||||
_readdirp(root: Path, opts?: Partial<ReaddirpOptions>): ReaddirpStream | undefined;
|
||||
}
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param paths file / directory paths
|
||||
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
* @example
|
||||
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
|
||||
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
|
||||
*/
|
||||
export declare function watch(paths: string | string[], options?: ChokidarOptions): FSWatcher;
|
||||
declare const _default: {
|
||||
watch: typeof watch;
|
||||
FSWatcher: typeof FSWatcher;
|
||||
};
|
||||
export default _default;
|
||||
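The declarations above cover chokidar's public surface. As a rough usage sketch against that API (the paths, filter function, and thresholds below are illustrative placeholders, not taken from this commit):

import chokidar from 'chokidar';

const watcher = chokidar.watch(['src', 'styles'], {
  // example filter: ignore anything that is a file but not a stylesheet
  ignored: (path, stats) => !!stats?.isFile() && !path.endsWith('.scss'),
  ignoreInitial: true,
  awaitWriteFinish: { stabilityThreshold: 500, pollInterval: 50 },
});

watcher
  .on('change', (path, stats) => console.log('changed', path, stats?.size))
  .on('unlink', (path) => console.log('removed', path))
  .on('error', (err) => console.error('watcher error', err));

// later, during shutdown:
await watcher.close();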
804
node_modules/sass/node_modules/chokidar/index.js
generated
vendored
Normal file
@@ -0,0 +1,804 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.FSWatcher = exports.WatchHelper = void 0;
|
||||
exports.watch = watch;
|
||||
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
const fs_1 = require("fs");
|
||||
const promises_1 = require("fs/promises");
|
||||
const events_1 = require("events");
|
||||
const sysPath = require("path");
|
||||
const readdirp_1 = require("readdirp");
|
||||
const handler_js_1 = require("./handler.js");
|
||||
const SLASH = '/';
|
||||
const SLASH_SLASH = '//';
|
||||
const ONE_DOT = '.';
|
||||
const TWO_DOTS = '..';
|
||||
const STRING_TYPE = 'string';
|
||||
const BACK_SLASH_RE = /\\/g;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
const DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
|
||||
const REPLACER_RE = /^\.[/\\]/;
|
||||
function arrify(item) {
|
||||
return Array.isArray(item) ? item : [item];
|
||||
}
|
||||
const isMatcherObject = (matcher) => typeof matcher === 'object' && matcher !== null && !(matcher instanceof RegExp);
|
||||
function createPattern(matcher) {
|
||||
if (typeof matcher === 'function')
|
||||
return matcher;
|
||||
if (typeof matcher === 'string')
|
||||
return (string) => matcher === string;
|
||||
if (matcher instanceof RegExp)
|
||||
return (string) => matcher.test(string);
|
||||
if (typeof matcher === 'object' && matcher !== null) {
|
||||
return (string) => {
|
||||
if (matcher.path === string)
|
||||
return true;
|
||||
if (matcher.recursive) {
|
||||
const relative = sysPath.relative(matcher.path, string);
|
||||
if (!relative) {
|
||||
return false;
|
||||
}
|
||||
return !relative.startsWith('..') && !sysPath.isAbsolute(relative);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
}
|
||||
return () => false;
|
||||
}
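// Illustrative examples (comment added for clarity, not upstream):
// createPattern('src/app.js') yields a predicate that matches only that exact
// string; createPattern(/\.scss$/) matches any path ending in ".scss"; and a
// MatcherObject such as { path: 'src', recursive: true } matches 'src' itself
// plus anything nested under it, via the relative-path check above.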
|
||||
function normalizePath(path) {
|
||||
if (typeof path !== 'string')
|
||||
throw new Error('string expected');
|
||||
path = sysPath.normalize(path);
|
||||
path = path.replace(/\\/g, '/');
|
||||
let prepend = false;
|
||||
if (path.startsWith('//'))
|
||||
prepend = true;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
while (path.match(DOUBLE_SLASH_RE))
|
||||
path = path.replace(DOUBLE_SLASH_RE, '/');
|
||||
if (prepend)
|
||||
path = '/' + path;
|
||||
return path;
|
||||
}
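// Example (illustrative, not upstream): normalizePath('a\\b//c') returns
// 'a/b/c' -- backslashes become forward slashes and repeated slashes collapse,
// except for a leading '//' which is preserved for UNC-style network paths.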
|
||||
function matchPatterns(patterns, testString, stats) {
|
||||
const path = normalizePath(testString);
|
||||
for (let index = 0; index < patterns.length; index++) {
|
||||
const pattern = patterns[index];
|
||||
if (pattern(path, stats)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function anymatch(matchers, testString) {
|
||||
if (matchers == null) {
|
||||
throw new TypeError('anymatch: specify first argument');
|
||||
}
|
||||
// Early cache for matchers.
|
||||
const matchersArray = arrify(matchers);
|
||||
const patterns = matchersArray.map((matcher) => createPattern(matcher));
|
||||
if (testString == null) {
|
||||
return (testString, stats) => {
|
||||
return matchPatterns(patterns, testString, stats);
|
||||
};
|
||||
}
|
||||
return matchPatterns(patterns, testString);
|
||||
}
|
||||
const unifyPaths = (paths_) => {
|
||||
const paths = arrify(paths_).flat();
|
||||
if (!paths.every((p) => typeof p === STRING_TYPE)) {
|
||||
throw new TypeError(`Non-string provided as watch path: ${paths}`);
|
||||
}
|
||||
return paths.map(normalizePathToUnix);
|
||||
};
|
||||
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
|
||||
// because "//StoragePC/DrivePool/Movies" is a valid network path
|
||||
const toUnix = (string) => {
|
||||
let str = string.replace(BACK_SLASH_RE, SLASH);
|
||||
let prepend = false;
|
||||
if (str.startsWith(SLASH_SLASH)) {
|
||||
prepend = true;
|
||||
}
|
||||
while (str.match(DOUBLE_SLASH_RE)) {
|
||||
str = str.replace(DOUBLE_SLASH_RE, SLASH);
|
||||
}
|
||||
if (prepend) {
|
||||
str = SLASH + str;
|
||||
}
|
||||
return str;
|
||||
};
|
||||
// Our version of upath.normalize
|
||||
// TODO: this is not equal to path-normalize module - investigate why
|
||||
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
|
||||
// TODO: refactor
|
||||
const normalizeIgnored = (cwd = '') => (path) => {
|
||||
if (typeof path === 'string') {
|
||||
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
|
||||
}
|
||||
else {
|
||||
return path;
|
||||
}
|
||||
};
|
||||
const getAbsolutePath = (path, cwd) => {
|
||||
if (sysPath.isAbsolute(path)) {
|
||||
return path;
|
||||
}
|
||||
return sysPath.join(cwd, path);
|
||||
};
|
||||
const EMPTY_SET = Object.freeze(new Set());
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
class DirEntry {
|
||||
constructor(dir, removeWatcher) {
|
||||
this.path = dir;
|
||||
this._removeWatcher = removeWatcher;
|
||||
this.items = new Set();
|
||||
}
|
||||
add(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
if (item !== ONE_DOT && item !== TWO_DOTS)
|
||||
items.add(item);
|
||||
}
|
||||
async remove(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
items.delete(item);
|
||||
if (items.size > 0)
|
||||
return;
|
||||
const dir = this.path;
|
||||
try {
|
||||
await (0, promises_1.readdir)(dir);
|
||||
}
|
||||
catch (err) {
|
||||
if (this._removeWatcher) {
|
||||
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
|
||||
}
|
||||
}
|
||||
}
|
||||
has(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
return items.has(item);
|
||||
}
|
||||
getChildren() {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return [];
|
||||
return [...items.values()];
|
||||
}
|
||||
dispose() {
|
||||
this.items.clear();
|
||||
this.path = '';
|
||||
this._removeWatcher = handler_js_1.EMPTY_FN;
|
||||
this.items = EMPTY_SET;
|
||||
Object.freeze(this);
|
||||
}
|
||||
}
|
||||
const STAT_METHOD_F = 'stat';
|
||||
const STAT_METHOD_L = 'lstat';
|
||||
class WatchHelper {
|
||||
constructor(path, follow, fsw) {
|
||||
this.fsw = fsw;
|
||||
const watchPath = path;
|
||||
this.path = path = path.replace(REPLACER_RE, '');
|
||||
this.watchPath = watchPath;
|
||||
this.fullWatchPath = sysPath.resolve(watchPath);
|
||||
this.dirParts = [];
|
||||
this.dirParts.forEach((parts) => {
|
||||
if (parts.length > 1)
|
||||
parts.pop();
|
||||
});
|
||||
this.followSymlinks = follow;
|
||||
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
|
||||
}
|
||||
entryPath(entry) {
|
||||
return sysPath.join(this.watchPath, sysPath.relative(this.watchPath, entry.fullPath));
|
||||
}
|
||||
filterPath(entry) {
|
||||
const { stats } = entry;
|
||||
if (stats && stats.isSymbolicLink())
|
||||
return this.filterDir(entry);
|
||||
const resolvedPath = this.entryPath(entry);
|
||||
// TODO: what if stats is undefined? remove !
|
||||
return this.fsw._isntIgnored(resolvedPath, stats) && this.fsw._hasReadPermissions(stats);
|
||||
}
|
||||
filterDir(entry) {
|
||||
return this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
|
||||
}
|
||||
}
|
||||
exports.WatchHelper = WatchHelper;
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
class FSWatcher extends events_1.EventEmitter {
|
||||
// Not indenting methods for history sake; for now.
|
||||
constructor(_opts = {}) {
|
||||
super();
|
||||
this.closed = false;
|
||||
this._closers = new Map();
|
||||
this._ignoredPaths = new Set();
|
||||
this._throttled = new Map();
|
||||
this._streams = new Set();
|
||||
this._symlinkPaths = new Map();
|
||||
this._watched = new Map();
|
||||
this._pendingWrites = new Map();
|
||||
this._pendingUnlinks = new Map();
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
const awf = _opts.awaitWriteFinish;
|
||||
const DEF_AWF = { stabilityThreshold: 2000, pollInterval: 100 };
|
||||
const opts = {
|
||||
// Defaults
|
||||
persistent: true,
|
||||
ignoreInitial: false,
|
||||
ignorePermissionErrors: false,
|
||||
interval: 100,
|
||||
binaryInterval: 300,
|
||||
followSymlinks: true,
|
||||
usePolling: false,
|
||||
// useAsync: false,
|
||||
atomic: true, // NOTE: overwritten later (depends on usePolling)
|
||||
..._opts,
|
||||
// Change format
|
||||
ignored: _opts.ignored ? arrify(_opts.ignored) : arrify([]),
|
||||
awaitWriteFinish: awf === true ? DEF_AWF : typeof awf === 'object' ? { ...DEF_AWF, ...awf } : false,
|
||||
};
|
||||
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
|
||||
if (handler_js_1.isIBMi)
|
||||
opts.usePolling = true;
|
||||
// Editor atomic write normalization enabled by default with fs.watch
|
||||
if (opts.atomic === undefined)
|
||||
opts.atomic = !opts.usePolling;
|
||||
// opts.atomic = typeof _opts.atomic === 'number' ? _opts.atomic : 100;
|
||||
// Global override. Useful for developers, who need to force polling for all
|
||||
// instances of chokidar, regardless of usage / dependency depth
|
||||
const envPoll = process.env.CHOKIDAR_USEPOLLING;
|
||||
if (envPoll !== undefined) {
|
||||
const envLower = envPoll.toLowerCase();
|
||||
if (envLower === 'false' || envLower === '0')
|
||||
opts.usePolling = false;
|
||||
else if (envLower === 'true' || envLower === '1')
|
||||
opts.usePolling = true;
|
||||
else
|
||||
opts.usePolling = !!envLower;
|
||||
}
|
||||
const envInterval = process.env.CHOKIDAR_INTERVAL;
|
||||
if (envInterval)
|
||||
opts.interval = Number.parseInt(envInterval, 10);
|
||||
// This is done to emit ready only once, but each 'add' will increase that?
|
||||
let readyCalls = 0;
|
||||
this._emitReady = () => {
|
||||
readyCalls++;
|
||||
if (readyCalls >= this._readyCount) {
|
||||
this._emitReady = handler_js_1.EMPTY_FN;
|
||||
this._readyEmitted = true;
|
||||
// use process.nextTick to allow time for listener to be bound
|
||||
process.nextTick(() => this.emit(handler_js_1.EVENTS.READY));
|
||||
}
|
||||
};
|
||||
this._emitRaw = (...args) => this.emit(handler_js_1.EVENTS.RAW, ...args);
|
||||
this._boundRemove = this._remove.bind(this);
|
||||
this.options = opts;
|
||||
this._nodeFsHandler = new handler_js_1.NodeFsHandler(this);
|
||||
// You’re frozen when your heart’s not open.
|
||||
Object.freeze(opts);
|
||||
}
|
||||
_addIgnoredPath(matcher) {
|
||||
if (isMatcherObject(matcher)) {
|
||||
// return early if we already have a deeply equal matcher object
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
if (isMatcherObject(ignored) &&
|
||||
ignored.path === matcher.path &&
|
||||
ignored.recursive === matcher.recursive) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
this._ignoredPaths.add(matcher);
|
||||
}
|
||||
_removeIgnoredPath(matcher) {
|
||||
this._ignoredPaths.delete(matcher);
|
||||
// now find any matcher objects with the matcher as path
|
||||
if (typeof matcher === 'string') {
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
// TODO (43081j): make this more efficient.
|
||||
// probably just make a `this._ignoredDirectories` or some
|
||||
// such thing.
|
||||
if (isMatcherObject(ignored) && ignored.path === matcher) {
|
||||
this._ignoredPaths.delete(ignored);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Public methods
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance.
|
||||
* @param paths_ file or file list. Other arguments are unused
|
||||
*/
|
||||
add(paths_, _origAdd, _internal) {
|
||||
const { cwd } = this.options;
|
||||
this.closed = false;
|
||||
this._closePromise = undefined;
|
||||
let paths = unifyPaths(paths_);
|
||||
if (cwd) {
|
||||
paths = paths.map((path) => {
|
||||
const absPath = getAbsolutePath(path, cwd);
|
||||
// Check `path` instead of `absPath` because the cwd portion can't be a glob
|
||||
return absPath;
|
||||
});
|
||||
}
|
||||
paths.forEach((path) => {
|
||||
this._removeIgnoredPath(path);
|
||||
});
|
||||
this._userIgnored = undefined;
|
||||
if (!this._readyCount)
|
||||
this._readyCount = 0;
|
||||
this._readyCount += paths.length;
|
||||
Promise.all(paths.map(async (path) => {
|
||||
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, undefined, 0, _origAdd);
|
||||
if (res)
|
||||
this._emitReady();
|
||||
return res;
|
||||
})).then((results) => {
|
||||
if (this.closed)
|
||||
return;
|
||||
results.forEach((item) => {
|
||||
if (item)
|
||||
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
|
||||
});
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
*/
|
||||
unwatch(paths_) {
|
||||
if (this.closed)
|
||||
return this;
|
||||
const paths = unifyPaths(paths_);
|
||||
const { cwd } = this.options;
|
||||
paths.forEach((path) => {
|
||||
// convert to absolute path unless relative path already matches
|
||||
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
|
||||
if (cwd)
|
||||
path = sysPath.join(cwd, path);
|
||||
path = sysPath.resolve(path);
|
||||
}
|
||||
this._closePath(path);
|
||||
this._addIgnoredPath(path);
|
||||
if (this._watched.has(path)) {
|
||||
this._addIgnoredPath({
|
||||
path,
|
||||
recursive: true,
|
||||
});
|
||||
}
|
||||
// reset the cached userIgnored anymatch fn
|
||||
// to make ignoredPaths changes effective
|
||||
this._userIgnored = undefined;
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
*/
|
||||
close() {
|
||||
if (this._closePromise) {
|
||||
return this._closePromise;
|
||||
}
|
||||
this.closed = true;
|
||||
// Memory management.
|
||||
this.removeAllListeners();
|
||||
const closers = [];
|
||||
this._closers.forEach((closerList) => closerList.forEach((closer) => {
|
||||
const promise = closer();
|
||||
if (promise instanceof Promise)
|
||||
closers.push(promise);
|
||||
}));
|
||||
this._streams.forEach((stream) => stream.destroy());
|
||||
this._userIgnored = undefined;
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
this._watched.forEach((dirent) => dirent.dispose());
|
||||
this._closers.clear();
|
||||
this._watched.clear();
|
||||
this._streams.clear();
|
||||
this._symlinkPaths.clear();
|
||||
this._throttled.clear();
|
||||
this._closePromise = closers.length
|
||||
? Promise.all(closers).then(() => undefined)
|
||||
: Promise.resolve();
|
||||
return this._closePromise;
|
||||
}
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns map of watched directory paths to sorted arrays of the items they contain
|
||||
*/
|
||||
getWatched() {
|
||||
const watchList = {};
|
||||
this._watched.forEach((entry, dir) => {
|
||||
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
|
||||
const index = key || ONE_DOT;
|
||||
watchList[index] = entry.getChildren().sort();
|
||||
});
|
||||
return watchList;
|
||||
}
|
||||
emitWithAll(event, args) {
|
||||
this.emit(event, ...args);
|
||||
if (event !== handler_js_1.EVENTS.ERROR)
|
||||
this.emit(handler_js_1.EVENTS.ALL, event, ...args);
|
||||
}
|
||||
// Common helpers
|
||||
// --------------
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param event Type of event
|
||||
* @param path File or directory path
|
||||
* @param stats arguments to be passed with event
|
||||
* @returns the FSWatcher instance, or undefined if the watcher is closed
|
||||
*/
|
||||
async _emit(event, path, stats) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const opts = this.options;
|
||||
if (handler_js_1.isWindows)
|
||||
path = sysPath.normalize(path);
|
||||
if (opts.cwd)
|
||||
path = sysPath.relative(opts.cwd, path);
|
||||
const args = [path];
|
||||
if (stats != null)
|
||||
args.push(stats);
|
||||
const awf = opts.awaitWriteFinish;
|
||||
let pw;
|
||||
if (awf && (pw = this._pendingWrites.get(path))) {
|
||||
pw.lastChange = new Date();
|
||||
return this;
|
||||
}
|
||||
if (opts.atomic) {
|
||||
if (event === handler_js_1.EVENTS.UNLINK) {
|
||||
this._pendingUnlinks.set(path, [event, ...args]);
|
||||
setTimeout(() => {
|
||||
this._pendingUnlinks.forEach((entry, path) => {
|
||||
this.emit(...entry);
|
||||
this.emit(handler_js_1.EVENTS.ALL, ...entry);
|
||||
this._pendingUnlinks.delete(path);
|
||||
});
|
||||
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
|
||||
return this;
|
||||
}
|
||||
if (event === handler_js_1.EVENTS.ADD && this._pendingUnlinks.has(path)) {
|
||||
event = handler_js_1.EVENTS.CHANGE;
|
||||
this._pendingUnlinks.delete(path);
|
||||
}
|
||||
}
|
||||
if (awf && (event === handler_js_1.EVENTS.ADD || event === handler_js_1.EVENTS.CHANGE) && this._readyEmitted) {
|
||||
const awfEmit = (err, stats) => {
|
||||
if (err) {
|
||||
event = handler_js_1.EVENTS.ERROR;
|
||||
args[0] = err;
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
else if (stats) {
|
||||
// if stats doesn't exist the file must have been deleted
|
||||
if (args.length > 1) {
|
||||
args[1] = stats;
|
||||
}
|
||||
else {
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
};
|
||||
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
|
||||
return this;
|
||||
}
|
||||
if (event === handler_js_1.EVENTS.CHANGE) {
|
||||
const isThrottled = !this._throttle(handler_js_1.EVENTS.CHANGE, path, 50);
|
||||
if (isThrottled)
|
||||
return this;
|
||||
}
|
||||
if (opts.alwaysStat &&
|
||||
stats === undefined &&
|
||||
(event === handler_js_1.EVENTS.ADD || event === handler_js_1.EVENTS.ADD_DIR || event === handler_js_1.EVENTS.CHANGE)) {
|
||||
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
|
||||
let stats;
|
||||
try {
|
||||
stats = await (0, promises_1.stat)(fullPath);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
|
||||
if (!stats || this.closed)
|
||||
return;
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error) {
|
||||
const code = error && error.code;
|
||||
if (error &&
|
||||
code !== 'ENOENT' &&
|
||||
code !== 'ENOTDIR' &&
|
||||
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))) {
|
||||
this.emit(handler_js_1.EVENTS.ERROR, error);
|
||||
}
|
||||
return error || this.closed;
|
||||
}
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param actionType type being throttled
|
||||
* @param path being acted upon
|
||||
* @param timeout duration of time to suppress duplicate actions
|
||||
* @returns tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType, path, timeout) {
|
||||
if (!this._throttled.has(actionType)) {
|
||||
this._throttled.set(actionType, new Map());
|
||||
}
|
||||
const action = this._throttled.get(actionType);
|
||||
if (!action)
|
||||
throw new Error('invalid throttle');
|
||||
const actionPath = action.get(path);
|
||||
if (actionPath) {
|
||||
actionPath.count++;
|
||||
return false;
|
||||
}
|
||||
// eslint-disable-next-line prefer-const
|
||||
let timeoutObject;
|
||||
const clear = () => {
|
||||
const item = action.get(path);
|
||||
const count = item ? item.count : 0;
|
||||
action.delete(path);
|
||||
clearTimeout(timeoutObject);
|
||||
if (item)
|
||||
clearTimeout(item.timeoutObject);
|
||||
return count;
|
||||
};
|
||||
timeoutObject = setTimeout(clear, timeout);
|
||||
const thr = { timeoutObject, clear, count: 0 };
|
||||
action.set(path, thr);
|
||||
return thr;
|
||||
}
|
||||
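// Illustrative behaviour (comment added for clarity, not upstream): the first
// _throttle('change', path, 50) call for a path returns a tracking object;
// further calls for the same path within the next 50 ms return false and are
// suppressed; clear() cancels the timer, removes the entry, and returns how
// many calls were suppressed.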
_incrReadyCount() {
|
||||
return this._readyCount++;
|
||||
}
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When the file size does not change for 'threshold' milliseconds, the callback is called.
* @param path being acted upon
* @param threshold Time in milliseconds a file's size must remain constant before the write operation is considered finished
|
||||
* @param event
|
||||
* @param awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path, threshold, event, awfEmit) {
|
||||
const awf = this.options.awaitWriteFinish;
|
||||
if (typeof awf !== 'object')
|
||||
return;
|
||||
const pollInterval = awf.pollInterval;
|
||||
let timeoutHandler;
|
||||
let fullPath = path;
|
||||
if (this.options.cwd && !sysPath.isAbsolute(path)) {
|
||||
fullPath = sysPath.join(this.options.cwd, path);
|
||||
}
|
||||
const now = new Date();
|
||||
const writes = this._pendingWrites;
|
||||
function awaitWriteFinishFn(prevStat) {
|
||||
(0, fs_1.stat)(fullPath, (err, curStat) => {
|
||||
if (err || !writes.has(path)) {
|
||||
if (err && err.code !== 'ENOENT')
|
||||
awfEmit(err);
|
||||
return;
|
||||
}
|
||||
const now = Number(new Date());
|
||||
if (prevStat && curStat.size !== prevStat.size) {
|
||||
writes.get(path).lastChange = now;
|
||||
}
|
||||
const pw = writes.get(path);
|
||||
const df = now - pw.lastChange;
|
||||
if (df >= threshold) {
|
||||
writes.delete(path);
|
||||
awfEmit(undefined, curStat);
|
||||
}
|
||||
else {
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval, curStat);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (!writes.has(path)) {
|
||||
writes.set(path, {
|
||||
lastChange: now,
|
||||
cancelWait: () => {
|
||||
writes.delete(path);
|
||||
clearTimeout(timeoutHandler);
|
||||
return event;
|
||||
},
|
||||
});
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
*/
|
||||
_isIgnored(path, stats) {
|
||||
if (this.options.atomic && DOT_RE.test(path))
|
||||
return true;
|
||||
if (!this._userIgnored) {
|
||||
const { cwd } = this.options;
|
||||
const ign = this.options.ignored;
|
||||
const ignored = (ign || []).map(normalizeIgnored(cwd));
|
||||
const ignoredPaths = [...this._ignoredPaths];
|
||||
const list = [...ignoredPaths.map(normalizeIgnored(cwd)), ...ignored];
|
||||
this._userIgnored = anymatch(list, undefined);
|
||||
}
|
||||
return this._userIgnored(path, stats);
|
||||
}
|
||||
_isntIgnored(path, stat) {
|
||||
return !this._isIgnored(path, stat);
|
||||
}
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink handling.
|
||||
* @param path file or directory pattern being watched
|
||||
*/
|
||||
_getWatchHelpers(path) {
|
||||
return new WatchHelper(path, this.options.followSymlinks, this);
|
||||
}
|
||||
// Directory helpers
|
||||
// -----------------
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param directory path of the directory
|
||||
*/
|
||||
_getWatchedDir(directory) {
|
||||
const dir = sysPath.resolve(directory);
|
||||
if (!this._watched.has(dir))
|
||||
this._watched.set(dir, new DirEntry(dir, this._boundRemove));
|
||||
return this._watched.get(dir);
|
||||
}
|
||||
// File helpers
|
||||
// ------------
|
||||
/**
|
||||
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
|
||||
*/
|
||||
_hasReadPermissions(stats) {
|
||||
if (this.options.ignorePermissionErrors)
|
||||
return true;
|
||||
return Boolean(Number(stats.mode) & 0o400);
|
||||
}
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param directory within which the following item is located
|
||||
* @param item base path of item/directory
|
||||
*/
|
||||
_remove(directory, item, isDirectory) {
|
||||
// if what is being deleted is a directory, get that directory's paths
|
||||
// for recursive deleting and cleaning of watched object
|
||||
// if it is not a directory, nestedDirectoryChildren will be empty array
|
||||
const path = sysPath.join(directory, item);
|
||||
const fullPath = sysPath.resolve(path);
|
||||
isDirectory =
|
||||
isDirectory != null ? isDirectory : this._watched.has(path) || this._watched.has(fullPath);
|
||||
// prevent duplicate handling in case of arriving here nearly simultaneously
|
||||
// via multiple paths (such as _handleFile and _handleDir)
|
||||
if (!this._throttle('remove', path, 100))
|
||||
return;
|
||||
// if the only watched file is removed, watch for its return
|
||||
if (!isDirectory && this._watched.size === 1) {
|
||||
this.add(directory, item, true);
|
||||
}
|
||||
// This will create a new entry in the watched object in either case
|
||||
// so we got to do the directory check beforehand
|
||||
const wp = this._getWatchedDir(path);
|
||||
const nestedDirectoryChildren = wp.getChildren();
|
||||
// Recursively remove children directories / files.
|
||||
nestedDirectoryChildren.forEach((nested) => this._remove(path, nested));
|
||||
// Check if item was on the watched list and remove it
|
||||
const parent = this._getWatchedDir(directory);
|
||||
const wasTracked = parent.has(item);
|
||||
parent.remove(item);
|
||||
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
|
||||
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
|
||||
// but never removed from the map in case the path was deleted.
|
||||
// This leads to an incorrect state if the path was recreated:
|
||||
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
|
||||
if (this._symlinkPaths.has(fullPath)) {
|
||||
this._symlinkPaths.delete(fullPath);
|
||||
}
|
||||
// If we wait for this file to be fully written, cancel the wait.
|
||||
let relPath = path;
|
||||
if (this.options.cwd)
|
||||
relPath = sysPath.relative(this.options.cwd, path);
|
||||
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
|
||||
const event = this._pendingWrites.get(relPath).cancelWait();
|
||||
if (event === handler_js_1.EVENTS.ADD)
|
||||
return;
|
||||
}
|
||||
// The Entry will either be a directory that just got removed
|
||||
// or a bogus entry to a file, in either case we have to remove it
|
||||
this._watched.delete(path);
|
||||
this._watched.delete(fullPath);
|
||||
const eventName = isDirectory ? handler_js_1.EVENTS.UNLINK_DIR : handler_js_1.EVENTS.UNLINK;
|
||||
if (wasTracked && !this._isIgnored(path))
|
||||
this._emit(eventName, path);
|
||||
// Avoid conflicts if we later create another file with the same name
|
||||
this._closePath(path);
|
||||
}
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
*/
|
||||
_closePath(path) {
|
||||
this._closeFile(path);
|
||||
const dir = sysPath.dirname(path);
|
||||
this._getWatchedDir(dir).remove(sysPath.basename(path));
|
||||
}
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
*/
|
||||
_closeFile(path) {
|
||||
const closers = this._closers.get(path);
|
||||
if (!closers)
|
||||
return;
|
||||
closers.forEach((closer) => closer());
|
||||
this._closers.delete(path);
|
||||
}
|
||||
_addPathCloser(path, closer) {
|
||||
if (!closer)
|
||||
return;
|
||||
let list = this._closers.get(path);
|
||||
if (!list) {
|
||||
list = [];
|
||||
this._closers.set(path, list);
|
||||
}
|
||||
list.push(closer);
|
||||
}
|
||||
_readdirp(root, opts) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const options = { type: handler_js_1.EVENTS.ALL, alwaysStat: true, lstat: true, ...opts, depth: 0 };
|
||||
let stream = (0, readdirp_1.readdirp)(root, options);
|
||||
this._streams.add(stream);
|
||||
stream.once(handler_js_1.STR_CLOSE, () => {
|
||||
stream = undefined;
|
||||
});
|
||||
stream.once(handler_js_1.STR_END, () => {
|
||||
if (stream) {
|
||||
this._streams.delete(stream);
|
||||
stream = undefined;
|
||||
}
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
exports.FSWatcher = FSWatcher;
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param paths file / directory paths
|
||||
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
* @example
|
||||
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
|
||||
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
|
||||
*/
|
||||
function watch(paths, options = {}) {
|
||||
const watcher = new FSWatcher(options);
|
||||
watcher.add(paths);
|
||||
return watcher;
|
||||
}
|
||||
exports.default = { watch, FSWatcher };
|
||||
69
node_modules/sass/node_modules/chokidar/package.json
generated
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
{
|
||||
"name": "chokidar",
|
||||
"description": "Minimal and efficient cross-platform file watching library",
|
||||
"version": "4.0.3",
|
||||
"homepage": "https://github.com/paulmillr/chokidar",
|
||||
"author": "Paul Miller (https://paulmillr.com)",
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"handler.js",
|
||||
"handler.d.ts",
|
||||
"esm"
|
||||
],
|
||||
"main": "./index.js",
|
||||
"module": "./esm/index.js",
|
||||
"types": "./index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./esm/index.js",
|
||||
"require": "./index.js"
|
||||
},
|
||||
"./handler.js": {
|
||||
"import": "./esm/handler.js",
|
||||
"require": "./handler.js"
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"readdirp": "^4.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@paulmillr/jsbt": "0.2.1",
|
||||
"@types/node": "20.14.8",
|
||||
"chai": "4.3.4",
|
||||
"prettier": "3.1.1",
|
||||
"rimraf": "5.0.5",
|
||||
"sinon": "12.0.1",
|
||||
"sinon-chai": "3.7.0",
|
||||
"typescript": "5.5.2",
|
||||
"upath": "2.0.1"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"engines": {
|
||||
"node": ">= 14.16.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/paulmillr/chokidar.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/paulmillr/chokidar/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "tsc && tsc -p tsconfig.esm.json",
|
||||
"lint": "prettier --check src",
|
||||
"format": "prettier --write src",
|
||||
"test": "node --test"
|
||||
},
|
||||
"keywords": [
|
||||
"fs",
|
||||
"watch",
|
||||
"watchFile",
|
||||
"watcher",
|
||||
"watching",
|
||||
"file",
|
||||
"fsevents"
|
||||
],
|
||||
"funding": "https://paulmillr.com/funding/"
|
||||
}
|
||||
21
node_modules/sass/node_modules/readdirp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
120
node_modules/sass/node_modules/readdirp/README.md
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
# readdirp [](https://github.com/paulmillr/readdirp)
|
||||
|
||||
Recursive version of fs.readdir. Exposes a **stream API** (with small RAM & CPU footprint) and a **promise API**.
|
||||
|
||||
```sh
|
||||
npm install readdirp
|
||||
jsr add jsr:@paulmillr/readdirp
|
||||
```
|
||||
|
||||
```javascript
|
||||
// Use streams to achieve small RAM & CPU footprint.
|
||||
// 1) Streams example with for-await.
|
||||
import readdirp from 'readdirp';
|
||||
for await (const entry of readdirp('.')) {
|
||||
const {path} = entry;
|
||||
console.log(`${JSON.stringify({path})}`);
|
||||
}
|
||||
|
||||
// 2) Streams example, non for-await.
|
||||
// Print out all JS files along with their size within the current folder & subfolders.
|
||||
import readdirp from 'readdirp';
|
||||
readdirp('.', {alwaysStat: true, fileFilter: (f) => f.basename.endsWith('.js')})
|
||||
.on('data', (entry) => {
|
||||
const {path, stats: {size}} = entry;
|
||||
console.log(`${JSON.stringify({path, size})}`);
|
||||
})
|
||||
// Optionally call stream.destroy() in `warn()` in order to abort and cause 'close' to be emitted
|
||||
.on('warn', error => console.error('non-fatal error', error))
|
||||
.on('error', error => console.error('fatal error', error))
|
||||
.on('end', () => console.log('done'));
|
||||
|
||||
// 3) Promise example. More RAM and CPU than streams / for-await.
|
||||
import { readdirpPromise } from 'readdirp';
|
||||
const files = await readdirpPromise('.');
|
||||
console.log(files.map(file => file.path));
|
||||
|
||||
// Other options.
|
||||
import readdirp from 'readdirp';
|
||||
readdirp('test', {
|
||||
fileFilter: (f) => f.basename.endsWith('.js'),
|
||||
directoryFilter: (d) => d.basename !== '.git',
|
||||
// directoryFilter: (di) => di.basename.length === 9
|
||||
type: 'files_directories',
|
||||
depth: 1
|
||||
});
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
`const stream = readdirp(root[, options])` — **Stream API**
|
||||
|
||||
- Reads given root recursively and returns a `stream` of [entry infos](#entryinfo)
|
||||
- Optionally can be used like `for await (const entry of stream)` with node.js 10+ (`asyncIterator`).
|
||||
- `on('data', (entry) => {})` [entry info](#entryinfo) for every file / dir.
|
||||
- `on('warn', (error) => {})` non-fatal `Error` that prevents a file / dir from being processed. Example: inaccessible to the user.
|
||||
- `on('error', (error) => {})` fatal `Error` which also ends the stream. Example: illegal options were passed.
|
||||
- `on('end')` — we are done. Called when all entries were found and no more will be emitted.
|
||||
- `on('close')` — stream is destroyed via `stream.destroy()`.
|
||||
Could be useful if you want to manually abort even on a non-fatal error.
At that point the stream is no longer `readable` and no more entries, warnings, or errors are emitted
|
||||
- To learn more about streams, consult the very detailed [nodejs streams documentation](https://nodejs.org/api/stream.html)
|
||||
or the [stream-handbook](https://github.com/substack/stream-handbook)
|
||||
|
||||
`const entries = await readdirp.promise(root[, options])` — **Promise API**. Returns a list of [entry infos](#entryinfo).
|
||||
|
||||
First argument is always `root`, the path in which to start reading and recursing into subdirectories.
|
||||
|
||||
### options
|
||||
|
||||
- `fileFilter`: filter to include or exclude files
|
||||
- **Function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry
|
||||
- `directoryFilter`: filter to include/exclude directories found and to recurse into. Directories that do not pass a filter will not be recursed into.
|
||||
- `depth: 5`: depth at which to stop recursing even if more subdirectories are found
|
||||
- `type: 'files'`: determines if data events on the stream should be emitted for `'files'` (default), `'directories'`, `'files_directories'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes.
|
||||
- `alwaysStat: false`: always return `stats` property for every file. Default is `false`; readdirp will return `Dirent` entries. Setting it to `true` can double readdir execution time - use it only when you need file `size`, `mtime`, etc. Cannot be enabled on node <10.10.0.
|
||||
- `lstat: false`: include symlink entries in the stream along with files. When `true`, `fs.lstat` will be used instead of `fs.stat`
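A quick sketch combining these options (not from the upstream docs; the `src` path and the filters are made up for illustration):

```javascript
// Walk ./src up to two levels deep, emitting only .scss files and
// skipping node_modules directories entirely.
import readdirp from 'readdirp';

const stream = readdirp('src', {
  fileFilter: (entry) => entry.basename.endsWith('.scss'),
  directoryFilter: (entry) => entry.basename !== 'node_modules',
  depth: 2,
});
for await (const entry of stream) {
  console.log(entry.path);
}
```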
|
||||
|
||||
### `EntryInfo`
|
||||
|
||||
Has the following properties:
|
||||
|
||||
- `path: 'assets/javascripts/react.js'`: path to the file/directory (relative to given root)
|
||||
- `fullPath: '/Users/dev/projects/app/assets/javascripts/react.js'`: full path to the file/directory found
|
||||
- `basename: 'react.js'`: name of the file/directory
|
||||
- `dirent: fs.Dirent`: built-in [dir entry object](https://nodejs.org/api/fs.html#fs_class_fs_dirent) - only with `alwaysStat: false`
|
||||
- `stats: fs.Stats`: built in [stat object](https://nodejs.org/api/fs.html#fs_class_fs_stats) - only with `alwaysStat: true`
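A minimal sketch of reading these properties (assuming `alwaysStat: true` so that `stats` is populated):

```javascript
import readdirp from 'readdirp';

for await (const entry of readdirp('.', { alwaysStat: true })) {
  const { path, basename, stats } = entry;
  console.log(`${basename}: ${stats.size} bytes (${path})`);
}
```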
|
||||
|
||||
## Changelog
|
||||
|
||||
- 4.0 (Aug 25, 2024) rewritten in typescript, producing hybrid common.js / esm module.
|
||||
- Remove glob support and all dependencies
|
||||
- Make sure you're using `let {readdirp} = require('readdirp')` in common.js
|
||||
- 3.5 (Oct 13, 2020) disallows recursive directory-based symlinks.
|
||||
Before, it could have entered an infinite loop.
|
||||
- 3.4 (Mar 19, 2020) adds support for directory-based symlinks.
|
||||
- 3.3 (Dec 6, 2019) stabilizes RAM consumption and enables perf management with `highWaterMark` option. Fixes race conditions related to `for-await` looping.
|
||||
- 3.2 (Oct 14, 2019) improves performance by 250% and makes streams implementation more idiomatic.
|
||||
- 3.1 (Jul 7, 2019) brings `bigint` support to `stat` output on Windows. This is backwards-incompatible for some cases. Be careful. If you use it incorrectly, you'll see "TypeError: Cannot mix BigInt and other types, use explicit conversions".
|
||||
- 3.0 brings huge performance improvements and stream backpressure support.
|
||||
- Upgrading 2.x to 3.x:
|
||||
- Signature changed from `readdirp(options)` to `readdirp(root, options)`
|
||||
- Replaced callback API with promise API.
|
||||
- Renamed `entryType` option to `type`
|
||||
- Renamed `entryType: 'both'` to `'files_directories'`
|
||||
- `EntryInfo`
|
||||
- Renamed `stat` to `stats`
|
||||
- Emitted only when `alwaysStat: true`
|
||||
- `dirent` is emitted instead of `stats` by default with `alwaysStat: false`
|
||||
- Renamed `name` to `basename`
|
||||
- Removed `parentDir` and `fullParentDir` properties
|
||||
- Supported node.js versions:
|
||||
- 4.x: node 14+
|
||||
- 3.x: node 8+
|
||||
- 2.x: node 0.6+
|
||||
|
||||
## License
|
||||
|
||||
Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (<https://paulmillr.com>)
|
||||
|
||||
MIT License, see [LICENSE](LICENSE) file.
|
||||
108
node_modules/sass/node_modules/readdirp/esm/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* Recursive version of readdir. Exposes a streaming API and promise API.
|
||||
* The streaming API allows using a small amount of RAM.
|
||||
*
|
||||
* @module
|
||||
* @example
|
||||
```js
|
||||
import readdirp from 'readdirp';
|
||||
for await (const entry of readdirp('.')) {
|
||||
const {path} = entry;
|
||||
console.log(`${JSON.stringify({path})}`);
|
||||
}
|
||||
```
|
||||
*/
|
||||
/*! readdirp - MIT License (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) */
|
||||
import type { Stats, Dirent } from 'node:fs';
|
||||
import { Readable } from 'node:stream';
|
||||
/** Path in file system. */
|
||||
export type Path = string;
|
||||
/** Emitted entry. Contains relative & absolute path, basename, and either stats or dirent. */
|
||||
export interface EntryInfo {
|
||||
path: string;
|
||||
fullPath: string;
|
||||
stats?: Stats;
|
||||
dirent?: Dirent;
|
||||
basename: string;
|
||||
}
|
||||
/** Path or dir entries (files) */
|
||||
export type PathOrDirent = Dirent | Path;
|
||||
/** Filterer for files */
|
||||
export type Tester = (entryInfo: EntryInfo) => boolean;
|
||||
export type Predicate = string[] | string | Tester;
|
||||
export declare const EntryTypes: {
|
||||
readonly FILE_TYPE: "files";
|
||||
readonly DIR_TYPE: "directories";
|
||||
readonly FILE_DIR_TYPE: "files_directories";
|
||||
readonly EVERYTHING_TYPE: "all";
|
||||
};
|
||||
export type EntryType = (typeof EntryTypes)[keyof typeof EntryTypes];
|
||||
/**
|
||||
* Options for readdirp.
|
||||
* * type: files, directories, or both
|
||||
* * lstat: whether to use symlink-friendly stat
|
||||
* * depth: max depth
|
||||
* * alwaysStat: whether to use stat (more resources) or dirent
|
||||
* * highWaterMark: streaming param, specifies max amount of resources per entry
|
||||
*/
|
||||
export type ReaddirpOptions = {
|
||||
root: string;
|
||||
fileFilter?: Predicate;
|
||||
directoryFilter?: Predicate;
|
||||
type?: EntryType;
|
||||
lstat?: boolean;
|
||||
depth?: number;
|
||||
alwaysStat?: boolean;
|
||||
highWaterMark?: number;
|
||||
};
|
||||
/** Directory entry. Contains path, depth count, and files. */
|
||||
export interface DirEntry {
|
||||
files: PathOrDirent[];
|
||||
depth: number;
|
||||
path: Path;
|
||||
}
|
||||
/** Readable readdir stream, emitting new files as they're being listed. */
|
||||
export declare class ReaddirpStream extends Readable {
|
||||
parents: any[];
|
||||
reading: boolean;
|
||||
parent?: DirEntry;
|
||||
_stat: Function;
|
||||
_maxDepth: number;
|
||||
_wantsDir: boolean;
|
||||
_wantsFile: boolean;
|
||||
_wantsEverything: boolean;
|
||||
_root: Path;
|
||||
_isDirent: boolean;
|
||||
_statsProp: 'dirent' | 'stats';
|
||||
_rdOptions: {
|
||||
encoding: 'utf8';
|
||||
withFileTypes: boolean;
|
||||
};
|
||||
_fileFilter: Tester;
|
||||
_directoryFilter: Tester;
|
||||
constructor(options?: Partial<ReaddirpOptions>);
|
||||
_read(batch: number): Promise<void>;
|
||||
_exploreDir(path: Path, depth: number): Promise<{
|
||||
files: string[] | undefined;
|
||||
depth: number;
|
||||
path: string;
|
||||
}>;
|
||||
_formatEntry(dirent: PathOrDirent, path: Path): Promise<EntryInfo | undefined>;
|
||||
_onError(err: Error): void;
|
||||
_getEntryType(entry: EntryInfo): Promise<void | '' | 'file' | 'directory'>;
|
||||
_includeAsFile(entry: EntryInfo): boolean | undefined;
|
||||
}
|
||||
/**
|
||||
* Streaming version: Reads all files and directories in given root recursively.
|
||||
* Consumes ~constant small amount of RAM.
|
||||
* @param root Root directory
|
||||
* @param options Options to specify root (start directory), filters and recursion depth
|
||||
*/
|
||||
export declare function readdirp(root: Path, options?: Partial<ReaddirpOptions>): ReaddirpStream;
|
||||
/**
|
||||
* Promise version: Reads all files and directories in given root recursively.
|
||||
* Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
|
||||
* @returns array of paths and their entry infos
|
||||
*/
|
||||
export declare function readdirpPromise(root: Path, options?: Partial<ReaddirpOptions>): Promise<EntryInfo[]>;
|
||||
export default readdirp;
|
||||
257
node_modules/sass/node_modules/readdirp/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,257 @@
|
||||
import { stat, lstat, readdir, realpath } from 'node:fs/promises';
|
||||
import { Readable } from 'node:stream';
|
||||
import { resolve as presolve, relative as prelative, join as pjoin, sep as psep } from 'node:path';
|
||||
export const EntryTypes = {
|
||||
FILE_TYPE: 'files',
|
||||
DIR_TYPE: 'directories',
|
||||
FILE_DIR_TYPE: 'files_directories',
|
||||
EVERYTHING_TYPE: 'all',
|
||||
};
|
||||
const defaultOptions = {
|
||||
root: '.',
|
||||
fileFilter: (_entryInfo) => true,
|
||||
directoryFilter: (_entryInfo) => true,
|
||||
type: EntryTypes.FILE_TYPE,
|
||||
lstat: false,
|
||||
depth: 2147483648,
|
||||
alwaysStat: false,
|
||||
highWaterMark: 4096,
|
||||
};
|
||||
Object.freeze(defaultOptions);
|
||||
const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR';
|
||||
const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]);
|
||||
const ALL_TYPES = [
|
||||
EntryTypes.DIR_TYPE,
|
||||
EntryTypes.EVERYTHING_TYPE,
|
||||
EntryTypes.FILE_DIR_TYPE,
|
||||
EntryTypes.FILE_TYPE,
|
||||
];
|
||||
const DIR_TYPES = new Set([
|
||||
EntryTypes.DIR_TYPE,
|
||||
EntryTypes.EVERYTHING_TYPE,
|
||||
EntryTypes.FILE_DIR_TYPE,
|
||||
]);
|
||||
const FILE_TYPES = new Set([
|
||||
EntryTypes.EVERYTHING_TYPE,
|
||||
EntryTypes.FILE_DIR_TYPE,
|
||||
EntryTypes.FILE_TYPE,
|
||||
]);
|
||||
const isNormalFlowError = (error) => NORMAL_FLOW_ERRORS.has(error.code);
|
||||
const wantBigintFsStats = process.platform === 'win32';
|
||||
const emptyFn = (_entryInfo) => true;
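// Normalize a user-supplied filter (predicate function, basename string, or
// array of basenames) into a predicate over EntryInfo; anything else matches all.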
|
||||
const normalizeFilter = (filter) => {
|
||||
if (filter === undefined)
|
||||
return emptyFn;
|
||||
if (typeof filter === 'function')
|
||||
return filter;
|
||||
if (typeof filter === 'string') {
|
||||
const fl = filter.trim();
|
||||
return (entry) => entry.basename === fl;
|
||||
}
|
||||
if (Array.isArray(filter)) {
|
||||
const trItems = filter.map((item) => item.trim());
|
||||
return (entry) => trItems.some((f) => entry.basename === f);
|
||||
}
|
||||
return emptyFn;
|
||||
};
|
||||
/** Readable readdir stream, emitting new files as they're being listed. */
|
||||
export class ReaddirpStream extends Readable {
|
||||
constructor(options = {}) {
|
||||
super({
|
||||
objectMode: true,
|
||||
autoDestroy: true,
|
||||
highWaterMark: options.highWaterMark,
|
||||
});
|
||||
const opts = { ...defaultOptions, ...options };
|
||||
const { root, type } = opts;
|
||||
this._fileFilter = normalizeFilter(opts.fileFilter);
|
||||
this._directoryFilter = normalizeFilter(opts.directoryFilter);
|
||||
const statMethod = opts.lstat ? lstat : stat;
|
||||
// Use bigint stats if it's windows and stat() supports options (node 10+).
|
||||
if (wantBigintFsStats) {
|
||||
this._stat = (path) => statMethod(path, { bigint: true });
|
||||
}
|
||||
else {
|
||||
this._stat = statMethod;
|
||||
}
|
||||
this._maxDepth = opts.depth ?? defaultOptions.depth;
|
||||
this._wantsDir = type ? DIR_TYPES.has(type) : false;
|
||||
this._wantsFile = type ? FILE_TYPES.has(type) : false;
|
||||
this._wantsEverything = type === EntryTypes.EVERYTHING_TYPE;
|
||||
this._root = presolve(root);
|
||||
this._isDirent = !opts.alwaysStat;
|
||||
this._statsProp = this._isDirent ? 'dirent' : 'stats';
|
||||
this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent };
|
||||
// Launch stream with one parent, the root dir.
|
||||
this.parents = [this._exploreDir(root, 1)];
|
||||
this.reading = false;
|
||||
this.parent = undefined;
|
||||
}
|
||||
async _read(batch) {
|
||||
if (this.reading)
|
||||
return;
|
||||
this.reading = true;
|
||||
try {
|
||||
while (!this.destroyed && batch > 0) {
|
||||
const par = this.parent;
|
||||
const fil = par && par.files;
|
||||
if (fil && fil.length > 0) {
|
||||
const { path, depth } = par;
|
||||
const slice = fil.splice(0, batch).map((dirent) => this._formatEntry(dirent, path));
|
||||
const awaited = await Promise.all(slice);
|
||||
for (const entry of awaited) {
|
||||
if (!entry)
|
||||
continue;
|
||||
if (this.destroyed)
|
||||
return;
|
||||
const entryType = await this._getEntryType(entry);
|
||||
if (entryType === 'directory' && this._directoryFilter(entry)) {
|
||||
if (depth <= this._maxDepth) {
|
||||
this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
|
||||
}
|
||||
if (this._wantsDir) {
|
||||
this.push(entry);
|
||||
batch--;
|
||||
}
|
||||
}
|
||||
else if ((entryType === 'file' || this._includeAsFile(entry)) &&
|
||||
this._fileFilter(entry)) {
|
||||
if (this._wantsFile) {
|
||||
this.push(entry);
|
||||
batch--;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
const parent = this.parents.pop();
|
||||
if (!parent) {
|
||||
this.push(null);
|
||||
break;
|
||||
}
|
||||
this.parent = await parent;
|
||||
if (this.destroyed)
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
this.destroy(error);
|
||||
}
|
||||
finally {
|
||||
this.reading = false;
|
||||
}
|
||||
}
|
||||
async _exploreDir(path, depth) {
|
||||
let files;
|
||||
try {
|
||||
files = await readdir(path, this._rdOptions);
|
||||
}
|
||||
catch (error) {
|
||||
this._onError(error);
|
||||
}
|
||||
return { files, depth, path };
|
||||
}
|
||||
async _formatEntry(dirent, path) {
|
||||
let entry;
|
||||
const basename = this._isDirent ? dirent.name : dirent;
|
||||
try {
|
||||
const fullPath = presolve(pjoin(path, basename));
|
||||
entry = { path: prelative(this._root, fullPath), fullPath, basename };
|
||||
entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
|
||||
}
|
||||
catch (err) {
|
||||
this._onError(err);
|
||||
return;
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
_onError(err) {
|
||||
if (isNormalFlowError(err) && !this.destroyed) {
|
||||
this.emit('warn', err);
|
||||
}
|
||||
else {
|
||||
this.destroy(err);
|
||||
}
|
||||
}
|
||||
async _getEntryType(entry) {
|
||||
// entry may be undefined, because a warning or an error was emitted
// and the statsProp is undefined
if (!entry || !(this._statsProp in entry)) {
|
||||
return '';
|
||||
}
|
||||
const stats = entry[this._statsProp];
|
||||
if (stats.isFile())
|
||||
return 'file';
|
||||
if (stats.isDirectory())
|
||||
return 'directory';
|
||||
if (stats && stats.isSymbolicLink()) {
|
||||
const full = entry.fullPath;
|
||||
try {
|
||||
const entryRealPath = await realpath(full);
|
||||
const entryRealPathStats = await lstat(entryRealPath);
|
||||
if (entryRealPathStats.isFile()) {
|
||||
return 'file';
|
||||
}
|
||||
if (entryRealPathStats.isDirectory()) {
|
||||
const len = entryRealPath.length;
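// Circular link: the symlink itself sits inside the directory its target
// resolves to (real-path prefix match followed by a path separator).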
|
||||
if (full.startsWith(entryRealPath) && full.substr(len, 1) === psep) {
|
||||
const recursiveError = new Error(`Circular symlink detected: "${full}" points to "${entryRealPath}"`);
|
||||
// @ts-ignore
|
||||
recursiveError.code = RECURSIVE_ERROR_CODE;
|
||||
return this._onError(recursiveError);
|
||||
}
|
||||
return 'directory';
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
this._onError(error);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
}
|
||||
_includeAsFile(entry) {
|
||||
const stats = entry && entry[this._statsProp];
|
||||
return stats && this._wantsEverything && !stats.isDirectory();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Streaming version: Reads all files and directories in given root recursively.
|
||||
* Consumes ~constant small amount of RAM.
|
||||
* @param root Root directory
|
||||
* @param options Options to specify root (start directory), filters and recursion depth
|
||||
*/
|
||||
export function readdirp(root, options = {}) {
|
||||
// @ts-ignore
|
||||
let type = options.entryType || options.type;
|
||||
if (type === 'both')
|
||||
type = EntryTypes.FILE_DIR_TYPE; // backwards-compatibility
|
||||
if (type)
|
||||
options.type = type;
|
||||
if (!root) {
|
||||
throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)');
|
||||
}
|
||||
else if (typeof root !== 'string') {
|
||||
throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)');
|
||||
}
|
||||
else if (type && !ALL_TYPES.includes(type)) {
|
||||
throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`);
|
||||
}
|
||||
options.root = root;
|
||||
return new ReaddirpStream(options);
|
||||
}
|
||||
/**
|
||||
* Promise version: Reads all files and directories in given root recursively.
|
||||
* Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
|
||||
* @returns array of paths and their entry infos
|
||||
*/
|
||||
export function readdirpPromise(root, options = {}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const files = [];
|
||||
readdirp(root, options)
|
||||
.on('data', (entry) => files.push(entry))
|
||||
.on('end', () => resolve(files))
|
||||
.on('error', (error) => reject(error));
|
||||
});
|
||||
}
|
||||
export default readdirp;
|
||||
1
node_modules/sass/node_modules/readdirp/esm/package.json
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{ "type": "module", "sideEffects": false }
|
||||
108
node_modules/sass/node_modules/readdirp/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* Recursive version of readdir. Exposes a streaming API and promise API.
|
||||
* The streaming API allows using a small amount of RAM.
|
||||
*
|
||||
* @module
|
||||
* @example
|
||||
```js
|
||||
import readdirp from 'readdirp';
|
||||
for await (const entry of readdirp('.')) {
|
||||
const {path} = entry;
|
||||
console.log(`${JSON.stringify({path})}`);
|
||||
}
|
||||
```
|
||||
*/
|
||||
/*! readdirp - MIT License (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) */
|
||||
import type { Stats, Dirent } from 'node:fs';
|
||||
import { Readable } from 'node:stream';
|
||||
/** Path in file system. */
|
||||
export type Path = string;
|
||||
/** Emitted entry. Contains relative & absolute path, basename, and either stats or dirent. */
|
||||
export interface EntryInfo {
|
||||
path: string;
|
||||
fullPath: string;
|
||||
stats?: Stats;
|
||||
dirent?: Dirent;
|
||||
basename: string;
|
||||
}
|
||||
/** Path or dir entries (files) */
|
||||
export type PathOrDirent = Dirent | Path;
|
||||
/** Filterer for files */
|
||||
export type Tester = (entryInfo: EntryInfo) => boolean;
|
||||
export type Predicate = string[] | string | Tester;
|
||||
export declare const EntryTypes: {
|
||||
readonly FILE_TYPE: "files";
|
||||
readonly DIR_TYPE: "directories";
|
||||
readonly FILE_DIR_TYPE: "files_directories";
|
||||
readonly EVERYTHING_TYPE: "all";
|
||||
};
|
||||
export type EntryType = (typeof EntryTypes)[keyof typeof EntryTypes];
|
||||
/**
|
||||
* Options for readdirp.
|
||||
* * type: files, directories, or both
|
||||
* * lstat: whether to use symlink-friendly stat
|
||||
* * depth: max depth
|
||||
* * alwaysStat: whether to use stat (more resources) or dirent
|
||||
* * highWaterMark: streaming param, specifies max amount of resources per entry
|
||||
*/
|
||||
export type ReaddirpOptions = {
|
||||
root: string;
|
||||
fileFilter?: Predicate;
|
||||
directoryFilter?: Predicate;
|
||||
type?: EntryType;
|
||||
lstat?: boolean;
|
||||
depth?: number;
|
||||
alwaysStat?: boolean;
|
||||
highWaterMark?: number;
|
||||
};
|
||||
/** Directory entry. Contains path, depth count, and files. */
|
||||
export interface DirEntry {
|
||||
files: PathOrDirent[];
|
||||
depth: number;
|
||||
path: Path;
|
||||
}
|
||||
/** Readable readdir stream, emitting new files as they're being listed. */
|
||||
export declare class ReaddirpStream extends Readable {
|
||||
parents: any[];
|
||||
reading: boolean;
|
||||
parent?: DirEntry;
|
||||
_stat: Function;
|
||||
_maxDepth: number;
|
||||
_wantsDir: boolean;
|
||||
_wantsFile: boolean;
|
||||
_wantsEverything: boolean;
|
||||
_root: Path;
|
||||
_isDirent: boolean;
|
||||
_statsProp: 'dirent' | 'stats';
|
||||
_rdOptions: {
|
||||
encoding: 'utf8';
|
||||
withFileTypes: boolean;
|
||||
};
|
||||
_fileFilter: Tester;
|
||||
_directoryFilter: Tester;
|
||||
constructor(options?: Partial<ReaddirpOptions>);
|
||||
_read(batch: number): Promise<void>;
|
||||
_exploreDir(path: Path, depth: number): Promise<{
|
||||
files: string[] | undefined;
|
||||
depth: number;
|
||||
path: string;
|
||||
}>;
|
||||
_formatEntry(dirent: PathOrDirent, path: Path): Promise<EntryInfo | undefined>;
|
||||
_onError(err: Error): void;
|
||||
_getEntryType(entry: EntryInfo): Promise<void | '' | 'file' | 'directory'>;
|
||||
_includeAsFile(entry: EntryInfo): boolean | undefined;
|
||||
}
|
||||
/**
|
||||
* Streaming version: Reads all files and directories in given root recursively.
|
||||
* Consumes ~constant small amount of RAM.
|
||||
* @param root Root directory
|
||||
* @param options Options to specify root (start directory), filters and recursion depth
|
||||
*/
|
||||
export declare function readdirp(root: Path, options?: Partial<ReaddirpOptions>): ReaddirpStream;
|
||||
/**
|
||||
* Promise version: Reads all files and directories in given root recursively.
|
||||
* Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
|
||||
* @returns array of paths and their entry infos
|
||||
*/
|
||||
export declare function readdirpPromise(root: Path, options?: Partial<ReaddirpOptions>): Promise<EntryInfo[]>;
|
||||
export default readdirp;
|
||||
263
node_modules/sass/node_modules/readdirp/index.js
generated
vendored
Normal file
@@ -0,0 +1,263 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ReaddirpStream = exports.EntryTypes = void 0;
|
||||
exports.readdirp = readdirp;
|
||||
exports.readdirpPromise = readdirpPromise;
|
||||
const promises_1 = require("node:fs/promises");
|
||||
const node_stream_1 = require("node:stream");
|
||||
const node_path_1 = require("node:path");
|
||||
exports.EntryTypes = {
|
||||
FILE_TYPE: 'files',
|
||||
DIR_TYPE: 'directories',
|
||||
FILE_DIR_TYPE: 'files_directories',
|
||||
EVERYTHING_TYPE: 'all',
|
||||
};
|
||||
const defaultOptions = {
|
||||
root: '.',
|
||||
fileFilter: (_entryInfo) => true,
|
||||
directoryFilter: (_entryInfo) => true,
|
||||
type: exports.EntryTypes.FILE_TYPE,
|
||||
lstat: false,
|
||||
depth: 2147483648,
|
||||
alwaysStat: false,
|
||||
highWaterMark: 4096,
|
||||
};
|
||||
Object.freeze(defaultOptions);
|
||||
const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR';
|
||||
const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]);
|
||||
const ALL_TYPES = [
|
||||
exports.EntryTypes.DIR_TYPE,
|
||||
exports.EntryTypes.EVERYTHING_TYPE,
|
||||
exports.EntryTypes.FILE_DIR_TYPE,
|
||||
exports.EntryTypes.FILE_TYPE,
|
||||
];
|
||||
const DIR_TYPES = new Set([
|
||||
exports.EntryTypes.DIR_TYPE,
|
||||
exports.EntryTypes.EVERYTHING_TYPE,
|
||||
exports.EntryTypes.FILE_DIR_TYPE,
|
||||
]);
|
||||
const FILE_TYPES = new Set([
|
||||
exports.EntryTypes.EVERYTHING_TYPE,
|
||||
exports.EntryTypes.FILE_DIR_TYPE,
|
||||
exports.EntryTypes.FILE_TYPE,
|
||||
]);
|
||||
const isNormalFlowError = (error) => NORMAL_FLOW_ERRORS.has(error.code);
|
||||
const wantBigintFsStats = process.platform === 'win32';
|
||||
const emptyFn = (_entryInfo) => true;
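// Normalize a user-supplied filter (predicate function, basename string, or
// array of basenames) into a predicate over EntryInfo; anything else matches all.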
|
||||
const normalizeFilter = (filter) => {
|
||||
if (filter === undefined)
|
||||
return emptyFn;
|
||||
if (typeof filter === 'function')
|
||||
return filter;
|
||||
if (typeof filter === 'string') {
|
||||
const fl = filter.trim();
|
||||
return (entry) => entry.basename === fl;
|
||||
}
|
||||
if (Array.isArray(filter)) {
|
||||
const trItems = filter.map((item) => item.trim());
|
||||
return (entry) => trItems.some((f) => entry.basename === f);
|
||||
}
|
||||
return emptyFn;
|
||||
};
|
||||
/** Readable readdir stream, emitting new files as they're being listed. */
|
||||
class ReaddirpStream extends node_stream_1.Readable {
|
||||
constructor(options = {}) {
|
||||
super({
|
||||
objectMode: true,
|
||||
autoDestroy: true,
|
||||
highWaterMark: options.highWaterMark,
|
||||
});
|
||||
const opts = { ...defaultOptions, ...options };
|
||||
const { root, type } = opts;
|
||||
this._fileFilter = normalizeFilter(opts.fileFilter);
|
||||
this._directoryFilter = normalizeFilter(opts.directoryFilter);
|
||||
const statMethod = opts.lstat ? promises_1.lstat : promises_1.stat;
|
||||
// Use bigint stats if it's windows and stat() supports options (node 10+).
|
||||
if (wantBigintFsStats) {
|
||||
this._stat = (path) => statMethod(path, { bigint: true });
|
||||
}
|
||||
else {
|
||||
this._stat = statMethod;
|
||||
}
|
||||
this._maxDepth = opts.depth ?? defaultOptions.depth;
|
||||
this._wantsDir = type ? DIR_TYPES.has(type) : false;
|
||||
this._wantsFile = type ? FILE_TYPES.has(type) : false;
|
||||
this._wantsEverything = type === exports.EntryTypes.EVERYTHING_TYPE;
|
||||
this._root = (0, node_path_1.resolve)(root);
|
||||
this._isDirent = !opts.alwaysStat;
|
||||
this._statsProp = this._isDirent ? 'dirent' : 'stats';
|
||||
this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent };
|
||||
// Launch stream with one parent, the root dir.
|
||||
this.parents = [this._exploreDir(root, 1)];
|
||||
this.reading = false;
|
||||
this.parent = undefined;
|
||||
}
|
||||
async _read(batch) {
|
||||
if (this.reading)
|
||||
return;
|
||||
this.reading = true;
|
||||
try {
|
||||
while (!this.destroyed && batch > 0) {
|
||||
const par = this.parent;
|
||||
const fil = par && par.files;
|
||||
if (fil && fil.length > 0) {
|
||||
const { path, depth } = par;
|
||||
const slice = fil.splice(0, batch).map((dirent) => this._formatEntry(dirent, path));
|
||||
const awaited = await Promise.all(slice);
|
||||
for (const entry of awaited) {
|
||||
if (!entry)
|
||||
continue;
|
||||
if (this.destroyed)
|
||||
return;
|
||||
const entryType = await this._getEntryType(entry);
|
||||
if (entryType === 'directory' && this._directoryFilter(entry)) {
|
||||
if (depth <= this._maxDepth) {
|
||||
this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
|
||||
}
|
||||
if (this._wantsDir) {
|
||||
this.push(entry);
|
||||
batch--;
|
||||
}
|
||||
}
|
||||
else if ((entryType === 'file' || this._includeAsFile(entry)) &&
|
||||
this._fileFilter(entry)) {
|
||||
if (this._wantsFile) {
|
||||
this.push(entry);
|
||||
batch--;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
const parent = this.parents.pop();
|
||||
if (!parent) {
|
||||
this.push(null);
|
||||
break;
|
||||
}
|
||||
this.parent = await parent;
|
||||
if (this.destroyed)
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
this.destroy(error);
|
||||
}
|
||||
finally {
|
||||
this.reading = false;
|
||||
}
|
||||
}
|
||||
async _exploreDir(path, depth) {
|
||||
let files;
|
||||
try {
|
||||
files = await (0, promises_1.readdir)(path, this._rdOptions);
|
||||
}
|
||||
catch (error) {
|
||||
this._onError(error);
|
||||
}
|
||||
return { files, depth, path };
|
||||
}
|
||||
async _formatEntry(dirent, path) {
|
||||
let entry;
|
||||
const basename = this._isDirent ? dirent.name : dirent;
|
||||
try {
|
||||
const fullPath = (0, node_path_1.resolve)((0, node_path_1.join)(path, basename));
|
||||
entry = { path: (0, node_path_1.relative)(this._root, fullPath), fullPath, basename };
|
||||
entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
|
||||
}
|
||||
catch (err) {
|
||||
this._onError(err);
|
||||
return;
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
_onError(err) {
|
||||
if (isNormalFlowError(err) && !this.destroyed) {
|
||||
this.emit('warn', err);
|
||||
}
|
||||
else {
|
||||
this.destroy(err);
|
||||
}
|
||||
}
|
||||
async _getEntryType(entry) {
|
||||
// entry may be undefined, because a warning or an error was emitted
// and the statsProp is undefined
if (!entry || !(this._statsProp in entry)) {
|
||||
return '';
|
||||
}
|
||||
const stats = entry[this._statsProp];
|
||||
if (stats.isFile())
|
||||
return 'file';
|
||||
if (stats.isDirectory())
|
||||
return 'directory';
|
||||
if (stats && stats.isSymbolicLink()) {
|
||||
const full = entry.fullPath;
|
||||
try {
|
||||
const entryRealPath = await (0, promises_1.realpath)(full);
|
||||
const entryRealPathStats = await (0, promises_1.lstat)(entryRealPath);
|
||||
if (entryRealPathStats.isFile()) {
|
||||
return 'file';
|
||||
}
|
||||
if (entryRealPathStats.isDirectory()) {
|
||||
const len = entryRealPath.length;
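// Circular link: the symlink itself sits inside the directory its target
// resolves to (real-path prefix match followed by a path separator).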
|
||||
if (full.startsWith(entryRealPath) && full.substr(len, 1) === node_path_1.sep) {
|
||||
const recursiveError = new Error(`Circular symlink detected: "${full}" points to "${entryRealPath}"`);
|
||||
// @ts-ignore
|
||||
recursiveError.code = RECURSIVE_ERROR_CODE;
|
||||
return this._onError(recursiveError);
|
||||
}
|
||||
return 'directory';
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
this._onError(error);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
}
|
||||
_includeAsFile(entry) {
|
||||
const stats = entry && entry[this._statsProp];
|
||||
return stats && this._wantsEverything && !stats.isDirectory();
|
||||
}
|
||||
}
|
||||
exports.ReaddirpStream = ReaddirpStream;
|
||||
/**
|
||||
* Streaming version: Reads all files and directories in given root recursively.
|
||||
* Consumes ~constant small amount of RAM.
|
||||
* @param root Root directory
|
||||
* @param options Options to specify root (start directory), filters and recursion depth
|
||||
*/
|
||||
function readdirp(root, options = {}) {
|
||||
// @ts-ignore
|
||||
let type = options.entryType || options.type;
|
||||
if (type === 'both')
|
||||
type = exports.EntryTypes.FILE_DIR_TYPE; // backwards-compatibility
|
||||
if (type)
|
||||
options.type = type;
|
||||
if (!root) {
|
||||
throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)');
|
||||
}
|
||||
else if (typeof root !== 'string') {
|
||||
throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)');
|
||||
}
|
||||
else if (type && !ALL_TYPES.includes(type)) {
|
||||
throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`);
|
||||
}
|
||||
options.root = root;
|
||||
return new ReaddirpStream(options);
|
||||
}
|
||||
/**
|
||||
* Promise version: Reads all files and directories in given root recursively.
|
||||
* Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
|
||||
* @returns array of paths and their entry infos
|
||||
*/
|
||||
function readdirpPromise(root, options = {}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const files = [];
|
||||
readdirp(root, options)
|
||||
.on('data', (entry) => files.push(entry))
|
||||
.on('end', () => resolve(files))
|
||||
.on('error', (error) => reject(error));
|
||||
});
|
||||
}
|
||||
exports.default = readdirp;
|
||||
70
node_modules/sass/node_modules/readdirp/package.json
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
{
|
||||
"name": "readdirp",
|
||||
"description": "Recursive version of fs.readdir with small RAM & CPU footprint.",
|
||||
"version": "4.1.2",
|
||||
"homepage": "https://github.com/paulmillr/readdirp",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/paulmillr/readdirp.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/paulmillr/readdirp/issues"
|
||||
},
|
||||
"author": "Thorsten Lorenz <thlorenz@gmx.de> (thlorenz.com)",
|
||||
"contributors": [
|
||||
"Thorsten Lorenz <thlorenz@gmx.de> (thlorenz.com)",
|
||||
"Paul Miller (https://paulmillr.com)"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 14.18.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"index.d.ts.map",
|
||||
"index.js.map",
|
||||
"esm"
|
||||
],
|
||||
"main": "./index.js",
|
||||
"module": "./esm/index.js",
|
||||
"types": "./index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./esm/index.js",
|
||||
"require": "./index.js"
|
||||
}
|
||||
},
|
||||
"sideEffects": false,
|
||||
"keywords": [
|
||||
"recursive",
|
||||
"fs",
|
||||
"stream",
|
||||
"streams",
|
||||
"readdir",
|
||||
"filesystem",
|
||||
"find",
|
||||
"filter"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc && tsc -p tsconfig.cjs.json",
|
||||
"lint": "prettier --check index.ts test/index.test.js",
|
||||
"format": "prettier --write index.ts test/index.test.js",
|
||||
"test": "node test/index.test.js",
|
||||
"test:coverage": "c8 node test/index.test.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@paulmillr/jsbt": "0.3.1",
|
||||
"@types/node": "20.14.8",
|
||||
"c8": "10.1.3",
|
||||
"chai": "4.3.4",
|
||||
"chai-subset": "1.6.0",
|
||||
"micro-should": "0.5.0",
|
||||
"prettier": "3.1.1",
|
||||
"typescript": "5.5.2"
|
||||
},
|
||||
"funding": {
|
||||
"type": "individual",
|
||||
"url": "https://paulmillr.com/funding/"
|
||||
}
|
||||
}
|
||||
2
node_modules/sass/package.json
generated
vendored
@@ -1 +1 @@
|
||||
{"name":"sass","description":"A pure JavaScript implementation of Sass.","license":"MIT","bugs":"https://github.com/sass/dart-sass/issues","homepage":"https://github.com/sass/dart-sass","repository":{"type":"git","url":"https://github.com/sass/dart-sass"},"author":{"name":"Natalie Weizenbaum","email":"nweiz@google.com","url":"https://github.com/nex3"},"engines":{"node":">=14.0.0"},"dependencies":{"chokidar":">=3.0.0 <4.0.0","immutable":"^4.0.0","source-map-js":">=0.6.2 <2.0.0"},"keywords":["style","scss","sass","preprocessor","css"],"types":"types/index.d.ts","exports":{"types":"./types/index.d.ts","node":{"require":"./sass.node.js","default":"./sass.node.mjs"},"default":{"require":"./sass.default.cjs","default":"./sass.default.js"}},"version":"1.64.2","bin":{"sass":"sass.js"},"main":"sass.node.js"}
|
||||
{"name":"sass","description":"A pure JavaScript implementation of Sass.","license":"MIT","bugs":"https://github.com/sass/dart-sass/issues","homepage":"https://github.com/sass/dart-sass","repository":{"type":"git","url":"https://github.com/sass/dart-sass"},"author":{"name":"Natalie Weizenbaum","email":"nweiz@google.com","url":"https://github.com/nex3"},"engines":{"node":">=14.0.0"},"dependencies":{"chokidar":"^4.0.0","immutable":"^5.0.2","source-map-js":">=0.6.2 <2.0.0"},"optionalDependencies":{"@parcel/watcher":"^2.4.1"},"keywords":["style","scss","sass","preprocessor","css"],"types":"types/index.d.ts","exports":{"types":"./types/index.d.ts","node":{"require":"./sass.node.js","default":"./sass.node.mjs"},"default":{"require":"./sass.default.cjs","default":"./sass.default.js"}},"version":"1.93.2","bin":{"sass":"sass.js"},"main":"sass.node.js"}
|
||||
80762
node_modules/sass/sass.dart.js
generated
vendored
File diff suppressed because one or more lines are too long
9
node_modules/sass/sass.default.js
generated
vendored
@@ -10,6 +10,10 @@ export const compile = _cliPkgExports.compile;
|
||||
export const compileAsync = _cliPkgExports.compileAsync;
|
||||
export const compileString = _cliPkgExports.compileString;
|
||||
export const compileStringAsync = _cliPkgExports.compileStringAsync;
|
||||
export const initCompiler = _cliPkgExports.initCompiler;
|
||||
export const initAsyncCompiler = _cliPkgExports.initAsyncCompiler;
|
||||
export const Compiler = _cliPkgExports.Compiler;
|
||||
export const AsyncCompiler = _cliPkgExports.AsyncCompiler;
|
||||
export const Logger = _cliPkgExports.Logger;
|
||||
export const SassArgumentList = _cliPkgExports.SassArgumentList;
|
||||
export const SassBoolean = _cliPkgExports.SassBoolean;
|
||||
@@ -20,6 +24,7 @@ export const SassColor = _cliPkgExports.SassColor;
|
||||
export const SassFunction = _cliPkgExports.SassFunction;
|
||||
export const SassList = _cliPkgExports.SassList;
|
||||
export const SassMap = _cliPkgExports.SassMap;
|
||||
export const SassMixin = _cliPkgExports.SassMixin;
|
||||
export const SassNumber = _cliPkgExports.SassNumber;
|
||||
export const SassString = _cliPkgExports.SassString;
|
||||
export const Value = _cliPkgExports.Value;
|
||||
@@ -37,3 +42,7 @@ export const TRUE = _cliPkgExports.TRUE;
|
||||
export const FALSE = _cliPkgExports.FALSE;
|
||||
export const NULL = _cliPkgExports.NULL;
|
||||
export const types = _cliPkgExports.types;
|
||||
export const NodePackageImporter = _cliPkgExports.NodePackageImporter;
|
||||
export const deprecations = _cliPkgExports.deprecations;
|
||||
export const Version = _cliPkgExports.Version;
|
||||
export const parser_ = _cliPkgExports.parser_;
|
||||
|
||||
2
node_modules/sass/sass.js
generated
vendored
@@ -7,8 +7,10 @@ if (globalThis._cliPkgExports.length === 0) delete globalThis._cliPkgExports;
|
||||
library.load({
|
||||
readline: require("readline"),
|
||||
chokidar: require("chokidar"),
|
||||
parcel_watcher: (function(i){let r;return function parcel_watcher(){if(void 0!==r)return r;try{r=require(i)}catch(e){if('MODULE_NOT_FOUND'!==e.code)console.error(e);r=null}return r}})("@parcel/watcher"),
|
||||
util: require("util"),
|
||||
stream: require("stream"),
|
||||
nodeModule: require("module"),
|
||||
fs: require("fs"),
|
||||
immutable: require("immutable"),
|
||||
});
|
||||
|
||||
1
node_modules/sass/sass.node.js
generated
vendored
@@ -4,6 +4,7 @@ if (globalThis._cliPkgExports.length === 0) delete globalThis._cliPkgExports;
|
||||
library.load({
|
||||
util: require("util"),
|
||||
stream: require("stream"),
|
||||
nodeModule: require("module"),
|
||||
fs: require("fs"),
|
||||
immutable: require("immutable"),
|
||||
});
|
||||
|
||||
45
node_modules/sass/sass.node.mjs
generated
vendored
@@ -4,6 +4,10 @@ export const compile = cjs.compile;
|
||||
export const compileAsync = cjs.compileAsync;
|
||||
export const compileString = cjs.compileString;
|
||||
export const compileStringAsync = cjs.compileStringAsync;
|
||||
export const initCompiler = cjs.initCompiler;
|
||||
export const initAsyncCompiler = cjs.initAsyncCompiler;
|
||||
export const Compiler = cjs.Compiler;
|
||||
export const AsyncCompiler = cjs.AsyncCompiler;
|
||||
export const Logger = cjs.Logger;
|
||||
export const SassArgumentList = cjs.SassArgumentList;
|
||||
export const SassBoolean = cjs.SassBoolean;
|
||||
@@ -14,6 +18,7 @@ export const SassColor = cjs.SassColor;
|
||||
export const SassFunction = cjs.SassFunction;
|
||||
export const SassList = cjs.SassList;
|
||||
export const SassMap = cjs.SassMap;
|
||||
export const SassMixin = cjs.SassMixin;
|
||||
export const SassNumber = cjs.SassNumber;
|
||||
export const SassString = cjs.SassString;
|
||||
export const Value = cjs.Value;
|
||||
@@ -31,6 +36,10 @@ export const TRUE = cjs.TRUE;
|
||||
export const FALSE = cjs.FALSE;
|
||||
export const NULL = cjs.NULL;
|
||||
export const types = cjs.types;
|
||||
export const NodePackageImporter = cjs.NodePackageImporter;
|
||||
export const deprecations = cjs.deprecations;
|
||||
export const Version = cjs.Version;
|
||||
export const parser_ = cjs.parser_;
|
||||
|
||||
let printedDefaultExportDeprecation = false;
|
||||
function defaultExportDeprecation() {
|
||||
@@ -58,6 +67,22 @@ export default {
|
||||
defaultExportDeprecation();
|
||||
return cjs.compileStringAsync;
|
||||
},
|
||||
get initCompiler() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.initCompiler;
|
||||
},
|
||||
get initAsyncCompiler() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.initAsyncCompiler;
|
||||
},
|
||||
get Compiler() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.Compiler;
|
||||
},
|
||||
get AsyncCompiler() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.AsyncCompiler;
|
||||
},
|
||||
get Logger() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.Logger;
|
||||
@@ -98,6 +123,10 @@ export default {
|
||||
defaultExportDeprecation();
|
||||
return cjs.SassMap;
|
||||
},
|
||||
get SassMixin() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.SassMixin;
|
||||
},
|
||||
get SassNumber() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.SassNumber;
|
||||
@@ -166,4 +195,20 @@ export default {
|
||||
defaultExportDeprecation();
|
||||
return cjs.types;
|
||||
},
|
||||
get NodePackageImporter() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.NodePackageImporter;
|
||||
},
|
||||
get deprecations() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.deprecations;
|
||||
},
|
||||
get Version() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.Version;
|
||||
},
|
||||
get parser_() {
|
||||
defaultExportDeprecation();
|
||||
return cjs.parser_;
|
||||
},
|
||||
};
|
||||
|
||||
201
node_modules/sass/types/compile.d.ts
generated
vendored
@@ -37,6 +37,104 @@ export interface CompileResult {
|
||||
sourceMap?: RawSourceMap;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of creating a synchronous compiler. Returned by
|
||||
* {@link initCompiler}.
|
||||
*
|
||||
* @category Compile
|
||||
*/
|
||||
export class Compiler {
|
||||
/**
|
||||
* Throws an error if constructed directly, instead of via
|
||||
* {@link initCompiler}.
|
||||
*/
|
||||
private constructor();
|
||||
|
||||
/**
|
||||
* The {@link compile} method exposed through a Compiler instance while it is
|
||||
* active. If this is called after {@link dispose} on the Compiler
|
||||
* instance, an error will be thrown.
|
||||
*
|
||||
* During the Compiler instance's lifespan, given the same input, this will
|
||||
* return an identical result to the {@link compile} method exposed at the
|
||||
* module root.
|
||||
*/
|
||||
compile(path: string, options?: Options<'sync'>): CompileResult;
|
||||
|
||||
/**
|
||||
* The {@link compileString} method exposed through a Compiler instance while
|
||||
* it is active. If this is called after {@link dispose} on the Compiler
|
||||
* instance, an error will be thrown.
|
||||
*
|
||||
* During the Compiler instance's lifespan, given the same input, this will
|
||||
* return an identical result to the {@link compileString} method exposed at
|
||||
* the module root.
|
||||
*/
|
||||
compileString(source: string, options?: StringOptions<'sync'>): CompileResult;
|
||||
|
||||
/**
|
||||
* Ends the lifespan of this Compiler instance. After this is invoked, all
|
||||
* calls to the Compiler instance's {@link compile} or {@link compileString}
|
||||
* methods will result in an error.
|
||||
*/
|
||||
dispose(): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of creating an asynchronous compiler. Returned by
|
||||
* {@link initAsyncCompiler}.
|
||||
*
|
||||
* @category Compile
|
||||
*/
|
||||
export class AsyncCompiler {
|
||||
/**
|
||||
* Throws an error if constructed directly, instead of via
|
||||
* {@link initAsyncCompiler}.
|
||||
*/
|
||||
private constructor();
|
||||
|
||||
/**
|
||||
* The {@link compileAsync} method exposed through an Async Compiler instance
|
||||
* while it is active. If this is called after {@link dispose} on the Async
|
||||
* Compiler instance, an error will be thrown.
|
||||
*
|
||||
* During the Async Compiler instance's lifespan, given the same input, this
|
||||
* will return an identical result to the {@link compileAsync} method exposed
|
||||
* at the module root.
|
||||
*/
|
||||
compileAsync(
|
||||
path: string,
|
||||
options?: Options<'async'>
|
||||
): Promise<CompileResult>;
|
||||
|
||||
/**
|
||||
* The {@link compileStringAsync} method exposed through an Async Compiler
|
||||
* instance while it is active. If this is called after {@link dispose} on the
|
||||
* Async Compiler instance, an error will be thrown.
|
||||
*
|
||||
* During the Async Compiler instance's lifespan, given the same input, this
|
||||
* will return an identical result to the {@link compileStringAsync} method
|
||||
* exposed at the module root.
|
||||
*/
|
||||
compileStringAsync(
|
||||
source: string,
|
||||
options?: StringOptions<'async'>
|
||||
): Promise<CompileResult>;
|
||||
|
||||
/**
|
||||
* Ends the lifespan of this Async Compiler instance. After this is invoked,
|
||||
* all subsequent calls to the Compiler instance's `compileAsync` or
|
||||
* `compileStringAsync` methods will result in an error.
|
||||
*
|
||||
* Any compilations that are submitted before `dispose` will not be cancelled,
|
||||
* and will be allowed to settle.
|
||||
*
|
||||
* After all compilations have been settled and Sass completes any internal
|
||||
* task cleanup, `dispose` will resolve its promise.
|
||||
*/
|
||||
dispose(): Promise<void>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronously compiles the Sass file at `path` to CSS. If it succeeds it
|
||||
* returns a {@link CompileResult}, and if it fails it throws an {@link
|
||||
@@ -44,6 +142,17 @@ export interface CompileResult {
|
||||
*
|
||||
* This only allows synchronous {@link Importer}s and {@link CustomFunction}s.
|
||||
*
|
||||
* **Heads up!** When using the [sass-embedded] npm package for single
|
||||
* compilations, **{@link compileAsync} is almost always faster than
|
||||
* {@link compile}**, due to the overhead of emulating synchronous messaging
|
||||
* with worker threads and concurrent compilations being blocked on main thread.
|
||||
*
|
||||
* If you are running multiple compilations with the [sass-embedded] npm
|
||||
* package, using a {@link Compiler} will provide some speed improvements over
|
||||
* the module-level methods, and an {@link AsyncCompiler} will be much faster.
|
||||
*
|
||||
* [sass-embedded]: https://www.npmjs.com/package/sass-embedded
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```js
|
||||
@@ -66,9 +175,9 @@ export function compile(path: string, options?: Options<'sync'>): CompileResult;
|
||||
* This only allows synchronous or asynchronous {@link Importer}s and
|
||||
* {@link CustomFunction}s.
|
||||
*
|
||||
* **Heads up!** When using Dart Sass, **{@link compile} is almost twice as fast
|
||||
* as {@link compileAsync}**, due to the overhead of making the entire
|
||||
* evaluation process asynchronous.
|
||||
* **Heads up!** When using the `sass` npm package, **{@link compile} is almost
|
||||
* twice as fast as {@link compileAsync}**, due to the overhead of making the
|
||||
* entire evaluation process asynchronous.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
@@ -94,6 +203,18 @@ export function compileAsync(
|
||||
*
|
||||
* This only allows synchronous {@link Importer}s and {@link CustomFunction}s.
|
||||
*
|
||||
* **Heads up!** When using the [sass-embedded] npm package for single
|
||||
* compilations, **{@link compileStringAsync} is almost always faster than
|
||||
* {@link compileString}**, due to the overhead of emulating synchronous
|
||||
* messaging with worker threads and concurrent compilations being blocked on
|
||||
* the main thread.
|
||||
*
|
||||
* If you are running multiple compilations with the [sass-embedded] npm
|
||||
* package, using a {@link Compiler} will provide some speed improvements over
|
||||
* the module-level methods, and an {@link AsyncCompiler} will be much faster.
|
||||
*
|
||||
* [sass-embedded]: https://www.npmjs.com/package/sass-embedded
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```js
|
||||
@@ -125,9 +246,9 @@ export function compileString(
|
||||
* This only allows synchronous or asynchronous {@link Importer}s and {@link
|
||||
* CustomFunction}s.
|
||||
*
|
||||
* **Heads up!** When using Dart Sass, **{@link compile} is almost twice as fast
|
||||
* as {@link compileAsync}**, due to the overhead of making the entire
|
||||
* evaluation process asynchronous.
|
||||
* **Heads up!** When using the `sass` npm package, **{@link compileString} is
|
||||
* almost twice as fast as {@link compileStringAsync}**, due to the overhead
|
||||
* of making the entire evaluation process asynchronous.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
@@ -151,3 +272,71 @@ export function compileStringAsync(
|
||||
source: string,
|
||||
options?: StringOptions<'async'>
|
||||
): Promise<CompileResult>;
|
||||
|
||||
/**
|
||||
* Creates a synchronous {@link Compiler}. Each compiler instance exposes the
|
||||
* {@link compile} and {@link compileString} methods within the lifespan of the
|
||||
* Compiler. Given identical input, these methods will return results identical
|
||||
* to their counterparts exposed at the module root. To use asynchronous
|
||||
* compilation, use {@link initAsyncCompiler}.
|
||||
*
|
||||
* When calling the compile functions multiple times, using a compiler instance
|
||||
* with the [sass-embedded] npm package is much faster than using the top-level
|
||||
* compilation methods or the [sass] npm package.
|
||||
*
|
||||
* [sass-embedded]: https://www.npmjs.com/package/sass-embedded
|
||||
*
|
||||
* [sass]: https://www.npmjs.com/package/sass
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```js
|
||||
* const sass = require('sass');
|
||||
* function setup() {
|
||||
* const compiler = sass.initCompiler();
|
||||
* const result1 = compiler.compileString('a {b: c}').css;
|
||||
* const result2 = compiler.compileString('a {b: c}').css;
|
||||
* compiler.dispose();
|
||||
*
|
||||
* // throws error
|
||||
* const result3 = compiler.compileString('a {b: c}').css;
|
||||
* }
|
||||
* ```
|
||||
* @category Compile
|
||||
* @compatibility dart: "1.70.0", node: false
|
||||
*/
|
||||
export function initCompiler(): Compiler;
|
||||
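As a complement to the string-based example above, a sketch of reusing one synchronous Compiler across several entrypoints (the file paths are hypothetical); with the [sass-embedded] package this amortizes the per-compilation start-up cost.

```ts
import * as sass from 'sass';

// Hypothetical entrypoints for a multi-theme build.
const entrypoints = ['src/light.scss', 'src/dark.scss', 'src/print.scss'];

const compiler = sass.initCompiler();
try {
  for (const path of entrypoints) {
    const {css, loadedUrls} = compiler.compile(path, {style: 'compressed'});
    console.log(`${path}: ${css.length} bytes from ${loadedUrls.length} files`);
  }
} finally {
  // Always release the compiler, even if a compilation throws.
  compiler.dispose();
}
```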
|
||||
/**
|
||||
* Creates an asynchronous {@link AsyncCompiler}. Each compiler
|
||||
* instance exposes the {@link compileAsync} and {@link compileStringAsync}
|
||||
* methods within the lifespan of the Compiler. Given identical input, these
|
||||
* methods will return results identical to their counterparts exposed at the
|
||||
* module root. To use synchronous compilation, use {@link initCompiler}.
|
||||
*
|
||||
* When calling the compile functions multiple times, using a compiler instance
|
||||
* with the [sass-embedded] npm package is much faster than using the top-level
|
||||
* compilation methods or the [sass] npm package.
|
||||
*
|
||||
* [sass-embedded]: https://www.npmjs.com/package/sass-embedded
|
||||
*
|
||||
* [sass]: https://www.npmjs.com/package/sass
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```js
|
||||
* const sass = require('sass');
|
||||
* async function setup() {
|
||||
* const compiler = await sass.initAsyncCompiler();
|
||||
* const result1 = (await compiler.compileStringAsync('a {b: c}')).css;
|
||||
* const result2 = (await compiler.compileStringAsync('a {b: c}')).css;
|
||||
* await compiler.dispose();
|
||||
*
|
||||
* // throws error
|
||||
* const result3 = (await compiler.compileStringAsync('a {b: c}')).css;
|
||||
* }
|
||||
* ```
|
||||
* @category Compile
|
||||
* @compatibility dart: "1.70.0", node: false
|
||||
*/
|
||||
export function initAsyncCompiler(): Promise<AsyncCompiler>;
|
||||
|
||||
290
node_modules/sass/types/deprecations.d.ts
generated
vendored
Normal file
@@ -0,0 +1,290 @@
|
||||
/**
|
||||
* All of the deprecation types currently used by Sass.
|
||||
*
|
||||
* Any of these IDs or the deprecation objects they point to can be passed to
|
||||
* `fatalDeprecations`, `futureDeprecations`, or `silenceDeprecations`.
|
||||
*/
|
||||
export interface Deprecations {
|
||||
// START AUTOGENERATED LIST
|
||||
// Checksum: 0d3df25297a4e76b865aee1e908baf355e26d665
|
||||
|
||||
/**
|
||||
* Deprecation for passing a string directly to meta.call().
|
||||
*
|
||||
* This deprecation was active in the first version of Dart Sass.
|
||||
*/
|
||||
'call-string': Deprecation<'call-string'>;
|
||||
|
||||
/**
|
||||
* Deprecation for @elseif.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.3.2.
|
||||
*/
|
||||
elseif: Deprecation<'elseif'>;
|
||||
|
||||
/**
|
||||
* Deprecation for @-moz-document.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.7.2.
|
||||
*/
|
||||
'moz-document': Deprecation<'moz-document'>;
|
||||
|
||||
/**
|
||||
* Deprecation for imports using relative canonical URLs.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.14.2.
|
||||
*/
|
||||
'relative-canonical': Deprecation<'relative-canonical'>;
|
||||
|
||||
/**
|
||||
* Deprecation for declaring new variables with !global.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.17.2.
|
||||
*/
|
||||
'new-global': Deprecation<'new-global'>;
|
||||
|
||||
/**
|
||||
* Deprecation for using color module functions in place of plain CSS functions.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.23.0.
|
||||
*/
|
||||
'color-module-compat': Deprecation<'color-module-compat'>;
|
||||
|
||||
/**
|
||||
* Deprecation for / operator for division.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.33.0.
|
||||
*/
|
||||
'slash-div': Deprecation<'slash-div'>;
|
||||
|
||||
/**
|
||||
* Deprecation for leading, trailing, and repeated combinators.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.54.0.
|
||||
*/
|
||||
'bogus-combinators': Deprecation<'bogus-combinators'>;
|
||||
|
||||
/**
|
||||
* Deprecation for ambiguous + and - operators.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.55.0.
|
||||
*/
|
||||
'strict-unary': Deprecation<'strict-unary'>;
|
||||
|
||||
/**
|
||||
* Deprecation for passing invalid units to built-in functions.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.56.0.
|
||||
*/
|
||||
'function-units': Deprecation<'function-units'>;
|
||||
|
||||
/**
|
||||
* Deprecation for using !default or !global multiple times for one variable.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.62.0.
|
||||
*/
|
||||
'duplicate-var-flags': Deprecation<'duplicate-var-flags'>;
|
||||
|
||||
/**
|
||||
* Deprecation for passing null as alpha in the JS API.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.62.3.
|
||||
*/
|
||||
'null-alpha': Deprecation<'null-alpha'>;
|
||||
|
||||
/**
|
||||
* Deprecation for passing percentages to the Sass abs() function.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.65.0.
|
||||
*/
|
||||
'abs-percent': Deprecation<'abs-percent'>;
|
||||
|
||||
/**
|
||||
* Deprecation for using the current working directory as an implicit load path.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.73.0.
|
||||
*/
|
||||
'fs-importer-cwd': Deprecation<'fs-importer-cwd'>;
|
||||
|
||||
/**
|
||||
* Deprecation for function and mixin names beginning with --.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.76.0.
|
||||
*/
|
||||
'css-function-mixin': Deprecation<'css-function-mixin'>;
|
||||
|
||||
/**
|
||||
* Deprecation for declarations after or between nested rules.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.77.7.
|
||||
* It became obsolete in Dart Sass 1.92.0.
|
||||
*/
|
||||
'mixed-decls': Deprecation<'mixed-decls'>;
|
||||
|
||||
/**
|
||||
* Deprecation for meta.feature-exists.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.78.0.
|
||||
*/
|
||||
'feature-exists': Deprecation<'feature-exists'>;
|
||||
|
||||
/**
|
||||
* Deprecation for certain uses of built-in sass:color functions.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.79.0.
|
||||
*/
|
||||
'color-4-api': Deprecation<'color-4-api'>;
|
||||
|
||||
/**
|
||||
* Deprecation for using global color functions instead of sass:color.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.79.0.
|
||||
*/
|
||||
'color-functions': Deprecation<'color-functions'>;
|
||||
|
||||
/**
|
||||
* Deprecation for legacy JS API.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.79.0.
|
||||
*/
|
||||
'legacy-js-api': Deprecation<'legacy-js-api'>;
|
||||
|
||||
/**
|
||||
* Deprecation for @import rules.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.80.0.
|
||||
*/
|
||||
import: Deprecation<'import'>;
|
||||
|
||||
/**
|
||||
* Deprecation for global built-in functions that are available in sass: modules.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.80.0.
|
||||
*/
|
||||
'global-builtin': Deprecation<'global-builtin'>;
|
||||
|
||||
/**
|
||||
* Deprecation for functions named "type".
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.86.0.
|
||||
* It became obsolete in Dart Sass 1.92.0.
|
||||
*/
|
||||
'type-function': Deprecation<'type-function'>;
|
||||
|
||||
/**
|
||||
* Deprecation for passing a relative url to compileString().
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.88.0.
|
||||
*/
|
||||
'compile-string-relative-url': Deprecation<'compile-string-relative-url'>;
|
||||
|
||||
/**
|
||||
* Deprecation for a rest parameter before a positional or named parameter.
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.91.0.
|
||||
*/
|
||||
'misplaced-rest': Deprecation<'misplaced-rest'>;
|
||||
|
||||
/**
|
||||
* Deprecation for configuring private variables in @use, @forward, or load-css().
|
||||
*
|
||||
* This deprecation became active in Dart Sass 1.92.0.
|
||||
*/
|
||||
'with-private': Deprecation<'with-private'>;
|
||||
|
||||
// END AUTOGENERATED LIST
|
||||
|
||||
/**
|
||||
* Used for any user-emitted deprecation warnings.
|
||||
*/
|
||||
'user-authored': Deprecation<'user-authored', 'user'>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Either a deprecation or its ID, either of which can be passed to any of
|
||||
* the relevant compiler options.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatibility dart: "1.74.0", node: false
|
||||
*/
|
||||
export type DeprecationOrId = Deprecation | keyof Deprecations;
|
||||
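A minimal sketch of passing IDs of this type to the deprecation-related options; the stylesheet snippet is invented and triggers only the `slash-div` deprecation.

```ts
import * as sass from 'sass';

// `(1px / 2px)` uses slash-as-division, which normally emits a `slash-div`
// deprecation warning; here that warning is silenced by ID, while any
// `@import` rule would instead be treated as a hard error.
const result = sass.compileString('a {b: (1px / 2px)}', {
  silenceDeprecations: ['slash-div'],
  fatalDeprecations: ['import'],
});

console.log(result.css);
```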
|
||||
/**
|
||||
* The possible statuses that each deprecation can have.
|
||||
*
|
||||
* "active" deprecations are currently emitting deprecation warnings.
|
||||
* "future" deprecations are not yet active, but will be in the future.
|
||||
* "obsolete" deprecations were once active, but no longer are.
|
||||
*
|
||||
* The only "user" deprecation is "user-authored", which is used for deprecation
|
||||
* warnings coming from user code.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatibility dart: "1.74.0", node: false
|
||||
*/
|
||||
export type DeprecationStatus = 'active' | 'user' | 'future' | 'obsolete';
|
||||
|
||||
/**
|
||||
* A deprecated feature in the language.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatibility dart: "1.74.0", node: false
|
||||
*/
|
||||
export interface Deprecation<
|
||||
id extends keyof Deprecations = keyof Deprecations,
|
||||
status extends DeprecationStatus = DeprecationStatus
|
||||
> {
|
||||
/** The unique ID of this deprecation. */
|
||||
id: id;
|
||||
|
||||
/** The current status of this deprecation. */
|
||||
status: status;
|
||||
|
||||
/** A human-readable description of this deprecation. */
|
||||
description?: string;
|
||||
|
||||
/** The version this deprecation first became active in. */
|
||||
deprecatedIn: status extends 'future' | 'user' ? null : Version;
|
||||
|
||||
/** The version this deprecation became obsolete in. */
|
||||
obsoleteIn: status extends 'obsolete' ? Version : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* A semantic version of the compiler.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatibility dart: "1.74.0", node: false
|
||||
*/
|
||||
export class Version {
|
||||
/**
|
||||
* Constructs a new version.
|
||||
*
|
||||
* All components must be non-negative integers.
|
||||
*
|
||||
* @param major - The major version.
|
||||
* @param minor - The minor version.
|
||||
* @param patch - The patch version.
|
||||
*/
|
||||
constructor(major: number, minor: number, patch: number);
|
||||
readonly major: number;
|
||||
readonly minor: number;
|
||||
readonly patch: number;
|
||||
|
||||
/**
|
||||
* Parses a version from a string.
|
||||
*
|
||||
* This throws an error if a valid version can't be parsed.
|
||||
*
|
||||
* @param version - A string in the form "major.minor.patch".
|
||||
*/
|
||||
static parse(version: string): Version;
|
||||
}
|
||||
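A sketch of one common use of `Version`: passing a parsed compiler version to `fatalDeprecations` so that everything already deprecated by that release becomes an error.

```ts
import {compileString, Version} from 'sass';

// Treat every deprecation that was already active in Dart Sass 1.33.0
// (for example `slash-div`) as fatal for this compilation.
const baseline = Version.parse('1.33.0');

const result = compileString('a {b: c}', {
  fatalDeprecations: [baseline],
});
```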
|
||||
/**
|
||||
* An object containing all deprecation types.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatibility dart: "1.74.0", node: false
|
||||
*/
|
||||
export const deprecations: Deprecations;
|
||||
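A sketch of using this object reflectively, for example to list which deprecations the loaded compiler considers active and since when.

```ts
import {deprecations, Deprecation} from 'sass';

for (const deprecation of Object.values(deprecations) as Deprecation[]) {
  if (deprecation.status === 'active' && deprecation.deprecatedIn) {
    const {major, minor, patch} = deprecation.deprecatedIn;
    console.log(`${deprecation.id}: active since ${major}.${minor}.${patch}`);
  }
}
```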
217
node_modules/sass/types/importer.d.ts
generated
vendored
@@ -1,6 +1,38 @@
|
||||
import {Syntax} from './options';
|
||||
import {PromiseOr} from './util/promise_or';
|
||||
|
||||
/**
|
||||
* Contextual information passed to {@link Importer.canonicalize} and {@link
|
||||
* FileImporter.findFileUrl}. Not all importers will need this information to
|
||||
* resolve loads, but some may find it useful.
|
||||
*/
|
||||
export interface CanonicalizeContext {
|
||||
/**
|
||||
* Whether this is being invoked because of a Sass
|
||||
* `@import` rule, as opposed to a `@use` or `@forward` rule.
|
||||
*
|
||||
* This should *only* be used for determining whether or not to load
|
||||
* [import-only files](https://sass-lang.com/documentation/at-rules/import#import-only-files).
|
||||
*/
|
||||
fromImport: boolean;
|
||||
|
||||
/**
|
||||
* The canonical URL of the file that contains the load, if that information
|
||||
* is available.
|
||||
*
|
||||
* For an {@link Importer}, this is only passed when the `url` parameter is a
|
||||
* relative URL _or_ when its [URL scheme] is included in {@link
|
||||
* Importer.nonCanonicalScheme}. This ensures that canonical URLs are always
|
||||
* resolved the same way regardless of context.
|
||||
*
|
||||
* [URL scheme]: https://developer.mozilla.org/en-US/docs/Learn/Common_questions/Web_mechanics/What_is_a_URL#scheme
|
||||
*
|
||||
* For a {@link FileImporter}, this is always available as long as Sass knows
|
||||
* the canonical URL of the containing file.
|
||||
*/
|
||||
containingUrl: URL | null;
|
||||
}
|
||||
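A sketch of a custom importer that receives this context; the `db:` scheme and the in-memory stylesheet table are invented for the example.

```ts
import * as sass from 'sass';

// Invented in-memory "database" of stylesheets keyed by canonical URL.
const stylesheets: Record<string, string> = {
  'db:theme': 'a {color: red}',
};

const dbImporter: sass.Importer<'sync'> = {
  canonicalize(url, context) {
    // `context.fromImport` says whether the load came from `@import`, and
    // `context.containingUrl` is the requesting stylesheet's canonical URL
    // when Sass knows it; neither changes the result in this sketch.
    const canonical = url.startsWith('db:') ? url : `db:${url}`;
    return canonical in stylesheets ? new URL(canonical) : null;
  },
  load(canonicalUrl) {
    return {contents: stylesheets[canonicalUrl.href], syntax: 'scss'};
  },
};

const css = sass.compileString('@use "theme";', {importers: [dbImporter]}).css;
```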
|
||||
/**
|
||||
* A special type of importer that redirects all loads to existing files on
|
||||
* disk. Although this is less powerful than a full {@link Importer}, it
|
||||
@@ -10,7 +42,7 @@ import {PromiseOr} from './util/promise_or';
|
||||
* Like all importers, this implements custom Sass loading logic for [`@use`
|
||||
* rules](https://sass-lang.com/documentation/at-rules/use) and [`@import`
|
||||
* rules](https://sass-lang.com/documentation/at-rules/import). It can be passed
|
||||
* to {@link Options.importers} or {@link StringOptionsWithImporter.importer}.
|
||||
* to {@link Options.importers} or {@link StringOptions.importer}.
|
||||
*
|
||||
* @typeParam sync - A `FileImporter<'sync'>`'s {@link findFileUrl} must return
|
||||
* synchronously, but in return it can be passed to {@link compile} and {@link
|
||||
@@ -56,12 +88,6 @@ export interface FileImporter<
|
||||
* @param url - The loaded URL. Since this might be relative, it's represented
|
||||
* as a string rather than a {@link URL} object.
|
||||
*
|
||||
* @param options.fromImport - Whether this is being invoked because of a Sass
|
||||
* `@import` rule, as opposed to a `@use` or `@forward` rule.
|
||||
*
|
||||
* This should *only* be used for determining whether or not to load
|
||||
* [import-only files](https://sass-lang.com/documentation/at-rules/import#import-only-files).
|
||||
*
|
||||
* @returns An absolute `file:` URL if this importer recognizes the `url`.
|
||||
* This may be only partially resolved: the compiler will automatically look
|
||||
* for [partials](https://sass-lang.com/documentation/at-rules/use#partials),
|
||||
@@ -85,7 +111,7 @@ export interface FileImporter<
|
||||
*/
|
||||
findFileUrl(
|
||||
url: string,
|
||||
options: {fromImport: boolean}
|
||||
context: CanonicalizeContext
|
||||
): PromiseOr<URL | null, sync>;
|
||||
|
||||
/** @hidden */
|
||||
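A sketch of a `findFileUrl` implementation in that spirit; the `~assets/` prefix, directory layout, and entrypoint path are invented for the example.

```ts
import {pathToFileURL} from 'node:url';
import * as sass from 'sass';

// Redirects the invented `~assets/` prefix to ./src/assets on disk and lets
// Sass finish the resolution (partials, index files, extensions).
const assetsImporter: sass.FileImporter<'sync'> = {
  findFileUrl(url) {
    if (!url.startsWith('~assets/')) return null;
    return new URL(url.slice('~assets/'.length), pathToFileURL('src/assets/'));
  },
};

// Hypothetical entrypoint containing e.g. `@use "~assets/colors";`.
const result = sass.compile('src/entry.scss', {importers: [assetsImporter]});
```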
@@ -96,7 +122,7 @@ export interface FileImporter<
|
||||
* An object that implements custom Sass loading logic for [`@use`
|
||||
* rules](https://sass-lang.com/documentation/at-rules/use) and [`@import`
|
||||
* rules](https://sass-lang.com/documentation/at-rules/import). It can be passed
|
||||
* to {@link Options.importers} or {@link StringOptionsWithImporter.importer}.
|
||||
* to {@link Options.importers} or {@link StringOptions.importer}.
|
||||
*
|
||||
* Importers that simply redirect to files on disk are encouraged to use the
|
||||
* {@link FileImporter} interface instead.
|
||||
@@ -200,12 +226,13 @@ export interface Importer<sync extends 'sync' | 'async' = 'sync' | 'async'> {
|
||||
* the same result. Calling {@link canonicalize} with a URL returned by a
|
||||
* previous call to {@link canonicalize} must return that URL.
|
||||
*
|
||||
* Relative loads in stylesheets loaded from an importer are handled by
|
||||
* resolving the loaded URL relative to the canonical URL of the stylesheet
|
||||
* that contains it, and passing that URL back to the importer's {@link
|
||||
* canonicalize} method. For example, suppose the "Resolving a Load" example
|
||||
* {@link Importer | above} returned a stylesheet that contained `@use
|
||||
* "mixins"`:
|
||||
* #### Relative URLs
|
||||
*
|
||||
* Relative loads in stylesheets loaded from an importer are first resolved
|
||||
* relative to the canonical URL of the stylesheet that contains it and passed
|
||||
* back to the {@link canonicalize} method for the local importer that loaded
|
||||
* that stylesheet. For example, suppose the "Resolving a Load" example {@link
|
||||
* Importer | above} returned a stylesheet that contained `@use "mixins"`:
|
||||
*
|
||||
* - The compiler resolves the URL `mixins` relative to the current
|
||||
* stylesheet's canonical URL `db:foo/bar/baz/_index.scss` to get
|
||||
@@ -217,15 +244,14 @@ export interface Importer<sync extends 'sync' | 'async' = 'sync' | 'async'> {
|
||||
* called with a URL relative to one returned by an earlier call to {@link
|
||||
* canonicalize}.
|
||||
*
|
||||
* If the local importer's `canonicalize` method returns `null`, the relative
|
||||
* URL is then passed to each of {@link Options.importers}' `canonicalize()`
|
||||
* methods in turn until one returns a canonical URL. If none of them do, the
|
||||
* load fails.
|
||||
*
|
||||
* @param url - The loaded URL. Since this might be relative, it's represented
|
||||
* as a string rather than a {@link URL} object.
|
||||
*
|
||||
* @param options.fromImport - Whether this is being invoked because of a Sass
|
||||
* `@import` rule, as opposed to a `@use` or `@forward` rule.
|
||||
*
|
||||
* This should *only* be used for determining whether or not to load
|
||||
* [import-only files](https://sass-lang.com/documentation/at-rules/import#import-only-files).
|
||||
*
|
||||
* @returns An absolute URL if this importer recognizes the `url`, or `null`
|
||||
* if it doesn't. If this returns `null`, other importers or {@link
|
||||
* Options.loadPaths | load paths} may handle the load.
|
||||
@@ -242,7 +268,7 @@ export interface Importer<sync extends 'sync' | 'async' = 'sync' | 'async'> {
|
||||
*/
|
||||
canonicalize(
|
||||
url: string,
|
||||
options: {fromImport: boolean}
|
||||
context: CanonicalizeContext
|
||||
): PromiseOr<URL | null, sync>;
|
||||
|
||||
/**
|
||||
@@ -272,6 +298,153 @@ export interface Importer<sync extends 'sync' | 'async' = 'sync' | 'async'> {
|
||||
|
||||
/** @hidden */
|
||||
findFileUrl?: never;
|
||||
|
||||
/**
|
||||
* A URL scheme or set of schemes (without the `:`) that this importer
|
||||
* promises never to use for URLs returned by {@link canonicalize}. If it does
|
||||
* return a URL with one of these schemes, that's an error.
|
||||
*
|
||||
* If this is set, any call to canonicalize for a URL with a non-canonical
|
||||
* scheme will be passed {@link CanonicalizeContext.containingUrl} if it's
|
||||
* known.
|
||||
*
|
||||
* These schemes may only contain lowercase ASCII letters, ASCII numerals,
|
||||
* `+`, `-`, and `.`. They may not be empty.
|
||||
*/
|
||||
nonCanonicalScheme?: string | string[];
|
||||
}
|
||||
|
||||
declare const nodePackageImporterKey: unique symbol;
|
||||
|
||||
/**
|
||||
* The built-in Node.js package importer. This loads pkg: URLs from node_modules
|
||||
* according to the standard Node.js resolution algorithm.
|
||||
*
|
||||
* A Node.js package importer is exposed as a class that can be added to the
|
||||
* `importers` option.
|
||||
*
|
||||
*```js
|
||||
* const sass = require('sass');
|
||||
* sass.compileString('@use "pkg:vuetify";', {
|
||||
* importers: [new sass.NodePackageImporter()]
|
||||
* });
|
||||
*```
|
||||
*
|
||||
* ## Writing Sass packages
|
||||
*
|
||||
* Package authors can control what is exposed to their users through their
|
||||
* `package.json` manifest. The recommended method is to add a `sass`
|
||||
* conditional export to `package.json`.
|
||||
*
|
||||
* ```json
|
||||
* // node_modules/uicomponents/package.json
|
||||
* {
|
||||
* "exports": {
|
||||
* ".": {
|
||||
* "sass": "./src/scss/index.scss",
|
||||
* "import": "./dist/js/index.mjs",
|
||||
* "default": "./dist/js/index.js"
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* This allows a package user to write `@use "pkg:uicomponents"` to load the
|
||||
* file at `node_modules/uicomponents/src/scss/index.scss`.
|
||||
*
|
||||
* The Node.js package importer supports the variety of formats supported by
|
||||
* Node.js [package entry points], allowing authors to expose multiple subpaths.
|
||||
*
|
||||
* [package entry points]:
|
||||
* https://nodejs.org/api/packages.html#package-entry-points
|
||||
*
|
||||
* ```json
|
||||
* // node_modules/uicomponents/package.json
|
||||
* {
|
||||
* "exports": {
|
||||
* ".": {
|
||||
*       "sass": "./src/scss/index.scss"
|
||||
* },
|
||||
* "./colors.scss": {
|
||||
*       "sass": "./src/scss/_colors.scss"
|
||||
* },
|
||||
* "./theme/*.scss": {
|
||||
*       "sass": "./src/scss/theme/*.scss"
|
||||
*     }
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* This allows a package user to write:
|
||||
*
|
||||
* - `@use "pkg:uicomponents";` to import the root export.
|
||||
* - `@use "pkg:uicomponents/colors";` to import the colors partial.
|
||||
* - `@use "pkg:uicomponents/theme/purple";` to import a purple theme.
|
||||
*
|
||||
* Note that while library users can rely on the importer to resolve
|
||||
* [partials](https://sass-lang.com/documentation/at-rules/use#partials), [index
|
||||
* files](https://sass-lang.com/documentation/at-rules/use#index-files), and
|
||||
* extensions, library authors must specify the entire file path in `exports`.
|
||||
*
|
||||
* In addition to the `sass` condition, the `style` condition is also
|
||||
* acceptable. Sass will match the `default` condition if it's a relevant file
|
||||
* type, but authors are discouraged from relying on this. Notably, the key
|
||||
* order matters, and the importer will resolve to the first value with a key
|
||||
* that is `sass`, `style`, or `default`, so you should always put `default`
|
||||
* last.
|
||||
*
|
||||
* To help package authors who haven't transitioned to package entry points
|
||||
* using the `exports` field, the Node.js package importer provides several
|
||||
* fallback options. If the `pkg:` URL does not have a subpath, the Node.js
|
||||
* package importer will look for a `sass` or `style` key at the root of
|
||||
* `package.json`.
|
||||
*
|
||||
* ```json
|
||||
* // node_modules/uicomponents/package.json
|
||||
* {
|
||||
* "sass": "./src/scss/index.scss",
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* This allows a user to write `@use "pkg:uicomponents";` to import the
|
||||
* `index.scss` file.
|
||||
*
|
||||
* Finally, the Node.js package importer will look for an `index` file at the
|
||||
* package root, resolving partials and extensions. For example, if the file
|
||||
* `_index.scss` exists in the package root of `uicomponents`, a user can import
|
||||
* that with `@use "pkg:uicomponents";`.
|
||||
*
|
||||
* If a `pkg:` URL includes a subpath that doesn't have a match in package entry
|
||||
* points, the Node.js importer will attempt to find that file relative to the
|
||||
* package root, resolving for file extensions, partials and index files. For
|
||||
* example, if the file `src/sass/_colors.scss` exists in the `uicomponents`
|
||||
* package, a user can import that file using `@use
|
||||
* "pkg:uicomponents/src/sass/colors";`.
|
||||
*
|
||||
* @compatibility dart: "1.71.0", node: false
|
||||
* @category Importer
|
||||
*/
|
||||
export class NodePackageImporter {
|
||||
/** Used to distinguish this type from any arbitrary object. */
|
||||
private readonly [nodePackageImporterKey]: true;
|
||||
|
||||
/**
|
||||
* The NodePackageImporter has an optional `entryPointDirectory` option, which
|
||||
* is the directory where the Node Package Importer should start when
|
||||
* resolving `pkg:` URLs in sources other than files on disk. This will be
|
||||
* used as the `parentURL` in the [Node Module
|
||||
* Resolution](https://nodejs.org/api/esm.html#resolution-algorithm-specification)
|
||||
* algorithm.
|
||||
*
|
||||
* In order to be found by the Node Package Importer, a package will need to
|
||||
* be inside a node_modules folder located in the `entryPointDirectory`, or
|
||||
* one of its parent directories, up to the filesystem root.
|
||||
*
|
||||
* Relative paths will be resolved relative to the current working directory.
|
||||
* If a path is not provided, this defaults to the parent directory of the
|
||||
* Node.js entrypoint. If that's not available, this will throw an error.
|
||||
*/
|
||||
constructor(entryPointDirectory?: string);
|
||||
}
|
||||
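A sketch of the constructor option described above; `vuetify` stands in for any installed dependency, and the explicit directory only matters when Sass can't infer the Node.js entrypoint on its own.

```ts
import * as path from 'node:path';
import * as sass from 'sass';

// Resolve `pkg:` URLs starting from this project's node_modules tree.
const pkgImporter = new sass.NodePackageImporter(path.resolve('.'));

const result = sass.compileString('@use "pkg:vuetify";', {
  importers: [pkgImporter],
});
```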
|
||||
/**
|
||||
|
||||
41
node_modules/sass/types/index.d.ts
generated
vendored
@@ -3,15 +3,33 @@
|
||||
// implementations.
|
||||
|
||||
export {
|
||||
AsyncCompiler,
|
||||
CompileResult,
|
||||
Compiler,
|
||||
compile,
|
||||
compileAsync,
|
||||
compileString,
|
||||
compileStringAsync,
|
||||
initCompiler,
|
||||
initAsyncCompiler,
|
||||
} from './compile';
|
||||
export {
|
||||
deprecations,
|
||||
Deprecation,
|
||||
Deprecations,
|
||||
DeprecationOrId,
|
||||
DeprecationStatus,
|
||||
Version,
|
||||
} from './deprecations';
|
||||
export {Exception} from './exception';
|
||||
export {FileImporter, Importer, ImporterResult} from './importer';
|
||||
export {Logger, SourceSpan, SourceLocation} from './logger';
|
||||
export {
|
||||
CanonicalizeContext,
|
||||
FileImporter,
|
||||
Importer,
|
||||
ImporterResult,
|
||||
NodePackageImporter,
|
||||
} from './importer';
|
||||
export {Logger, LoggerWarnOptions, SourceSpan, SourceLocation} from './logger';
|
||||
export {
|
||||
CustomFunction,
|
||||
Options,
|
||||
@@ -27,7 +45,25 @@ export {
|
||||
CalculationOperation,
|
||||
CalculationOperator,
|
||||
CalculationValue,
|
||||
ChannelName,
|
||||
ChannelNameHsl,
|
||||
ChannelNameHwb,
|
||||
ChannelNameLch,
|
||||
ChannelNameLab,
|
||||
ChannelNameRgb,
|
||||
ChannelNameXyz,
|
||||
ColorSpaceHsl,
|
||||
ColorSpaceHwb,
|
||||
ColorSpaceLch,
|
||||
ColorSpaceLab,
|
||||
ColorSpaceRgb,
|
||||
ColorSpaceXyz,
|
||||
GamutMapMethod,
|
||||
HueInterpolationMethod,
|
||||
KnownColorSpace,
|
||||
ListSeparator,
|
||||
PolarColorSpace,
|
||||
RectangularColorSpace,
|
||||
SassArgumentList,
|
||||
SassBoolean,
|
||||
SassCalculation,
|
||||
@@ -35,6 +71,7 @@ export {
|
||||
SassFunction,
|
||||
SassList,
|
||||
SassMap,
|
||||
SassMixin,
|
||||
SassNumber,
|
||||
SassString,
|
||||
Value,
|
||||
|
||||
59
node_modules/sass/types/legacy/options.d.ts
generated
vendored
@@ -1,6 +1,8 @@
|
||||
import {DeprecationOrId, Version} from '../deprecations';
|
||||
import {Logger} from '../logger';
|
||||
import {LegacyImporter} from './importer';
|
||||
import {LegacyFunction} from './function';
|
||||
import {NodePackageImporter} from '../importer';
|
||||
|
||||
/**
|
||||
* Options for {@link render} and {@link renderSync} that are shared between
|
||||
@@ -482,6 +484,45 @@ export interface LegacySharedOptions<sync extends 'sync' | 'async'> {
|
||||
*/
|
||||
quietDeps?: boolean;
|
||||
|
||||
/**
|
||||
* A set of deprecations to treat as fatal.
|
||||
*
|
||||
* If a deprecation warning of any provided type is encountered during
|
||||
* compilation, the compiler will error instead.
|
||||
*
|
||||
* If a `Version` is provided, then all deprecations that were active in that
|
||||
* compiler version will be treated as fatal.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatiblity dart: "1.78.0", node: false
|
||||
*/
|
||||
fatalDeprecations?: (DeprecationOrId | Version)[];
|
||||
|
||||
/**
|
||||
* A set of future deprecations to opt into early.
|
||||
*
|
||||
* Future deprecations passed here will be treated as active by the compiler,
|
||||
* emitting warnings as necessary.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatiblity dart: "1.78.0", node: false
|
||||
*/
|
||||
futureDeprecations?: DeprecationOrId[];
|
||||
|
||||
/**
|
||||
* A set of active deprecations to ignore.
|
||||
*
|
||||
* If a deprecation warning of any provided type is encountered during
|
||||
* compilation, the compiler will ignore it instead.
|
||||
*
|
||||
* **Heads up!** The deprecated functionality you're depending on will
|
||||
* eventually break.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatiblity dart: "1.78.0", node: false
|
||||
*/
|
||||
silenceDeprecations?: DeprecationOrId[];
|
||||
|
||||
/**
|
||||
* By default, Dart Sass will print only five instances of the same
|
||||
* deprecation warning per compilation to avoid deluging users in console
|
||||
@@ -508,6 +549,24 @@ export interface LegacySharedOptions<sync extends 'sync' | 'async'> {
|
||||
* @compatibility dart: "1.43.0", node: false
|
||||
*/
|
||||
logger?: Logger;
|
||||
|
||||
/**
|
||||
* If this option is set to an instance of `NodePackageImporter`, Sass will
|
||||
* use the built-in Node.js package importer to resolve Sass files with a
|
||||
* `pkg:` URL scheme. Details for library authors and users can be found in
|
||||
* the {@link NodePackageImporter} documentation.
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
* sass.renderSync({
|
||||
* data: '@use "pkg:vuetify";',
|
||||
* pkgImporter: new sass.NodePackageImporter()
|
||||
* });
|
||||
* ```
|
||||
* @category Plugins
|
||||
* @compatibility dart: "2.0", node: false
|
||||
*/
|
||||
pkgImporter?: NodePackageImporter;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
11
node_modules/sass/types/legacy/render.d.ts
generated
vendored
@@ -96,6 +96,11 @@ export interface LegacyResult {
|
||||
* This function synchronously compiles a Sass file to CSS. If it succeeds, it
|
||||
* returns the result, and if it fails it throws an error.
|
||||
*
|
||||
* **Heads up!** When using the `sass-embedded` npm package, **{@link render}
|
||||
* is almost always faster than {@link renderSync}**, due to the overhead of
|
||||
* emulating synchronous messaging with worker threads and concurrent
|
||||
* compilations being blocked on the main thread.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```js
|
||||
@@ -116,9 +121,9 @@ export function renderSync(options: LegacyOptions<'sync'>): LegacyResult;
|
||||
* `callback` with a {@link LegacyResult} if compilation succeeds or {@link
|
||||
* LegacyException} if it fails.
|
||||
*
|
||||
* **Heads up!** When using Dart Sass, **{@link renderSync} is almost twice as
|
||||
* fast as {@link render}** by default, due to the overhead of making the entire
|
||||
* evaluation process asynchronous.
|
||||
* **Heads up!** When using the `sass` npm package, **{@link renderSync} is
|
||||
* almost twice as fast as {@link render}** by default, due to the overhead of
|
||||
* making the entire evaluation process asynchronous.
|
||||
*
|
||||
* ```js
|
||||
* const sass = require('sass'); // or require('node-sass');
|
||||
|
||||
45
node_modules/sass/types/logger/index.d.ts
generated
vendored
@@ -1,8 +1,38 @@
|
||||
import {Deprecation} from '../deprecations';
|
||||
import {SourceSpan} from './source_span';
|
||||
|
||||
export {SourceLocation} from './source_location';
|
||||
export {SourceSpan} from './source_span';
|
||||
|
||||
/**
|
||||
* The options passed to {@link Logger.warn}.
|
||||
*
|
||||
* * `deprecation`: Whether this is a deprecation warning.
|
||||
*
|
||||
* * `deprecationType`: The type of deprecation. Only set if `deprecation` is
|
||||
* true.
|
||||
*
|
||||
* * `span`: The location in the Sass source code that generated this warning.
|
||||
* This may be unset if the warning didn't come from Sass source, for
|
||||
* example if it's from a deprecated JavaScript option.
|
||||
*
|
||||
* * `stack`: The Sass stack trace at the point the warning was issued. This may
|
||||
* be unset if the warning didn't come from Sass source, for example if it's
|
||||
* from a deprecated JavaScript option.
|
||||
*
|
||||
* @category Logger
|
||||
*/
|
||||
export type LoggerWarnOptions = (
|
||||
| {
|
||||
deprecation: true;
|
||||
deprecationType: Deprecation;
|
||||
}
|
||||
| {deprecation: false}
|
||||
) & {
|
||||
span?: SourceSpan;
|
||||
stack?: string;
|
||||
};
|
||||
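A sketch of a logger consuming these options, tagging deprecation warnings separately from ordinary ones; the trailing compilation is only there to exercise it.

```ts
import * as sass from 'sass';

const logger: sass.Logger = {
  warn(message, options) {
    // When `options.deprecation` is true, `options.deprecationType` also
    // carries the Deprecation object describing what was deprecated.
    const tag = options.deprecation ? 'deprecation' : 'warning';
    console.error(`[${tag}] ${message}`);
    // `span` and `stack` are only set for warnings that come from Sass
    // source rather than, say, a deprecated JavaScript option.
    if (options.span?.url) console.error(`  at ${options.span.url}`);
    if (options.stack) console.error(options.stack);
  },
};

// `(1px / 2px)` triggers the `slash-div` deprecation warning.
sass.compileString('a {b: (1px / 2px)}', {logger});
```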
|
||||
/**
|
||||
* An object that can be passed to {@link LegacySharedOptions.logger} to control
|
||||
* how Sass emits warnings and debug messages.
|
||||
@@ -41,20 +71,11 @@ export interface Logger {
|
||||
*
|
||||
* If this is `undefined`, Sass will print warnings to standard error.
|
||||
*
|
||||
* `options` may contain the following fields:
|
||||
*
|
||||
* @param message - The warning message.
|
||||
* @param options.deprecation - Whether this is a deprecation warning.
|
||||
* @param options.span - The location in the Sass source code that generated this
|
||||
* warning.
|
||||
* @param options.stack - The Sass stack trace at the point the warning was issued.
|
||||
*/
|
||||
warn?(
|
||||
message: string,
|
||||
options: {
|
||||
deprecation: boolean;
|
||||
span?: SourceSpan;
|
||||
stack?: string;
|
||||
}
|
||||
): void;
|
||||
warn?(message: string, options: LoggerWarnOptions): void;
|
||||
|
||||
/**
|
||||
* This method is called when Sass emits a debug message due to a [`@debug`
|
||||
|
||||
163
node_modules/sass/types/options.d.ts
generated
vendored
@@ -1,4 +1,5 @@
|
||||
import {FileImporter, Importer} from './importer';
|
||||
import {DeprecationOrId, Version} from './deprecations';
|
||||
import {FileImporter, Importer, NodePackageImporter} from './importer';
|
||||
import {Logger} from './logger';
|
||||
import {Value} from './value';
|
||||
import {PromiseOr} from './util/promise_or';
|
||||
@@ -122,6 +123,20 @@ export interface Options<sync extends 'sync' | 'async'> {
|
||||
*/
|
||||
charset?: boolean;
|
||||
|
||||
/**
|
||||
* A set of deprecations to treat as fatal.
|
||||
*
|
||||
* If a deprecation warning of any provided type is encountered during
|
||||
* compilation, the compiler will error instead.
|
||||
*
|
||||
* If a `Version` is provided, then all deprecations that were active in that
|
||||
* compiler version will be treated as fatal.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatiblity dart: "1.74.0", node: false
|
||||
*/
|
||||
fatalDeprecations?: (DeprecationOrId | Version)[];
|
||||
|
||||
/**
|
||||
* Additional built-in Sass functions that are available in all stylesheets.
|
||||
* This option takes an object whose keys are Sass function signatures like
|
||||
@@ -198,6 +213,17 @@ export interface Options<sync extends 'sync' | 'async'> {
|
||||
*/
|
||||
functions?: Record<string, CustomFunction<sync>>;
|
||||
|
||||
/**
|
||||
* A set of future deprecations to opt into early.
|
||||
*
|
||||
* Future deprecations passed here will be treated as active by the compiler,
|
||||
* emitting warnings as necessary.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatiblity dart: "1.74.0", node: false
|
||||
*/
|
||||
futureDeprecations?: DeprecationOrId[];
|
||||
|
||||
/**
|
||||
* Custom importers that control how Sass resolves loads from rules like
|
||||
* [`@use`](https://sass-lang.com/documentation/at-rules/use) and
|
||||
@@ -205,11 +231,28 @@ export interface Options<sync extends 'sync' | 'async'> {
|
||||
*
|
||||
* Loads are resolved by trying, in order:
|
||||
*
|
||||
* - The importer that was used to load the current stylesheet, with the
|
||||
* loaded URL resolved relative to the current stylesheet's canonical URL.
|
||||
* - **For relative URLs only:** the URL resolved relative to the current
|
||||
* stylesheet's canonical URL, passed to the importer that loaded the current
|
||||
* stylesheet.
|
||||
*
|
||||
* - Each {@link Importer} or {@link FileImporter} in {@link importers}, in
|
||||
* order.
|
||||
* When calling {@link compileString} or {@link compileStringAsync}, the
|
||||
* entrypoint file isn't "loaded" in the same sense as other files. In that
|
||||
* case:
|
||||
*
|
||||
* - {@link StringOptions.url} is the canonical URL and {@link
|
||||
* StringOptions.importer} is the importer that loaded it.
|
||||
*
|
||||
* - If {@link StringOptions.importer} isn't passed and {@link
|
||||
* StringOptions.url} is a `file:` URL, the URL is loaded from the
|
||||
* filesystem by default. (You can disable this by passing `{canonicalize:
|
||||
* url => null}` as {@link StringOptions.importer}.)
|
||||
*
|
||||
* - If {@link StringOptions.url} isn't passed but {@link
|
||||
* StringOptions.importer} is, the relative URL is passed to {@link
|
||||
* StringOptions.importer} as-is.
|
||||
*
|
||||
* - Each {@link Importer}, {@link FileImporter}, or
|
||||
* {@link NodePackageImporter} in {@link importers}, in order.
|
||||
*
|
||||
* - Each load path in {@link loadPaths}, in order.
|
||||
*
|
||||
@@ -218,7 +261,7 @@ export interface Options<sync extends 'sync' | 'async'> {
|
||||
*
|
||||
* @category Plugins
|
||||
*/
|
||||
importers?: (Importer<sync> | FileImporter<sync>)[];
|
||||
importers?: (Importer<sync> | FileImporter<sync> | NodePackageImporter)[];
|
||||
|
||||
/**
|
||||
* Paths in which to look for stylesheets loaded by rules like
|
||||
@@ -266,16 +309,30 @@ export interface Options<sync extends 'sync' | 'async'> {
|
||||
* so that they can get fixed as soon as possible!
|
||||
*
|
||||
* **Heads up!** If {@link compileString} or {@link compileStringAsync} is
|
||||
* called without {@link StringOptionsWithoutImporter.url}, <em>all</em>
|
||||
* stylesheets it loads will be considered dependencies. Since it doesn’t have
|
||||
* a path of its own, everything it loads is coming from a load path rather
|
||||
* than a relative import.
|
||||
* called without {@link StringOptions.url}, <em>all</em> stylesheets it loads
|
||||
* will be considered dependencies. Since it doesn’t have a path of its own,
|
||||
* everything it loads is coming from a load path rather than a relative
|
||||
* import.
|
||||
*
|
||||
* @defaultValue `false`
|
||||
* @category Messages
|
||||
*/
|
||||
quietDeps?: boolean;
|
||||
|
||||
/**
|
||||
* A set of active deprecations to ignore.
|
||||
*
|
||||
* If a deprecation warning of any provided type is encountered during
|
||||
* compilation, the compiler will ignore it instead.
|
||||
*
|
||||
* **Heads up!** The deprecated functionality you're depending on will
|
||||
* eventually break.
|
||||
*
|
||||
* @category Messages
|
||||
* @compatiblity dart: "1.74.0", node: false
|
||||
*/
|
||||
silenceDeprecations?: DeprecationOrId[];
|
||||
|
||||
/**
|
||||
* Whether or not Sass should generate a source map. If it does, the source
|
||||
* map will be available as {@link CompileResult.sourceMap}.
|
||||
@@ -348,9 +405,10 @@ export interface Options<sync extends 'sync' | 'async'> {
|
||||
* Options that can be passed to {@link compileString} or {@link
|
||||
* compileStringAsync}.
|
||||
*
|
||||
* If the {@link StringOptionsWithImporter.importer} field isn't passed, the
|
||||
* entrypoint file can load files relative to itself if a `file://` URL is
|
||||
* passed to the {@link url} field.
|
||||
* If the {@link StringOptions.importer} field isn't passed, the entrypoint file
|
||||
* can load files relative to itself if a `file://` URL is passed to the {@link
|
||||
* url} field. If `importer` is passed, the entrypoint file uses that importer
|
||||
* to load files relative to itself.
|
||||
*
|
||||
* @typeParam sync - This lets the TypeScript checker verify that asynchronous
|
||||
* {@link Importer}s, {@link FileImporter}s, and {@link CustomFunction}s aren't
|
||||
@@ -358,7 +416,7 @@ export interface Options<sync extends 'sync' | 'async'> {
|
||||
*
|
||||
* @category Options
|
||||
*/
|
||||
export interface StringOptionsWithoutImporter<sync extends 'sync' | 'async'>
|
||||
export interface StringOptions<sync extends 'sync' | 'async'>
|
||||
extends Options<sync> {
|
||||
/**
|
||||
* The {@link Syntax} to use to parse the entrypoint stylesheet.
|
||||
@@ -370,70 +428,41 @@ export interface StringOptionsWithoutImporter<sync extends 'sync' | 'async'>
|
||||
syntax?: Syntax;
|
||||
|
||||
/**
|
||||
* The canonical URL of the entrypoint stylesheet.
|
||||
* The importer to use to handle relative URL loads in the entrypoint
|
||||
* stylesheet and stylesheets loaded relative to the entrypoint stylesheet.
|
||||
*
|
||||
* A relative load's URL is first resolved relative to {@link url}, then
|
||||
* resolved to a file on disk if it's a `file://` URL. If it can't be resolved
|
||||
* to a file on disk, it's then passed to {@link importers} and {@link
|
||||
* loadPaths}.
|
||||
* See {@link Options.importers} for details on how loads are resolved for the
|
||||
* entrypoint stylesheet.
|
||||
*
|
||||
* @category Input
|
||||
*/
|
||||
importer?: Importer<sync> | FileImporter<sync>;
|
||||
|
||||
/**
|
||||
* The canonical URL of the entrypoint stylesheet.
|
||||
*
|
||||
* See {@link Options.importers} for details on how loads are resolved for the
|
||||
* entrypoint stylesheet.
|
||||
*
|
||||
* @category Input
|
||||
* @compatibility feature: "Undefined URL with importer", dart: "1.75.0", node: false
|
||||
*
|
||||
* Earlier versions of Dart Sass required {@link url} to be defined when
|
||||
* passing {@link StringOptions.importer}.
|
||||
*/
|
||||
url?: URL;
|
||||
}
|
||||
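A sketch tying `url` and relative loading together: compiling an in-memory copy of a stylesheet as if it still lived at its original (hypothetical) path, so that relative `@use` rules inside it keep resolving on disk.

```ts
import * as fs from 'node:fs';
import {pathToFileURL} from 'node:url';
import * as sass from 'sass';

const entry = 'src/styles/main.scss';
const source = fs.readFileSync(entry, 'utf8');

const result = sass.compileString(source, {
  // With a `file:` URL and no explicit importer, relative loads such as
  // `@use "./variables"` are resolved on the filesystem next to `entry`.
  url: pathToFileURL(entry),
  syntax: 'scss',
});
```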
|
||||
/**
|
||||
* Options that can be passed to {@link compileString} or {@link
|
||||
* compileStringAsync}.
|
||||
*
|
||||
* If the {@link StringOptionsWithImporter.importer} field is passed, the
|
||||
* entrypoint file uses it to load files relative to itself and the {@link url}
|
||||
* field is mandatory.
|
||||
*
|
||||
* @typeParam sync - This lets the TypeScript checker verify that asynchronous
|
||||
* {@link Importer}s, {@link FileImporter}s, and {@link CustomFunction}s aren't
|
||||
* passed to {@link compile} or {@link compileString}.
|
||||
*
|
||||
* @category Options
|
||||
* @deprecated Use {@link StringOptions} instead.
|
||||
*/
|
||||
export interface StringOptionsWithImporter<sync extends 'sync' | 'async'>
|
||||
extends StringOptionsWithoutImporter<sync> {
|
||||
/**
|
||||
* The importer to use to handle loads that are relative to the entrypoint
|
||||
* stylesheet.
|
||||
*
|
||||
* A relative load's URL is first resolved relative to {@link url}, then
|
||||
* passed to {@link importer}. If the importer doesn't recognize it, it's then
|
||||
* passed to {@link importers} and {@link loadPaths}.
|
||||
*
|
||||
* @category Input
|
||||
*/
|
||||
importer: Importer<sync> | FileImporter<sync>;
|
||||
|
||||
/**
|
||||
* The canonical URL of the entrypoint stylesheet. If this is passed along
|
||||
* with {@link importer}, it's used to resolve relative loads in the
|
||||
* entrypoint stylesheet.
|
||||
*
|
||||
* @category Input
|
||||
*/
|
||||
url: URL;
|
||||
}
|
||||
type StringOptionsWithoutImporter<sync extends 'sync' | 'async'> =
|
||||
StringOptions<sync>;
|
||||
|
||||
/**
|
||||
* Options that can be passed to {@link compileString} or {@link
|
||||
* compileStringAsync}.
|
||||
*
|
||||
* This is a {@link StringOptionsWithImporter} if it has a {@link
|
||||
* StringOptionsWithImporter.importer} field, and a {@link
|
||||
* StringOptionsWithoutImporter} otherwise.
|
||||
*
|
||||
* @typeParam sync - This lets the TypeScript checker verify that asynchronous
|
||||
* {@link Importer}s, {@link FileImporter}s, and {@link CustomFunction}s aren't
|
||||
* passed to {@link compile} or {@link compileString}.
|
||||
*
|
||||
* @category Options
|
||||
* @deprecated Use {@link StringOptions} instead.
|
||||
*/
|
||||
export type StringOptions<sync extends 'sync' | 'async'> =
|
||||
| StringOptionsWithImporter<sync>
|
||||
| StringOptionsWithoutImporter<sync>;
|
||||
type StringOptionsWithImporter<sync extends 'sync' | 'async'> =
|
||||
StringOptions<sync>;
|
||||
|
||||
575
node_modules/sass/types/value/color.d.ts
generated
vendored
@@ -1,5 +1,103 @@
|
||||
import {List} from 'immutable';
|
||||
|
||||
import {Value} from './index';
|
||||
|
||||
/** The HSL color space name. */
|
||||
export type ColorSpaceHsl = 'hsl';
|
||||
|
||||
/** The HSL color space channel names. */
|
||||
export type ChannelNameHsl = 'hue' | 'saturation' | 'lightness' | 'alpha';
|
||||
|
||||
/** The HWB color space name. */
|
||||
export type ColorSpaceHwb = 'hwb';
|
||||
|
||||
/** The HWB color space channel names. */
|
||||
export type ChannelNameHwb = 'hue' | 'whiteness' | 'blackness' | 'alpha';
|
||||
|
||||
/** The Lab / Oklab color space names. */
|
||||
export type ColorSpaceLab = 'lab' | 'oklab';
|
||||
|
||||
/** The Lab / Oklab color space channel names. */
|
||||
export type ChannelNameLab = 'lightness' | 'a' | 'b' | 'alpha';
|
||||
|
||||
/** The LCH / Oklch color space names. */
|
||||
export type ColorSpaceLch = 'lch' | 'oklch';
|
||||
|
||||
/** The LCH / Oklch color space channel names. */
|
||||
export type ChannelNameLch = 'lightness' | 'chroma' | 'hue' | 'alpha';
|
||||
|
||||
/** Names of color spaces with RGB channels. */
|
||||
export type ColorSpaceRgb =
|
||||
| 'a98-rgb'
|
||||
| 'display-p3'
|
||||
| 'prophoto-rgb'
|
||||
| 'rec2020'
|
||||
| 'rgb'
|
||||
| 'srgb'
|
||||
| 'srgb-linear';
|
||||
|
||||
/** RGB channel names. */
|
||||
export type ChannelNameRgb = 'red' | 'green' | 'blue' | 'alpha';
|
||||
|
||||
/** Names of color spaces with XYZ channels. */
|
||||
export type ColorSpaceXyz = 'xyz' | 'xyz-d50' | 'xyz-d65';
|
||||
|
||||
/** XYZ channel names. */
|
||||
export type ChannelNameXyz = 'x' | 'y' | 'z' | 'alpha';
|
||||
|
||||
/** All supported color space channel names. */
|
||||
export type ChannelName =
|
||||
| ChannelNameHsl
|
||||
| ChannelNameHwb
|
||||
| ChannelNameLab
|
||||
| ChannelNameLch
|
||||
| ChannelNameRgb
|
||||
| ChannelNameXyz;
|
||||
|
||||
/** All supported color space names. */
|
||||
export type KnownColorSpace =
|
||||
| ColorSpaceHsl
|
||||
| ColorSpaceHwb
|
||||
| ColorSpaceLab
|
||||
| ColorSpaceLch
|
||||
| ColorSpaceRgb
|
||||
| ColorSpaceXyz;
|
||||
|
||||
/** Polar color space names (HSL, HWB, LCH, and Oklch spaces). */
|
||||
export type PolarColorSpace = ColorSpaceHsl | ColorSpaceHwb | ColorSpaceLch;
|
||||
|
||||
/** Rectangular color space names (Lab, Oklab, RGB, and XYZ spaces). */
|
||||
export type RectangularColorSpace = Exclude<KnownColorSpace, PolarColorSpace>;
|
||||
|
||||
/**
|
||||
* Methods by which two hues are adjusted when interpolating between polar
|
||||
* colors.
|
||||
*/
|
||||
export type HueInterpolationMethod =
|
||||
| 'decreasing'
|
||||
| 'increasing'
|
||||
| 'longer'
|
||||
| 'shorter';
|
||||
|
||||
/**
|
||||
* Methods by which colors in bounded spaces can be mapped to within their
|
||||
* gamut.
|
||||
*
|
||||
* * `local-minde`: The algorithm specified in [the original Color Level 4
|
||||
* candidate recommendation]. This maps in the Oklch color space, using the
|
||||
* [deltaEOK] color difference formula and the [local-MINDE] improvement.
|
||||
*
|
||||
* * `clip`: Clamp each color channel that's outside the gamut to the minimum or
|
||||
* maximum value for that channel. This algorithm will produce poor visual
|
||||
* results, but it may be useful to match the behavior of other situations in
|
||||
* which a color can be clipped.
|
||||
*
|
||||
* [the original Color Level 4 candidate recommendation]: https://www.w3.org/TR/2024/CRD-css-color-4-20240213/#css-gamut-mapping
|
||||
* [deltaEOK]: https://www.w3.org/TR/2024/CRD-css-color-4-20240213/#color-difference-OK
|
||||
* [local-MINDE]: https://www.w3.org/TR/2024/CRD-css-color-4-20240213/#GM-chroma-local-MINDE
|
||||
*/
|
||||
export type GamutMapMethod = 'clip' | 'local-minde';
|
||||
|
||||
/**
|
||||
* Sass's [color type](https://sass-lang.com/documentation/values/colors).
|
||||
*
|
||||
@@ -10,98 +108,459 @@ import {Value} from './index';
|
||||
*/
|
||||
export class SassColor extends Value {
|
||||
/**
|
||||
* Creates an RGB color.
|
||||
* Creates an [RGB color].
|
||||
*
|
||||
* @throws `Error` if `red`, `green`, and `blue` aren't between `0` and
|
||||
* `255`, or if `alpha` isn't between `0` and `1`.
|
||||
* If `space` is missing, **only** `undefined` should be used to indicate that
|
||||
* `alpha` isn't passed. If `null` is used instead, it will be treated as a
|
||||
* [missing component]. See [breaking changes] for details.
|
||||
*
|
||||
* If `space` is defined and `null` is passed for any component, it will be
|
||||
* treated as a [missing component].
|
||||
*
|
||||
* [RGB color]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/rgb
|
||||
* [missing component]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
|
||||
* [breaking changes]: /documentation/breaking-changes/null-alpha
|
||||
*
|
||||
* @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
|
||||
* and `1`.
|
||||
*/
|
||||
constructor(options: {
|
||||
red: number;
|
||||
green: number;
|
||||
blue: number;
|
||||
alpha?: number;
|
||||
red: number | null;
|
||||
green: number | null;
|
||||
blue: number | null;
|
||||
alpha?: number | null;
|
||||
space?: 'rgb';
|
||||
});
|
||||
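A sketch of the alpha rules above in the legacy `rgb` space: leaving `alpha` out (or passing `undefined`) yields an opaque color, while `null` marks the channel as a missing component.

```ts
import {SassColor} from 'sass';

// Opaque rebeccapurple: alpha simply isn't passed.
const opaque = new SassColor({red: 102, green: 51, blue: 153});

// Same channels, but alpha is explicitly a missing component.
const missingAlpha = new SassColor({
  red: 102,
  green: 51,
  blue: 153,
  alpha: null,
  space: 'rgb',
});
```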
|
||||
/**
|
||||
* Creates an HSL color.
|
||||
* Creates an [HSL color].
|
||||
*
|
||||
* @throws `Error` if `saturation` or `lightness` aren't between `0` and
|
||||
* `100`, or if `alpha` isn't between `0` and `1`.
|
||||
* If `space` is missing, **only** `undefined` should be used to indicate that
|
||||
* `alpha` isn't passed. If `null` is used instead, it will be treated as a
|
||||
* [missing component]. See [breaking changes] for details.
|
||||
*
|
||||
* If `space` is defined and `null` is passed for any component, it will be
|
||||
* treated as a [missing component].
|
||||
*
|
||||
* [HSL color]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/hsl
|
||||
* [missing component]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
|
||||
* [breaking changes]: /documentation/breaking-changes/null-alpha
|
||||
*
|
||||
* @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
|
||||
* and `1`.
|
||||
*/
|
||||
constructor(options: {
|
||||
hue: number;
|
||||
saturation: number;
|
||||
lightness: number;
|
||||
alpha?: number;
|
||||
hue: number | null;
|
||||
saturation: number | null;
|
||||
lightness: number | null;
|
||||
alpha?: number | null;
|
||||
space?: ColorSpaceHsl;
|
||||
});
|
||||
|
||||
/**
|
||||
* Creates an HWB color.
|
||||
* Creates an [HWB color].
|
||||
*
|
||||
* @throws `Error` if `whiteness` or `blackness` aren't between `0` and `100`,
|
||||
* or if `alpha` isn't between `0` and `1`.
|
||||
* If `space` is missing, **only** `undefined` should be used to indicate that
|
||||
* `alpha` isn't passed. If `null` is used instead, it will be treated as a
|
||||
* [missing component]. See [breaking changes] for details.
|
||||
*
|
||||
* If `space` is defined and `null` is passed for any component, it will be
|
||||
* treated as a [missing component].
|
||||
*
|
||||
* [HWB color]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/hwb
|
||||
* [missing component]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
|
||||
* [breaking changes]: /documentation/breaking-changes/null-alpha
|
||||
*
|
||||
* @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
|
||||
* and `1`.
|
||||
*/
|
||||
constructor(options: {
|
||||
hue: number;
|
||||
whiteness: number;
|
||||
blackness: number;
|
||||
alpha?: number;
|
||||
hue: number | null;
|
||||
whiteness: number | null;
|
||||
blackness: number | null;
|
||||
alpha?: number | null;
|
||||
space?: ColorSpaceHwb;
|
||||
});
|
||||
|
||||
/** This color's red channel, between `0` and `255`. */
|
||||
get red(): number;
|
||||
/**
|
||||
* Creates a [Lab] or [Oklab] color.
|
||||
*
|
||||
* If `null` is passed for any component, it will be treated as a [missing
|
||||
* component].
|
||||
*
|
||||
* [Lab]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/lab
|
||||
* [Oklab]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/oklab
|
||||
* [missing component]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
|
||||
*
|
||||
* @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
|
||||
* and `1`.
|
||||
*/
|
||||
constructor(options: {
|
||||
lightness: number | null;
|
||||
a: number | null;
|
||||
b: number | null;
|
||||
alpha?: number | null;
|
||||
space: ColorSpaceLab;
|
||||
});

  /** This color's green channel, between `0` and `255`. */
  get green(): number;
  /**
   * Creates an [LCH] or [Oklch] color.
   *
   * If `null` is passed for any component, it will be treated as a [missing
   * component].
   *
   * [LCH]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/lch
   * [Oklch]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/oklch
   * [missing component]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   *
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  constructor(options: {
    lightness: number | null;
    chroma: number | null;
    hue: number | null;
    alpha?: number | null;
    space: ColorSpaceLch;
  });
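
  // Usage sketch (assumes `ColorSpaceLch` covers the literals 'lch' and 'oklch'):
  //
  //   const violet = new SassColor({lightness: 50, chroma: 70, hue: 300, space: 'lch'});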

  /** This color's blue channel, between `0` and `255`. */
  get blue(): number;
  /**
   * Creates a color in a predefined [RGB color space].
   *
   * If `null` is passed for any component, it will be treated as a [missing
   * component].
   *
   * [RGB color space]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/color#using_predefined_colorspaces_with_color
   * [missing component]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   *
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  constructor(options: {
    red: number | null;
    green: number | null;
    blue: number | null;
    alpha?: number | null;
    space: Exclude<ColorSpaceRgb, 'rgb'>;
  });
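
  // Usage sketch: `Exclude<ColorSpaceRgb, 'rgb'>` means the legacy 'rgb' name is not
  // accepted here, only predefined spaces (assumed to include 'srgb' and 'display-p3',
  // whose channels are assumed to use a 0-1 scale rather than 0-255).
  //
  //   const p3Green = new SassColor({red: 0, green: 1, blue: 0, space: 'display-p3'});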

  /** This color's hue, between `0` and `360`. */
  get hue(): number;
  /**
   * Creates a color in a predefined [XYZ color space].
   *
   * If `null` is passed for any component, it will be treated as a [missing
   * component].
   *
   * [XYZ color space]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value/color#using_the_xyz_colorspace_with_color
   * [missing component]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   *
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  constructor(options: {
    x: number | null;
    y: number | null;
    z: number | null;
    alpha?: number | null;
    space: ColorSpaceXyz;
  });
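
  // Usage sketch (assumes `ColorSpaceXyz` includes the literal 'xyz'; the values below
  // approximate the D65 white point):
  //
  //   const d65White = new SassColor({x: 0.9505, y: 1, z: 1.089, space: 'xyz'});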

  /** This color's saturation, between `0` and `100`. */
  get saturation(): number;
  /** The name of this color's space. */
  get space(): KnownColorSpace;

  /** This color's lightness, between `0` and `100`. */
  get lightness(): number;
  /**
   * Returns a new color that's the result of converting this color to the
   * specified `space`.
   */
  toSpace(space: KnownColorSpace): SassColor;
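
  // Usage sketch (assumes 'oklch' is a valid `KnownColorSpace` name):
  //
  //   const violetOklch = violet.toSpace('oklch');
  //   violetOklch.space; // 'oklch'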

  /** This color's whiteness, between `0` and `100`. */
  get whiteness(): number;
  /**
   * A boolean indicating whether this color is in a legacy color space (`rgb`,
   * `hsl`, or `hwb`).
   */
  get isLegacy(): boolean;

  /** This color's blackness, between `0` and `100`. */
  get blackness(): number;
  /**
   * Returns a boolean indicating whether this color is in-gamut (as opposed to
   * having one or more of its channels out of bounds) for the specified
   * `space`, or its current color space if `space` is not specified.
   */
  isInGamut(space?: KnownColorSpace): boolean;
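
  // Usage sketch: a fully saturated display-p3 green is in-gamut for its own space,
  // but should fall outside the narrower sRGB gamut.
  //
  //   p3Green.isInGamut();       // true
  //   p3Green.isInGamut('srgb'); // false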

  /**
   * Returns a copy of this color, modified so it is in-gamut for the specified
   * `space`—or the current color space if `space` is not specified—using
   * `method` to map out-of-gamut colors into the desired gamut.
   */
  toGamut(options: {
    space?: KnownColorSpace;
    method: GamutMapMethod;
  }): SassColor;
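
  // Usage sketch (assumes 'clip' is one of the `GamutMapMethod` values):
  //
  //   const srgbSafe = p3Green.toGamut({space: 'srgb', method: 'clip'});
  //   srgbSafe.isInGamut('srgb'); // true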

  /**
   * A list of this color's channel values (excluding alpha), with [missing
   * channels] converted to `null`.
   *
   * [missing channels]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   */
  get channelsOrNull(): List<number | null>;

  /**
   * A list of this color's channel values (excluding alpha), with [missing
   * channels] converted to `0`.
   *
   * [missing channels]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   */
  get channels(): List<number>;
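
  // Usage sketch, continuing the earlier `gray` example (hue was passed as `null`):
  //
  //   gray.channelsOrNull; // hue stays null, e.g. [null, 0, 50]
  //   gray.channels;       // hue coerced to 0, e.g. [0, 0, 50]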

  /**
   * Returns the value of a single specified `channel` of this color, with
   * [missing channels] converted to `0`.
   *
   * [missing channels]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   *
   * @throws `Error` if `channel` is not `alpha` or a channel in this color's
   * space.
   */
  channel(channel: ChannelName): number;

  /**
   * Returns the value of a single specified `channel` of this color after
   * converting this color to the specified `space`, with [missing channels]
   * converted to `0`.
   *
   * [missing channels]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   *
   * @throws `Error` if `channel` is not `alpha` or a channel in `space`.
   */
  channel(channel: ChannelNameHsl, options: {space: ColorSpaceHsl}): number;
  channel(channel: ChannelNameHwb, options: {space: ColorSpaceHwb}): number;
  channel(channel: ChannelNameLab, options: {space: ColorSpaceLab}): number;
  channel(channel: ChannelNameLch, options: {space: ColorSpaceLch}): number;
  channel(channel: ChannelNameRgb, options: {space: ColorSpaceRgb}): number;
  channel(channel: ChannelNameXyz, options: {space: ColorSpaceXyz}): number;
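
  // Usage sketch: read a channel in the color's own space, or convert first by
  // naming a space (channel and space string literals assumed).
  //
  //   violet.channel('hue');                         // hue in LCH, the color's own space
  //   violet.channel('lightness', {space: 'oklch'}); // converted to Oklch, then read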

  /** This color's alpha channel, between `0` and `1`. */
  get alpha(): number;

  /**
   * Changes one or more of this color's RGB channels and returns the result.
   * Returns a boolean indicating whether a given channel value is a [missing
   * channel].
   *
   * [missing channel]: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#missing_color_components
   */
  change(options: {
    red?: number;
    green?: number;
    blue?: number;
    alpha?: number;
  }): SassColor;
  isChannelMissing(channel: ChannelName): boolean;
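
  // Usage sketch, again with `gray` (constructed with `hue: null`):
  //
  //   gray.isChannelMissing('hue');        // true
  //   gray.isChannelMissing('saturation'); // false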

  /**
   * Changes one or more of this color's HSL channels and returns the result.
   * Returns a boolean indicating whether a given `channel` is [powerless] in
   * this color. This is a special state that's defined for individual color
   * spaces, which indicates that a channel's value won't affect how a color is
   * displayed.
   *
   * [powerless]: https://www.w3.org/TR/css-color-4/#powerless
   */
  change(options: {
    hue?: number;
    saturation?: number;
    lightness?: number;
    alpha?: number;
  }): SassColor;
  isChannelPowerless(channel: ChannelName): boolean;
  isChannelPowerless(
    channel: ChannelNameHsl,
    options?: {space: ColorSpaceHsl}
  ): boolean;
  isChannelPowerless(
    channel: ChannelNameHwb,
    options?: {space: ColorSpaceHwb}
  ): boolean;
  isChannelPowerless(
    channel: ChannelNameLab,
    options?: {space: ColorSpaceLab}
  ): boolean;
  isChannelPowerless(
    channel: ChannelNameLch,
    options?: {space: ColorSpaceLch}
  ): boolean;
  isChannelPowerless(
    channel: ChannelNameRgb,
    options?: {space: ColorSpaceRgb}
  ): boolean;
  isChannelPowerless(
    channel: ChannelNameXyz,
    options?: {space: ColorSpaceXyz}
  ): boolean;
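
  // Usage sketch: CSS Color 4 treats HSL hue as powerless when saturation is 0, so the
  // `gray` example's hue should report as powerless (behavior assumed to follow that
  // spec rule).
  //
  //   gray.isChannelPowerless('hue');                 // true
  //   gray.isChannelPowerless('hue', {space: 'hsl'}); // same check, space made explicit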

  /**
   * Changes one or more of this color's HWB channels and returns the result.
   * Returns a color partway between this color and `color2` according to
   * `method`, as defined by the CSS Color 4 [color interpolation] procedure.
   *
   * [color interpolation]: https://www.w3.org/TR/css-color-4/#interpolation
   *
   * If `method` is missing and this color is in a rectangular color space (Lab,
   * Oklab, RGB, and XYZ spaces), `method` defaults to the color space of this
   * color. Otherwise, `method` defaults to a space separated list containing
   * the color space of this color and the string "shorter".
   *
   * The `weight` is a number between 0 and 1 that indicates how much of this
   * color should be in the resulting color. If omitted, it defaults to 0.5.
   */
  change(options: {
    hue?: number;
    whiteness?: number;
    blackness?: number;
    alpha?: number;
  }): SassColor;
  interpolate(
    color2: SassColor,
    options?: {
      weight?: number;
      method?: HueInterpolationMethod;
    }
  ): SassColor;
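
  // Usage sketch: 25% `violet` and 75% `mint`, taking the longer way around the hue
  // wheel ('longer' assumed to be one of the `HueInterpolationMethod` values).
  //
  //   const blend = violet.interpolate(mint, {weight: 0.25, method: 'longer'});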

  /**
   * Returns a new color that's the result of changing one or more of this
   * color's HSL channels.
   *
   * @throws `Error` if `space` is missing and this color is not in a legacy
   * color space (`rgb`, `hsl`, or `hwb`).
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  change(
    options: {
      [key in ChannelNameHsl]?: number | null;
    } & {
      space?: ColorSpaceHsl;
    }
  ): SassColor;
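
  // Usage sketch: change channels in the color's own space, or name a space so the
  // channels are interpreted in that space instead (string literals assumed).
  //
  //   const duller  = gray.change({saturation: 20});
  //   const shifted = mint.change({hue: 200, space: 'hsl'});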

  /**
   * Returns a new color that's the result of changing one or more of this
   * color's HWB channels.
   *
   * @throws `Error` if `space` is missing and this color is not in a legacy
   * color space (`rgb`, `hsl`, or `hwb`).
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  change(
    options: {
      [key in ChannelNameHwb]?: number | null;
    } & {
      space?: ColorSpaceHwb;
    }
  ): SassColor;

  /**
   * Returns a new color that's the result of changing one or more of this
   * color's Lab channels.
   *
   * @throws `Error` if `space` is missing and this color is not in the Lab or
   * Oklab color spaces.
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  change(
    options: {
      [key in ChannelNameLab]?: number | null;
    } & {
      space?: ColorSpaceLab;
    }
  ): SassColor;

  /**
   * Returns a new color that's the result of changing one or more of this
   * color's LCH channels.
   *
   * @throws `Error` if `space` is missing and this color is not in the LCH or
   * Oklch color spaces.
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  change(
    options: {
      [key in ChannelNameLch]?: number | null;
    } & {
      space?: ColorSpaceLch;
    }
  ): SassColor;

  /**
   * Returns a new color that's the result of changing one or more of this
   * color's RGB channels.
   *
   * @throws `Error` if `space` is missing and this color is not in a legacy
   * color space (`rgb`, `hsl`, or `hwb`).
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  change(
    options: {
      [key in ChannelNameRgb]?: number | null;
    } & {
      space?: ColorSpaceRgb;
    }
  ): SassColor;

  /**
   * Returns a new color that's the result of changing one or more of this
   * color's XYZ channels.
   *
   * @throws `Error` if `space` is missing and this color is not in an XYZ color
   * space.
   * @throws `Error` if `alpha` is set and isn't `null` or a number between `0`
   * and `1`.
   */
  change(
    options: {
      [key in ChannelNameXyz]?: number | null;
    } & {
      space?: ColorSpaceXyz;
    }
  ): SassColor;

  /**
   * This color's red channel in the RGB color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get red(): number;
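
  // Migration sketch for these deprecated legacy getters: prefer `channel`, which also
  // works outside the legacy spaces.
  //
  //   color.red;                            // deprecated
  //   color.channel('red');                 // preferred
  //   color.channel('red', {space: 'rgb'}); // explicit about the space being read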

  /**
   * This color's green channel in the RGB color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get green(): number;

  /**
   * This color's blue channel in the RGB color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get blue(): number;

  /**
   * This color's hue in the HSL color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get hue(): number;

  /**
   * This color's saturation in the HSL color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get saturation(): number;

  /**
   * This color's lightness in the HSL color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get lightness(): number;

  /**
   * This color's whiteness in the HWB color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get whiteness(): number;

  /**
   * This color's blackness in the HWB color space.
   *
   * @deprecated Use {@link channel} instead.
   */
  get blackness(): number;
}

32
node_modules/sass/types/value/index.d.ts
generated
vendored
@@ -6,6 +6,7 @@ import {SassColor} from './color';
import {SassFunction} from './function';
import {ListSeparator} from './list';
import {SassMap} from './map';
import {SassMixin} from './mixin';
import {SassNumber} from './number';
import {SassString} from './string';

@@ -18,10 +19,31 @@ export {
  CalculationOperation,
  CalculationInterpolation,
} from './calculation';
export {SassColor} from './color';
export {
  SassColor,
  ColorSpaceHsl,
  ChannelNameHsl,
  ColorSpaceHwb,
  ChannelNameHwb,
  ColorSpaceLab,
  ChannelNameLab,
  ColorSpaceLch,
  ChannelNameLch,
  ColorSpaceRgb,
  ChannelNameRgb,
  ColorSpaceXyz,
  ChannelNameXyz,
  ChannelName,
  GamutMapMethod,
  KnownColorSpace,
  PolarColorSpace,
  RectangularColorSpace,
  HueInterpolationMethod,
} from './color';
export {SassFunction} from './function';
export {SassList, ListSeparator} from './list';
export {SassMap} from './map';
export {SassMixin} from './mixin';
export {SassNumber} from './number';
export {SassString} from './string';

@@ -156,6 +178,14 @@ export abstract class Value implements ValueObject {
   */
  assertMap(name?: string): SassMap;

  /**
   * Throws if `this` isn't a {@link SassMixin}.
   *
   * @param name - The name of the function argument `this` came from (without
   * the `$`) if it came from an argument. Used for error reporting.
   */
  assertMixin(name?: string): SassMixin;
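
  // Usage sketch inside a custom function (the `args` array of `Value`s is assumed):
  // assert that the first argument is a mixin value, reporting errors against `$mixin`.
  //
  //   const mixin = args[0].assertMixin('mixin');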

  /**
   * Throws if `this` isn't a {@link SassNumber}.
   *
14
node_modules/sass/types/value/mixin.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
import {Value} from './index';

/**
 * Sass's [mixin type](https://sass-lang.com/documentation/values/mixins).
 *
 * @category Custom Function
 */
export class SassMixin extends Value {
  /**
   * It is not possible to construct a Sass mixin outside of Sass. Attempting to
   * construct one will throw an exception.
   */
  constructor();
}
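
// Usage sketch (the `functions` compile option is part of the sass JS API; the
// 'passthrough($m)' signature here is a hypothetical example): mixin values can only
// originate in Sass, so JS code receives and returns them rather than creating them;
// `new SassMixin()` throws.
//
//   import {compileString, Value} from 'sass';
//   compileString('...', {
//     functions: {
//       'passthrough($m)': (args: Value[]) => args[0].assertMixin('m'),
//     },
//   });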