config 2/3

parent 40188150cf
commit e752af5fc8

node_modules/.bin/pino (generated, vendored, 16 lines deleted)
@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*)
        if command -v cygpath > /dev/null 2>&1; then
            basedir=`cygpath -w "$basedir"`
        fi
    ;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../pino/bin.js" "$@"
else
  exec node "$basedir/../pino/bin.js" "$@"
fi

node_modules/.bin/pino-pretty (generated, vendored, 16 lines deleted)
@@ -1,16 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*)
        if command -v cygpath > /dev/null 2>&1; then
            basedir=`cygpath -w "$basedir"`
        fi
    ;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../pino-pretty/bin.js" "$@"
else
  exec node "$basedir/../pino-pretty/bin.js" "$@"
fi

node_modules/.bin/pino-pretty.cmd (generated, vendored, 17 lines deleted)
@@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\pino-pretty\bin.js" %*

node_modules/.bin/pino-pretty.ps1 (generated, vendored, 28 lines deleted)
@@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../pino-pretty/bin.js" $args
  } else {
    & "$basedir/node$exe" "$basedir/../pino-pretty/bin.js" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../pino-pretty/bin.js" $args
  } else {
    & "node$exe" "$basedir/../pino-pretty/bin.js" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret

node_modules/.bin/pino.cmd (generated, vendored, 17 lines deleted)
@@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\pino\bin.js" %*

node_modules/.bin/pino.ps1 (generated, vendored, 28 lines deleted)
@@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../pino/bin.js" $args
  } else {
    & "$basedir/node$exe" "$basedir/../pino/bin.js" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../pino/bin.js" $args
  } else {
    & "node$exe" "$basedir/../pino/bin.js" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret

node_modules/.package-lock.json (generated, vendored, 281 lines deleted)
@@ -1,281 +0,0 @@
{
  "name": "polymech-astro",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "node_modules/@pinojs/redact": {
      "version": "0.4.0",
      "resolved": "https://registry.npmjs.org/@pinojs/redact/-/redact-0.4.0.tgz",
      "integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==",
      "license": "MIT"
    },
    "node_modules/atomic-sleep": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz",
      "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==",
      "license": "MIT",
      "engines": {
        "node": ">=8.0.0"
      }
    },
    "node_modules/colorette": {
      "version": "2.0.20",
      "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
      "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==",
      "license": "MIT"
    },
    "node_modules/dateformat": {
      "version": "4.6.3",
      "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz",
      "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==",
      "license": "MIT",
      "engines": {
        "node": "*"
      }
    },
    "node_modules/end-of-stream": {
      "version": "1.4.5",
      "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
      "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
      "license": "MIT",
      "dependencies": {
        "once": "^1.4.0"
      }
    },
    "node_modules/fast-copy": {
      "version": "4.0.2",
      "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-4.0.2.tgz",
      "integrity": "sha512-ybA6PDXIXOXivLJK/z9e+Otk7ve13I4ckBvGO5I2RRmBU1gMHLVDJYEuJYhGwez7YNlYji2M2DvVU+a9mSFDlw==",
      "license": "MIT"
    },
    "node_modules/fast-safe-stringify": {
      "version": "2.1.1",
      "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz",
      "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==",
      "license": "MIT"
    },
    "node_modules/help-me": {
      "version": "5.0.0",
      "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz",
      "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==",
      "license": "MIT"
    },
    "node_modules/joycon": {
      "version": "3.1.1",
      "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz",
      "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==",
      "license": "MIT",
      "engines": {
        "node": ">=10"
      }
    },
    "node_modules/minimist": {
      "version": "1.2.8",
      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
      "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
      "license": "MIT",
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/on-exit-leak-free": {
      "version": "2.1.2",
      "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz",
      "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==",
      "license": "MIT",
      "engines": {
        "node": ">=14.0.0"
      }
    },
    "node_modules/once": {
      "version": "1.4.0",
      "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
      "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
      "license": "ISC",
      "dependencies": {
        "wrappy": "1"
      }
    },
    "node_modules/pino": {
      "version": "10.1.0",
      "resolved": "https://registry.npmjs.org/pino/-/pino-10.1.0.tgz",
      "integrity": "sha512-0zZC2ygfdqvqK8zJIr1e+wT1T/L+LF6qvqvbzEQ6tiMAoTqEVK9a1K3YRu8HEUvGEvNqZyPJTtb2sNIoTkB83w==",
      "license": "MIT",
      "dependencies": {
        "@pinojs/redact": "^0.4.0",
        "atomic-sleep": "^1.0.0",
        "on-exit-leak-free": "^2.1.0",
        "pino-abstract-transport": "^2.0.0",
        "pino-std-serializers": "^7.0.0",
        "process-warning": "^5.0.0",
        "quick-format-unescaped": "^4.0.3",
        "real-require": "^0.2.0",
        "safe-stable-stringify": "^2.3.1",
        "sonic-boom": "^4.0.1",
        "thread-stream": "^3.0.0"
      },
      "bin": {
        "pino": "bin.js"
      }
    },
    "node_modules/pino-abstract-transport": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz",
      "integrity": "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==",
      "license": "MIT",
      "dependencies": {
        "split2": "^4.0.0"
      }
    },
    "node_modules/pino-pretty": {
      "version": "13.1.3",
      "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.3.tgz",
      "integrity": "sha512-ttXRkkOz6WWC95KeY9+xxWL6AtImwbyMHrL1mSwqwW9u+vLp/WIElvHvCSDg0xO/Dzrggz1zv3rN5ovTRVowKg==",
      "license": "MIT",
      "dependencies": {
        "colorette": "^2.0.7",
        "dateformat": "^4.6.3",
        "fast-copy": "^4.0.0",
        "fast-safe-stringify": "^2.1.1",
        "help-me": "^5.0.0",
        "joycon": "^3.1.1",
        "minimist": "^1.2.6",
        "on-exit-leak-free": "^2.1.0",
        "pino-abstract-transport": "^3.0.0",
        "pump": "^3.0.0",
        "secure-json-parse": "^4.0.0",
        "sonic-boom": "^4.0.1",
        "strip-json-comments": "^5.0.2"
      },
      "bin": {
        "pino-pretty": "bin.js"
      }
    },
    "node_modules/pino-pretty/node_modules/pino-abstract-transport": {
      "version": "3.0.0",
      "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz",
      "integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==",
      "license": "MIT",
      "dependencies": {
        "split2": "^4.0.0"
      }
    },
    "node_modules/pino-std-serializers": {
      "version": "7.0.0",
      "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz",
      "integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==",
      "license": "MIT"
    },
    "node_modules/process-warning": {
      "version": "5.0.0",
      "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz",
      "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==",
      "funding": [
        {
          "type": "github",
          "url": "https://github.com/sponsors/fastify"
        },
        {
          "type": "opencollective",
          "url": "https://opencollective.com/fastify"
        }
      ],
      "license": "MIT"
    },
    "node_modules/pump": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz",
      "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==",
      "license": "MIT",
      "dependencies": {
        "end-of-stream": "^1.1.0",
        "once": "^1.3.1"
      }
    },
    "node_modules/quick-format-unescaped": {
      "version": "4.0.4",
      "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz",
      "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==",
      "license": "MIT"
    },
    "node_modules/real-require": {
      "version": "0.2.0",
      "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz",
      "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==",
      "license": "MIT",
      "engines": {
        "node": ">= 12.13.0"
      }
    },
    "node_modules/safe-stable-stringify": {
      "version": "2.5.0",
      "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz",
      "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==",
      "license": "MIT",
      "engines": {
        "node": ">=10"
      }
    },
    "node_modules/secure-json-parse": {
      "version": "4.1.0",
      "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.1.0.tgz",
      "integrity": "sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==",
      "funding": [
        {
          "type": "github",
          "url": "https://github.com/sponsors/fastify"
        },
        {
          "type": "opencollective",
          "url": "https://opencollective.com/fastify"
        }
      ],
      "license": "BSD-3-Clause"
    },
    "node_modules/sonic-boom": {
      "version": "4.2.0",
      "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz",
      "integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==",
      "license": "MIT",
      "dependencies": {
        "atomic-sleep": "^1.0.0"
      }
    },
    "node_modules/split2": {
      "version": "4.2.0",
      "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
      "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
      "license": "ISC",
      "engines": {
        "node": ">= 10.x"
      }
    },
    "node_modules/strip-json-comments": {
      "version": "5.0.3",
      "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz",
      "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==",
      "license": "MIT",
      "engines": {
        "node": ">=14.16"
      },
      "funding": {
        "url": "https://github.com/sponsors/sindresorhus"
      }
    },
    "node_modules/thread-stream": {
      "version": "3.1.0",
      "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz",
      "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==",
      "license": "MIT",
      "dependencies": {
        "real-require": "^0.2.0"
      }
    },
    "node_modules/wrappy": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
      "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
      "license": "ISC"
    }
  }
}

node_modules/@pinojs/redact/.github/dependabot.yml (generated, vendored, 13 lines deleted)
@@ -1,13 +0,0 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"
    open-pull-requests-limit: 10

  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "monthly"
    open-pull-requests-limit: 10

node_modules/@pinojs/redact/.github/workflows/ci.yml (generated, vendored, 48 lines deleted)
@@ -1,48 +0,0 @@
name: CI

on:
  push:
    branches:
      - main
      - 'v*'
    paths-ignore:
      - 'docs/**'
      - '*.md'
  pull_request:
    paths-ignore:
      - 'docs/**'
      - '*.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
  group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
  cancel-in-progress: true

jobs:
  test:
    name: ${{ matrix.node-version }} ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    permissions:
      contents: read
    strategy:
      fail-fast: false
      matrix:
        os: [macOS-latest, windows-latest, ubuntu-latest]
        node-version: [18, 20, 22, 24]

    steps:
      - name: Check out repo
        uses: actions/checkout@v5.0.0
        with:
          persist-credentials: false

      - name: Setup Node ${{ matrix.node-version }}
        uses: actions/setup-node@v5
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install dependencies
        run: npm i --ignore-scripts

      - name: Run tests
        run: npm run test

node_modules/@pinojs/redact/.github/workflows/publish-release.yml (generated, vendored, 43 lines deleted)
@@ -1,43 +0,0 @@
name: Publish release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'The version number to tag and release'
        required: true
        type: string
      prerelease:
        description: 'Release as pre-release'
        required: false
        type: boolean
        default: false

jobs:
  release-npm:
    runs-on: ubuntu-latest
    environment: main
    permissions:
      contents: write
      id-token: write
    steps:
      - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4
      - uses: actions/setup-node@v5
        with:
          node-version: '22'
          registry-url: 'https://registry.npmjs.org'
      - run: npm install npm -g
      - run: npm install
      - name: Change version number and sync
        run: |
          node scripts/sync-version.mjs ${{ inputs.version }}
      - name: GIT commit and push all changed files
        run: |
          git config --global user.name "mcollina"
          git config --global user.email "hello@matteocollina.com"
          git commit -n -a -m "Bumped v${{ inputs.version }}"
          git push origin HEAD:${{ github.ref }}
      - run: npm publish --access public --tag ${{ inputs.prerelease == true && 'next' || 'latest' }}
      - name: 'Create release notes'
        run: |
          npx @matteo.collina/release-notes -a ${{ secrets.GITHUB_TOKEN }} -t v${{ inputs.version }} -r redact -o pinojs ${{ github.event.inputs.prerelease == 'true' && '-p' || '' }} -c ${{ github.ref }}

node_modules/@pinojs/redact/LICENSE (generated, vendored, 21 lines deleted)
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2025 pinojs contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

node_modules/@pinojs/redact/README.md (generated, vendored, 350 lines deleted)
@@ -1,350 +0,0 @@
# @pinojs/redact

> Smart object redaction for JavaScript applications - safe AND fast!

Redact JS objects with the same API as [fast-redact](https://github.com/davidmarkclements/fast-redact), but uses innovative **selective cloning** instead of mutating the original. This provides immutability guarantees with **performance competitive** to fast-redact for real-world usage patterns.

## Install

```bash
npm install @pinojs/redact
```

## Usage

```js
const slowRedact = require('@pinojs/redact')

const redact = slowRedact({
  paths: ['headers.cookie', 'headers.authorization', 'user.password']
})

const obj = {
  headers: {
    cookie: 'secret-session-token',
    authorization: 'Bearer abc123',
    'x-forwarded-for': '192.168.1.1'
  },
  user: {
    name: 'john',
    password: 'secret123'
  }
}

console.log(redact(obj))
// Output: {"headers":{"cookie":"[REDACTED]","authorization":"[REDACTED]","x-forwarded-for":"192.168.1.1"},"user":{"name":"john","password":"[REDACTED]"}}

// Original object is completely unchanged:
console.log(obj.headers.cookie) // 'secret-session-token'
```

## API

### slowRedact(options) → Function

Creates a redaction function with the specified options.

#### Options

- **paths** `string[]` (required): An array of strings describing the nested location of a key in an object
- **censor** `any` (optional, default: `'[REDACTED]'`): The value to replace sensitive data with. Can be a static value or function.
- **serialize** `Function|boolean` (optional, default: `JSON.stringify`): Serialization function. Set to `false` to return the redacted object.
- **remove** `boolean` (optional, default: `false`): Remove redacted keys from serialized output
- **strict** `boolean` (optional, default: `true`): Throw on non-object values or pass through primitives

#### Path Syntax

Supports the same path syntax as fast-redact:

- **Dot notation**: `'user.name'`, `'headers.cookie'`
- **Bracket notation**: `'user["password"]'`, `'headers["X-Forwarded-For"]'`
- **Array indices**: `'users[0].password'`, `'items[1].secret'`
- **Wildcards**:
  - Terminal: `'users.*.password'` (redacts password for all users)
  - Intermediate: `'*.password'` (redacts password at any level)
  - Array wildcard: `'items.*'` (redacts all array elements)

#### Examples

**Custom censor value:**
```js
const redact = slowRedact({
  paths: ['password'],
  censor: '***HIDDEN***'
})
```

**Dynamic censor function:**
```js
const redact = slowRedact({
  paths: ['password'],
  censor: (value, path) => `REDACTED:${path}`
})
```

**Return object instead of JSON string:**
```js
const redact = slowRedact({
  paths: ['secret'],
  serialize: false
})

const result = redact({ secret: 'hidden', public: 'data' })
console.log(result.secret) // '[REDACTED]'
console.log(result.public) // 'data'

// Restore original values
const restored = result.restore()
console.log(restored.secret) // 'hidden'
```

**Custom serialization:**
```js
const redact = slowRedact({
  paths: ['password'],
  serialize: obj => JSON.stringify(obj, null, 2)
})
```

**Remove keys instead of redacting:**
```js
const redact = slowRedact({
  paths: ['password', 'user.secret'],
  remove: true
})

const obj = { username: 'john', password: 'secret123', user: { name: 'Jane', secret: 'hidden' } }
console.log(redact(obj))
// Output: {"username":"john","user":{"name":"Jane"}}
// Note: 'password' and 'user.secret' are completely absent, not redacted
```

**Wildcard patterns:**
```js
// Redact all properties in secrets object
const redact1 = slowRedact({ paths: ['secrets.*'] })

// Redact password for any user
const redact2 = slowRedact({ paths: ['users.*.password'] })

// Redact all items in an array
const redact3 = slowRedact({ paths: ['items.*'] })

// Remove all secrets instead of redacting them
const redact4 = slowRedact({ paths: ['secrets.*'], remove: true })
```

## Key Differences from fast-redact

### Safety First
- **No mutation**: Original objects are never modified
- **Selective cloning**: Only clones paths that need redaction, shares references for everything else
- **Restore capability**: Can restore original values when `serialize: false`

### Feature Compatibility
- **Remove option**: Full compatibility with fast-redact's `remove: true` option to completely omit keys from output
- **All path patterns**: Supports same syntax including wildcards, bracket notation, and array indices
- **Censor functions**: Dynamic censoring with path information passed as arrays
- **Serialization**: Custom serializers and `serialize: false` mode

### Smart Performance Approach
- **Selective cloning**: Analyzes redaction paths and only clones necessary object branches
- **Reference sharing**: Non-redacted properties maintain original object references
- **Memory efficiency**: Dramatically reduced memory usage for large objects with minimal redaction
- **Setup-time optimization**: Path analysis happens once during setup, not per redaction

### When to Use @pinojs/redact
- When immutability is critical
- When you need to preserve original objects
- When objects are shared across multiple contexts
- In functional programming environments
- When debugging and you need to compare before/after
- **Large objects with selective redaction** (now performance-competitive!)
- When memory efficiency with reference sharing is important

### When to Use fast-redact
- When absolute maximum performance is critical
- In extremely high-throughput scenarios (>100,000 ops/sec)
- When you control the object lifecycle and mutation is acceptable
- Very small objects where setup overhead matters

## Performance Benchmarks

@pinojs/redact uses **selective cloning** that provides good performance while maintaining immutability guarantees:

### Performance Results

| Operation Type | @pinojs/redact | fast-redact | Performance Ratio |
|---------------|-------------|-------------|-------------------|
| **Small objects** | ~690ns | ~200ns | ~3.5x slower |
| **Large objects (minimal redaction)** | **~18μs** | ~17μs | **~same performance** |
| **Large objects (wildcards)** | **~48μs** | ~37μs | **~1.3x slower** |
| **No redaction (large objects)** | **~18μs** | ~17μs | **~same performance** |

### Performance Improvements

@pinojs/redact is performance-competitive with fast-redact for large objects.

1. **Selective cloning approach**: Only clones object paths that need redaction
2. **Reference sharing**: Non-redacted properties share original object references
3. **Setup-time optimization**: Path analysis happens once, not per redaction
4. **Memory efficiency**: Dramatically reduced memory usage for typical use cases

### Benchmark Details

**Small Objects (~180 bytes)**:
- @pinojs/redact: **690ns** per operation
- fast-redact: **200ns** per operation
- **Slight setup overhead for small objects**

**Large Objects (~18KB, minimal redaction)**:
- @pinojs/redact: **18μs** per operation
- fast-redact: **17μs** per operation
- Near-identical performance

**Large Objects (~18KB, wildcard patterns)**:
- @pinojs/redact: **48μs** per operation
- fast-redact: **37μs** per operation
- Competitive performance for complex patterns

**Memory Considerations**:
- @pinojs/redact: **Selective reference sharing** (much lower memory usage than before)
- fast-redact: Mutates in-place (lowest memory usage)
- Large objects with few redacted paths now share most references

### When Performance Matters

Choose **fast-redact** when:
- Absolute maximum performance is critical (>100,000 ops/sec)
- Working with very small objects frequently
- Mutation is acceptable and controlled
- Every microsecond counts

Choose **@pinojs/redact** when:
- Immutability is required (with competitive performance)
- Objects are shared across contexts
- Large objects with selective redaction
- Memory efficiency through reference sharing is important
- Safety and functionality are priorities
- Most production applications (performance gap is minimal)

Run benchmarks yourself:
```bash
npm run bench
```

## How Selective Cloning Works

@pinojs/redact uses an innovative **selective cloning** approach that provides immutability guarantees while dramatically improving performance:

### Traditional Approach (before optimization)
```js
// Old approach: Deep clone entire object, then redact
const fullClone = deepClone(originalObject) // Clone everything
redact(fullClone, paths) // Then redact specific paths
```

### Selective Cloning Approach (current)
```js
// New approach: Analyze paths, clone only what's needed
const pathStructure = buildPathStructure(paths) // One-time setup
const selectiveClone = cloneOnlyNeededPaths(obj, pathStructure) // Smart cloning
redact(selectiveClone, paths) // Redact pre-identified paths
```

### Key Innovations

1. **Path Analysis**: Pre-processes redaction paths into an efficient tree structure
2. **Selective Cloning**: Only creates new objects for branches that contain redaction targets
3. **Reference Sharing**: Non-redacted properties maintain exact same object references
4. **Setup Optimization**: Path parsing happens once during redactor creation, not per redaction

### Example: Reference Sharing in Action

```js
const largeConfig = {
  database: { /* large config object */ },
  api: { /* another large config */ },
  secrets: { password: 'hidden', apiKey: 'secret' }
}

const redact = slowRedact({ paths: ['secrets.password'] })
const result = redact(largeConfig)

// Only secrets object is cloned, database and api share original references
console.log(result.database === largeConfig.database) // true - shared reference!
console.log(result.api === largeConfig.api) // true - shared reference!
console.log(result.secrets === largeConfig.secrets) // false - cloned for redaction
```

This approach provides **immutability where it matters** while **sharing references where it's safe**.

## Remove Option

The `remove: true` option provides full compatibility with fast-redact's key removal functionality:

```js
const redact = slowRedact({
  paths: ['password', 'secrets.*', 'users.*.credentials'],
  remove: true
})

const data = {
  username: 'john',
  password: 'secret123',
  secrets: { apiKey: 'abc', token: 'xyz' },
  users: [
    { name: 'Alice', credentials: { password: 'pass1' } },
    { name: 'Bob', credentials: { password: 'pass2' } }
  ]
}

console.log(redact(data))
// Output: {"username":"john","secrets":{},"users":[{"name":"Alice"},{"name":"Bob"}]}
```

### Remove vs Redact Behavior

| Option | Behavior | Output Example |
|--------|----------|----------------|
| Default (redact) | Replaces values with censor | `{"password":"[REDACTED]"}` |
| `remove: true` | Completely omits keys | `{}` |

### Compatibility Notes

- **Same output as fast-redact**: Identical JSON output when using `remove: true`
- **Wildcard support**: Works with all wildcard patterns (`*`, `users.*`, `items.*.secret`)
- **Array handling**: Array items are set to `undefined` (omitted in JSON output)
- **Nested paths**: Supports deep removal (`users.*.credentials.password`)
- **Serialize compatibility**: Only works with `JSON.stringify` serializer (like fast-redact)

## Testing

```bash
# Run unit tests
npm test

# Run integration tests comparing with fast-redact
npm run test:integration

# Run all tests (unit + integration)
npm run test:all

# Run benchmarks
npm run bench
```

### Test Coverage

- **16 unit tests**: Core functionality and edge cases
- **16 integration tests**: Output compatibility with fast-redact
- **All major features**: Paths, wildcards, serialization, custom censors
- **Performance benchmarks**: Direct comparison with fast-redact

## License

MIT

## Contributing

Pull requests welcome! Please ensure all tests pass and add tests for new features.

node_modules/@pinojs/redact/benchmarks/basic.js (generated, vendored, 184 lines deleted)
@@ -1,184 +0,0 @@
const { bench, group, run } = require('mitata')
const slowRedact = require('../index.js')
const fastRedact = require('fast-redact')

// Test objects
const smallObj = {
  user: { name: 'john', password: 'secret123' },
  headers: { cookie: 'session-token', authorization: 'Bearer abc123' }
}

const largeObj = {
  users: [],
  metadata: {
    version: '1.0.0',
    secret: 'app-secret-key',
    database: {
      host: 'localhost',
      password: 'db-password'
    }
  }
}

// Populate users array with for loop instead of Array.from
for (let i = 0; i < 100; i++) {
  largeObj.users.push({
    id: i,
    name: `user${i}`,
    email: `user${i}@example.com`,
    password: `secret${i}`,
    profile: {
      age: 20 + (i % 50),
      preferences: {
        theme: 'dark',
        notifications: true,
        apiKey: `key-${i}-secret`
      }
    }
  })
}

// Redaction configurations
const basicSlowRedact = slowRedact({
  paths: ['user.password', 'headers.cookie']
})

const basicFastRedact = fastRedact({
  paths: ['user.password', 'headers.cookie']
})

const wildcardSlowRedact = slowRedact({
  paths: ['users.*.password', 'users.*.profile.preferences.apiKey']
})

const wildcardFastRedact = fastRedact({
  paths: ['users.*.password', 'users.*.profile.preferences.apiKey']
})

const deepSlowRedact = slowRedact({
  paths: ['metadata.secret', 'metadata.database.password']
})

const deepFastRedact = fastRedact({
  paths: ['metadata.secret', 'metadata.database.password']
})

group('Small Object Redaction - @pinojs/redact', () => {
  bench('basic paths', () => {
    basicSlowRedact(smallObj)
  })

  bench('serialize: false', () => {
    const redact = slowRedact({
      paths: ['user.password'],
      serialize: false
    })
    redact(smallObj)
  })

  bench('custom censor function', () => {
    const redact = slowRedact({
      paths: ['user.password'],
      censor: (value, path) => `HIDDEN:${path}`
    })
    redact(smallObj)
  })
})

group('Small Object Redaction - fast-redact', () => {
  bench('basic paths', () => {
    basicFastRedact(smallObj)
  })

  bench('serialize: false', () => {
    const redact = fastRedact({
      paths: ['user.password'],
      serialize: false
    })
    redact(smallObj)
  })

  bench('custom censor function', () => {
    const redact = fastRedact({
      paths: ['user.password'],
      censor: (value, path) => `HIDDEN:${path}`
    })
    redact(smallObj)
  })
})

group('Large Object Redaction - @pinojs/redact', () => {
  bench('wildcard patterns', () => {
    wildcardSlowRedact(largeObj)
  })

  bench('deep nested paths', () => {
    deepSlowRedact(largeObj)
  })

  bench('multiple wildcards', () => {
    const redact = slowRedact({
      paths: ['users.*.password', 'users.*.profile.preferences.*']
    })
    redact(largeObj)
  })
})

group('Large Object Redaction - fast-redact', () => {
  bench('wildcard patterns', () => {
    wildcardFastRedact(largeObj)
  })

  bench('deep nested paths', () => {
    deepFastRedact(largeObj)
  })

  bench('multiple wildcards', () => {
    const redact = fastRedact({
      paths: ['users.*.password', 'users.*.profile.preferences.*']
    })
    redact(largeObj)
  })
})

group('Direct Performance Comparison', () => {
  bench('@pinojs/redact - basic paths', () => {
    basicSlowRedact(smallObj)
  })

  bench('fast-redact - basic paths', () => {
    basicFastRedact(smallObj)
  })

  bench('@pinojs/redact - wildcards', () => {
    wildcardSlowRedact(largeObj)
  })

  bench('fast-redact - wildcards', () => {
    wildcardFastRedact(largeObj)
  })
})

group('Object Cloning Overhead', () => {
  bench('@pinojs/redact - no redaction (clone only)', () => {
    const redact = slowRedact({ paths: [] })
    redact(smallObj)
  })

  bench('fast-redact - no redaction', () => {
    const redact = fastRedact({ paths: [] })
    redact(smallObj)
  })

  bench('@pinojs/redact - large object clone', () => {
    const redact = slowRedact({ paths: [] })
    redact(largeObj)
  })

  bench('fast-redact - large object', () => {
    const redact = fastRedact({ paths: [] })
    redact(largeObj)
  })
})

run()

node_modules/@pinojs/redact/eslint.config.js (generated, vendored, 1 line deleted)
@@ -1 +0,0 @@
module.exports = require('neostandard')()

node_modules/@pinojs/redact/index.d.ts (generated, vendored, 52 lines deleted)
@@ -1,52 +0,0 @@
export = F;

/**
 * When called without any options, or with a zero length paths array, @pinojs/redact will return JSON.stringify or the serialize option, if set.
 * @param redactOptions
 * @param redactOptions.paths An array of strings describing the nested location of a key in an object.
 * @param redactOptions.censor This is the value which overwrites redacted properties.
 * @param redactOptions.remove The remove option, when set to true will cause keys to be removed from the serialized output.
 * @param redactOptions.serialize The serialize option may either be a function or a boolean. If a function is supplied, this will be used to serialize the redacted object.
 * @param redactOptions.strict The strict option, when set to true, will cause the redactor function to throw if instead of an object it finds a primitive.
 * @returns Redacted value from input
 */
declare function F(
  redactOptions: F.RedactOptionsNoSerialize
): F.redactFnNoSerialize;
declare function F(redactOptions?: F.RedactOptions): F.redactFn;

declare namespace F {
  /** Redacts input */
  type redactFn = <T>(input: T) => string | T;

  /** Redacts input without serialization */
  type redactFnNoSerialize = redactFn & {
    /** Method that allowing the redacted keys to be restored with the original data. Supplied only when serialize option set to false. */
    restore<T>(input: T): T;
  };

  interface RedactOptions {
    /** An array of strings describing the nested location of a key in an object. */
    paths?: string[] | undefined;

    /** This is the value which overwrites redacted properties. */
    censor?: string | ((v: any) => any) | undefined;

    /** The remove option, when set to true will cause keys to be removed from the serialized output. */
    remove?: boolean | undefined;

    /**
     * The serialize option may either be a function or a boolean. If a function is supplied, this will be used to serialize the redacted object.
     * The default serialize is the function JSON.stringify
     */
    serialize?: boolean | ((v: any) => any) | undefined;

    /** The strict option, when set to true, will cause the redactor function to throw if instead of an object it finds a primitive. */
    strict?: boolean | undefined;
  }

  /** RedactOptions without serialization. Instead of the serialized object, the output of the redactor function will be the mutated object itself. */
  interface RedactOptionsNoSerialize extends RedactOptions {
    serialize: false;
  }
}
529
node_modules/@pinojs/redact/index.js
generated
vendored
529
node_modules/@pinojs/redact/index.js
generated
vendored
@ -1,529 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
function deepClone (obj) {
|
||||
if (obj === null || typeof obj !== 'object') {
|
||||
return obj
|
||||
}
|
||||
|
||||
if (obj instanceof Date) {
|
||||
return new Date(obj.getTime())
|
||||
}
|
||||
|
||||
if (obj instanceof Array) {
|
||||
const cloned = []
|
||||
for (let i = 0; i < obj.length; i++) {
|
||||
cloned[i] = deepClone(obj[i])
|
||||
}
|
||||
return cloned
|
||||
}
|
||||
|
||||
if (typeof obj === 'object') {
|
||||
const cloned = Object.create(Object.getPrototypeOf(obj))
|
||||
for (const key in obj) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj, key)) {
|
||||
cloned[key] = deepClone(obj[key])
|
||||
}
|
||||
}
|
||||
return cloned
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
function parsePath (path) {
|
||||
const parts = []
|
||||
let current = ''
|
||||
let inBrackets = false
|
||||
let inQuotes = false
|
||||
let quoteChar = ''
|
||||
|
||||
for (let i = 0; i < path.length; i++) {
|
||||
const char = path[i]
|
||||
|
||||
if (!inBrackets && char === '.') {
|
||||
if (current) {
|
||||
parts.push(current)
|
||||
current = ''
|
||||
}
|
||||
} else if (char === '[') {
|
||||
if (current) {
|
||||
parts.push(current)
|
||||
current = ''
|
||||
}
|
||||
inBrackets = true
|
||||
} else if (char === ']' && inBrackets) {
|
||||
// Always push the current value when closing brackets, even if it's an empty string
|
||||
parts.push(current)
|
||||
current = ''
|
||||
inBrackets = false
|
||||
inQuotes = false
|
||||
} else if ((char === '"' || char === "'") && inBrackets) {
|
||||
if (!inQuotes) {
|
||||
inQuotes = true
|
||||
quoteChar = char
|
||||
} else if (char === quoteChar) {
|
||||
inQuotes = false
|
||||
quoteChar = ''
|
||||
} else {
|
||||
current += char
|
||||
}
|
||||
} else {
|
||||
current += char
|
||||
}
|
||||
}
|
||||
|
||||
if (current) {
|
||||
parts.push(current)
|
||||
}
|
||||
|
||||
return parts
|
||||
}
|
||||
|
||||
function setValue (obj, parts, value) {
|
||||
let current = obj
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const key = parts[i]
|
||||
// Type safety: Check if current is an object before using 'in' operator
|
||||
if (typeof current !== 'object' || current === null || !(key in current)) {
|
||||
return false // Path doesn't exist, don't create it
|
||||
}
|
||||
if (typeof current[key] !== 'object' || current[key] === null) {
|
||||
return false // Path doesn't exist properly
|
||||
}
|
||||
current = current[key]
|
||||
}
|
||||
|
||||
const lastKey = parts[parts.length - 1]
|
||||
if (lastKey === '*') {
|
||||
if (Array.isArray(current)) {
|
||||
for (let i = 0; i < current.length; i++) {
|
||||
current[i] = value
|
||||
}
|
||||
} else if (typeof current === 'object' && current !== null) {
|
||||
for (const key in current) {
|
||||
if (Object.prototype.hasOwnProperty.call(current, key)) {
|
||||
current[key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Type safety: Check if current is an object before using 'in' operator
|
||||
if (typeof current === 'object' && current !== null && lastKey in current && Object.prototype.hasOwnProperty.call(current, lastKey)) {
|
||||
current[lastKey] = value
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
function removeKey (obj, parts) {
|
||||
let current = obj
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const key = parts[i]
|
||||
// Type safety: Check if current is an object before using 'in' operator
|
||||
if (typeof current !== 'object' || current === null || !(key in current)) {
|
||||
return false // Path doesn't exist, don't create it
|
||||
}
|
||||
if (typeof current[key] !== 'object' || current[key] === null) {
|
||||
return false // Path doesn't exist properly
|
||||
}
|
||||
current = current[key]
|
||||
}
|
||||
|
||||
const lastKey = parts[parts.length - 1]
|
||||
if (lastKey === '*') {
|
||||
if (Array.isArray(current)) {
|
||||
// For arrays, we can't really "remove" all items as that would change indices
|
||||
// Instead, we set them to undefined which will be omitted by JSON.stringify
|
||||
for (let i = 0; i < current.length; i++) {
|
||||
current[i] = undefined
|
||||
}
|
||||
} else if (typeof current === 'object' && current !== null) {
|
||||
for (const key in current) {
|
||||
if (Object.prototype.hasOwnProperty.call(current, key)) {
|
||||
delete current[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Type safety: Check if current is an object before using 'in' operator
|
||||
if (typeof current === 'object' && current !== null && lastKey in current && Object.prototype.hasOwnProperty.call(current, lastKey)) {
|
||||
delete current[lastKey]
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Sentinel object to distinguish between undefined value and non-existent path
|
||||
const PATH_NOT_FOUND = Symbol('PATH_NOT_FOUND')
|
||||
|
||||
function getValueIfExists (obj, parts) {
|
||||
let current = obj
|
||||
|
||||
for (const part of parts) {
|
||||
if (current === null || current === undefined) {
|
||||
return PATH_NOT_FOUND
|
||||
}
|
||||
// Type safety: Check if current is an object before property access
|
||||
if (typeof current !== 'object' || current === null) {
|
||||
return PATH_NOT_FOUND
|
||||
}
|
||||
// Check if the property exists before accessing it
|
||||
if (!(part in current)) {
|
||||
return PATH_NOT_FOUND
|
||||
}
|
||||
current = current[part]
|
||||
}
|
||||
|
||||
return current
|
||||
}
|
||||
|
||||
function getValue (obj, parts) {
|
||||
let current = obj
|
||||
|
||||
for (const part of parts) {
|
||||
if (current === null || current === undefined) {
|
||||
return undefined
|
||||
}
|
||||
// Type safety: Check if current is an object before property access
|
||||
if (typeof current !== 'object' || current === null) {
|
||||
return undefined
|
||||
}
|
||||
current = current[part]
|
||||
}
|
||||
|
||||
return current
|
||||
}
|
||||
|
||||
function redactPaths (obj, paths, censor, remove = false) {
|
||||
for (const path of paths) {
|
||||
const parts = parsePath(path)
|
||||
|
||||
if (parts.includes('*')) {
|
||||
redactWildcardPath(obj, parts, censor, path, remove)
|
||||
} else {
|
||||
if (remove) {
|
||||
removeKey(obj, parts)
|
||||
} else {
|
||||
// Get value only if path exists - single traversal
|
||||
const value = getValueIfExists(obj, parts)
|
||||
if (value === PATH_NOT_FOUND) {
|
||||
continue
|
||||
}
|
||||
|
||||
const actualCensor = typeof censor === 'function'
|
||||
? censor(value, parts)
|
||||
: censor
|
||||
setValue(obj, parts, actualCensor)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function redactWildcardPath (obj, parts, censor, originalPath, remove = false) {
|
||||
const wildcardIndex = parts.indexOf('*')
|
||||
|
||||
if (wildcardIndex === parts.length - 1) {
|
||||
const parentParts = parts.slice(0, -1)
|
||||
let current = obj
|
||||
|
||||
for (const part of parentParts) {
|
||||
if (current === null || current === undefined) return
|
||||
// Type safety: Check if current is an object before property access
|
||||
if (typeof current !== 'object' || current === null) return
|
||||
current = current[part]
|
||||
}
|
||||
|
||||
if (Array.isArray(current)) {
|
||||
if (remove) {
|
||||
// For arrays, set all items to undefined which will be omitted by JSON.stringify
|
||||
for (let i = 0; i < current.length; i++) {
|
||||
current[i] = undefined
|
||||
}
|
||||
} else {
|
||||
for (let i = 0; i < current.length; i++) {
|
||||
const indexPath = [...parentParts, i.toString()]
|
||||
const actualCensor = typeof censor === 'function'
|
||||
? censor(current[i], indexPath)
|
||||
: censor
|
||||
current[i] = actualCensor
|
||||
}
|
||||
}
|
||||
} else if (typeof current === 'object' && current !== null) {
|
||||
if (remove) {
|
||||
// Collect keys to delete to avoid issues with deleting during iteration
|
||||
const keysToDelete = []
|
||||
for (const key in current) {
|
||||
if (Object.prototype.hasOwnProperty.call(current, key)) {
|
||||
keysToDelete.push(key)
|
||||
}
|
||||
}
|
||||
for (const key of keysToDelete) {
|
||||
delete current[key]
|
||||
}
|
||||
} else {
|
||||
for (const key in current) {
|
||||
const keyPath = [...parentParts, key]
|
||||
const actualCensor = typeof censor === 'function'
|
||||
? censor(current[key], keyPath)
|
||||
: censor
|
||||
current[key] = actualCensor
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
redactIntermediateWildcard(obj, parts, censor, wildcardIndex, originalPath, remove)
|
||||
}
|
||||
}
|
||||
|
||||
function redactIntermediateWildcard (obj, parts, censor, wildcardIndex, originalPath, remove = false) {
|
||||
const beforeWildcard = parts.slice(0, wildcardIndex)
|
||||
const afterWildcard = parts.slice(wildcardIndex + 1)
|
||||
const pathArray = [] // Cached array to avoid allocations
|
||||
|
||||
function traverse (current, pathLength) {
|
||||
if (pathLength === beforeWildcard.length) {
|
||||
if (Array.isArray(current)) {
|
||||
for (let i = 0; i < current.length; i++) {
|
||||
pathArray[pathLength] = i.toString()
|
||||
traverse(current[i], pathLength + 1)
|
||||
}
|
||||
} else if (typeof current === 'object' && current !== null) {
|
||||
for (const key in current) {
|
||||
pathArray[pathLength] = key
|
||||
traverse(current[key], pathLength + 1)
|
||||
}
|
||||
}
|
||||
} else if (pathLength < beforeWildcard.length) {
|
||||
const nextKey = beforeWildcard[pathLength]
|
||||
// Type safety: Check if current is an object before using 'in' operator
|
||||
if (current && typeof current === 'object' && current !== null && nextKey in current) {
|
||||
pathArray[pathLength] = nextKey
|
||||
traverse(current[nextKey], pathLength + 1)
|
||||
}
|
||||
} else {
|
||||
// Check if afterWildcard contains more wildcards
|
||||
if (afterWildcard.includes('*')) {
|
||||
// Recursively handle remaining wildcards
|
||||
// Wrap censor to prepend current path context
|
||||
const wrappedCensor = typeof censor === 'function'
|
||||
? (value, path) => {
|
||||
const fullPath = [...pathArray.slice(0, pathLength), ...path]
|
||||
return censor(value, fullPath)
|
||||
}
|
||||
: censor
|
||||
redactWildcardPath(current, afterWildcard, wrappedCensor, originalPath, remove)
|
||||
} else {
|
||||
// No more wildcards, apply the redaction directly
|
||||
if (remove) {
|
||||
removeKey(current, afterWildcard)
|
||||
} else {
|
||||
const actualCensor = typeof censor === 'function'
|
||||
? censor(getValue(current, afterWildcard), [...pathArray.slice(0, pathLength), ...afterWildcard])
|
||||
: censor
|
||||
setValue(current, afterWildcard, actualCensor)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (beforeWildcard.length === 0) {
|
||||
traverse(obj, 0)
|
||||
} else {
|
||||
let current = obj
|
||||
for (let i = 0; i < beforeWildcard.length; i++) {
|
||||
const part = beforeWildcard[i]
|
||||
if (current === null || current === undefined) return
|
||||
// Type safety: Check if current is an object before property access
|
||||
if (typeof current !== 'object' || current === null) return
|
||||
current = current[part]
|
||||
pathArray[i] = part
|
||||
}
|
||||
if (current !== null && current !== undefined) {
|
||||
traverse(current, beforeWildcard.length)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function buildPathStructure (pathsToClone) {
|
||||
if (pathsToClone.length === 0) {
|
||||
return null // No paths to redact
|
||||
}
|
||||
|
||||
// Parse all paths and organize by depth
|
||||
const pathStructure = new Map()
|
||||
for (const path of pathsToClone) {
|
||||
const parts = parsePath(path)
|
||||
let current = pathStructure
|
||||
for (let i = 0; i < parts.length; i++) {
|
||||
const part = parts[i]
|
||||
if (!current.has(part)) {
|
||||
current.set(part, new Map())
|
||||
}
|
||||
current = current.get(part)
|
||||
}
|
||||
}
|
||||
return pathStructure
|
||||
}
|
||||
|
||||
function selectiveClone (obj, pathStructure) {
|
||||
if (!pathStructure) {
|
||||
return obj // No paths to redact, return original
|
||||
}
|
||||
|
||||
function cloneSelectively (source, pathMap, depth = 0) {
|
||||
if (!pathMap || pathMap.size === 0) {
|
||||
return source // No more paths to clone, return reference
|
||||
}
|
||||
|
||||
if (source === null || typeof source !== 'object') {
|
||||
return source
|
||||
}
|
||||
|
||||
if (source instanceof Date) {
|
||||
return new Date(source.getTime())
|
||||
}
|
||||
|
||||
if (Array.isArray(source)) {
|
||||
const cloned = []
|
||||
for (let i = 0; i < source.length; i++) {
|
||||
const indexStr = i.toString()
|
||||
if (pathMap.has(indexStr) || pathMap.has('*')) {
|
||||
cloned[i] = cloneSelectively(source[i], pathMap.get(indexStr) || pathMap.get('*'))
|
||||
} else {
|
||||
cloned[i] = source[i] // Share reference for non-redacted items
|
||||
}
|
||||
}
|
||||
return cloned
|
||||
}
|
||||
|
||||
// Handle objects
|
||||
const cloned = Object.create(Object.getPrototypeOf(source))
|
||||
for (const key in source) {
|
||||
if (Object.prototype.hasOwnProperty.call(source, key)) {
|
||||
if (pathMap.has(key) || pathMap.has('*')) {
|
||||
cloned[key] = cloneSelectively(source[key], pathMap.get(key) || pathMap.get('*'))
|
||||
} else {
|
||||
cloned[key] = source[key] // Share reference for non-redacted properties
|
||||
}
|
||||
}
|
||||
}
|
||||
return cloned
|
||||
}
|
||||
|
||||
return cloneSelectively(obj, pathStructure)
|
||||
}
|
||||
|
||||
function validatePath (path) {
  if (typeof path !== 'string') {
    throw new Error('Paths must be (non-empty) strings')
  }

  if (path === '') {
    throw new Error('Invalid redaction path ()')
  }

  // Check for double dots
  if (path.includes('..')) {
    throw new Error(`Invalid redaction path (${path})`)
  }

  // Check for comma-separated paths (invalid syntax)
  if (path.includes(',')) {
    throw new Error(`Invalid redaction path (${path})`)
  }

  // Check for unmatched brackets
  let bracketCount = 0
  let inQuotes = false
  let quoteChar = ''

  for (let i = 0; i < path.length; i++) {
    const char = path[i]

    if ((char === '"' || char === "'") && bracketCount > 0) {
      if (!inQuotes) {
        inQuotes = true
        quoteChar = char
      } else if (char === quoteChar) {
        inQuotes = false
        quoteChar = ''
      }
    } else if (char === '[' && !inQuotes) {
      bracketCount++
    } else if (char === ']' && !inQuotes) {
      bracketCount--
      if (bracketCount < 0) {
        throw new Error(`Invalid redaction path (${path})`)
      }
    }
  }

  if (bracketCount !== 0) {
    throw new Error(`Invalid redaction path (${path})`)
  }
}

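A few concrete cases, mirroring the path-validation tests later in this diff:

validatePath('user.password') // ok
validatePath('["weird-key"].value') // ok — balanced brackets, quotes respected
validatePath('a..b') // throws: Invalid redaction path (a..b)
validatePath('req,headers.cookie') // throws: comma-separated paths are rejected
validatePath('invalid[unclosed') // throws: unmatched bracket
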
function validatePaths (paths) {
  if (!Array.isArray(paths)) {
    throw new TypeError('paths must be an array')
  }

  for (const path of paths) {
    validatePath(path)
  }
}

function slowRedact (options = {}) {
  const {
    paths = [],
    censor = '[REDACTED]',
    serialize = JSON.stringify,
    strict = true,
    remove = false
  } = options

  // Validate paths upfront to match fast-redact behavior
  validatePaths(paths)

  // Build path structure once during setup, not on every call
  const pathStructure = buildPathStructure(paths)

  return function redact (obj) {
    if (strict && (obj === null || typeof obj !== 'object')) {
      if (obj === null || obj === undefined) {
        return serialize ? serialize(obj) : obj
      }
      if (typeof obj !== 'object') {
        return serialize ? serialize(obj) : obj
      }
    }

    // Only clone paths that need redaction
    const cloned = selectiveClone(obj, pathStructure)
    const original = obj // Keep reference to original for restore

    let actualCensor = censor
    if (typeof censor === 'function') {
      actualCensor = censor
    }

    redactPaths(cloned, paths, actualCensor, remove)

    if (serialize === false) {
      cloned.restore = function () {
        return deepClone(original) // Full clone only when restore is called
      }
      return cloned
    }

    if (typeof serialize === 'function') {
      return serialize(cloned)
    }

    return JSON.stringify(cloned)
  }
}

module.exports = slowRedact
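Taken together, a minimal usage sketch of the module above; the behaviour shown is the one exercised by the tests that follow in this diff:

const slowRedact = require('@pinojs/redact')

const redact = slowRedact({ paths: ['headers.cookie', 'users.*.password'] })
console.log(redact({ headers: { cookie: 'secret' } }))
// '{"headers":{"cookie":"[REDACTED]"}}'

const redactKeepObject = slowRedact({ paths: ['secret'], serialize: false })
const out = redactKeepObject({ secret: 'hidden' })
console.log(out.secret) // '[REDACTED]'
console.log(out.restore().secret) // 'hidden' — the input object is never mutated
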
22 node_modules/@pinojs/redact/index.test-d.ts generated vendored
@ -1,22 +0,0 @@
import { expectType, expectAssignable } from "tsd";
import slowRedact from ".";
import type { redactFn, redactFnNoSerialize } from ".";

// should return redactFn
expectType<redactFn>(slowRedact());
expectType<redactFn>(slowRedact({ paths: [] }));
expectType<redactFn>(slowRedact({ paths: ["some.path"] }));
expectType<redactFn>(slowRedact({ paths: [], censor: "[REDACTED]" }));
expectType<redactFn>(slowRedact({ paths: [], strict: true }));
expectType<redactFn>(slowRedact({ paths: [], serialize: JSON.stringify }));
expectType<redactFn>(slowRedact({ paths: [], serialize: true }));
expectType<redactFnNoSerialize>(slowRedact({ paths: [], serialize: false }));
expectType<redactFn>(slowRedact({ paths: [], remove: true }));

// should return string
expectType<string>(slowRedact()(""));

// should return string or T
expectAssignable<string | { someField: string }>(
  slowRedact()({ someField: "someValue" })
);
37 node_modules/@pinojs/redact/package.json generated vendored
@ -1,37 +0,0 @@
{
  "name": "@pinojs/redact",
  "version": "0.4.0",
  "description": "Redact JS objects",
  "main": "index.js",
  "types": "index.d.ts",
  "scripts": {
    "test": "node --test && npm run test:types",
    "test:integration": "node --test test/integration.test.js",
    "test:types": "tsd",
    "test:all": "node --test test/*.test.js",
    "lint": "eslint .",
    "lint:fix": "eslint . --fix",
    "bench": "node benchmarks/basic.js"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/pinojs/redact.git"
  },
  "keywords": [
    "redact"
  ],
  "author": "Matteo Collina <hello@matteocollina.com>",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/pinojs/redact/issues"
  },
  "homepage": "https://github.com/pinojs/redact#readme",
  "devDependencies": {
    "eslint": "^9.36.0",
    "fast-redact": "^3.5.0",
    "mitata": "^1.0.34",
    "neostandard": "^0.12.2",
    "tsd": "^0.33.0",
    "typescript": "^5.9.2"
  }
}
20 node_modules/@pinojs/redact/scripts/sync-version.mjs generated vendored
@ -1,20 +0,0 @@
import fs from 'node:fs'
import path from 'node:path'

const packageJsonPath = path.resolve(import.meta.dirname, '../package.json')
let { version } = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'))

let passedVersion = process.argv[2]

if (passedVersion) {
  passedVersion = passedVersion.trim().replace(/^v/, '')
  if (version !== passedVersion) {
    console.log(`Syncing version from ${version} to ${passedVersion}`)
    version = passedVersion
    const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'))
    packageJson.version = version
    fs.writeFileSync(path.resolve('./package.json'), JSON.stringify(packageJson, null, 2) + '\n', { encoding: 'utf-8' })
  }
} else {
  throw new Error('Version argument is required')
}
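The only normalisation applied to the version argument is the trim-and-strip step above; for example:

console.log(' v0.4.1\n'.trim().replace(/^v/, '')) // '0.4.1'
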
211 node_modules/@pinojs/redact/test/actual-redact-comparison.test.js generated vendored
@ -1,211 +0,0 @@
'use strict'

// Node.js test comparing @pinojs/redact vs fast-redact for multiple wildcard patterns
// This test validates that @pinojs/redact correctly handles 3+ consecutive wildcards
// matching the behavior of fast-redact

const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const fastRedact = require('fast-redact')
const slowRedact = require('../index.js')

// Helper function to test redaction and track which values were censored
function testRedactDirect (library, pattern, testData = {}) {
  const matches = []
  const redactor = library === '@pinojs/redact' ? slowRedact : fastRedact

  try {
    const redact = redactor({
      paths: [pattern],
      censor: (value, path) => {
        if (
          value !== undefined &&
          value !== null &&
          typeof value === 'string' &&
          value.includes('secret')
        ) {
          matches.push({
            value,
            path: path ? path.join('.') : 'unknown'
          })
        }
        return '[REDACTED]'
      }
    })

    redact(JSON.parse(JSON.stringify(testData)))

    return {
      library,
      pattern,
      matches,
      success: true,
      testData
    }
  } catch (error) {
    return {
      library,
      pattern,
      matches: [],
      success: false,
      error: error.message,
      testData
    }
  }
}

function testSlowRedactDirect (pattern, testData) {
  return testRedactDirect('@pinojs/redact', pattern, testData)
}

function testFastRedactDirect (pattern, testData) {
  return testRedactDirect('fast-redact', pattern, testData)
}

test('@pinojs/redact: *.password (2 levels)', () => {
  const result = testSlowRedactDirect('*.password', {
    simple: { password: 'secret-2-levels' }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-2-levels')
})

test('@pinojs/redact: *.*.password (3 levels)', () => {
  const result = testSlowRedactDirect('*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-3-levels')
})

test('@pinojs/redact: *.*.*.password (4 levels)', () => {
  const result = testSlowRedactDirect('*.*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-4-levels')
})

test('@pinojs/redact: *.*.*.*.password (5 levels)', () => {
  const result = testSlowRedactDirect('*.*.*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: {
      user: { auth: { settings: { password: 'secret-5-levels' } } }
    }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-5-levels')
})

test('@pinojs/redact: *.*.*.*.*.password (6 levels)', () => {
  const result = testSlowRedactDirect('*.*.*.*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: {
      user: { auth: { settings: { password: 'secret-5-levels' } } }
    },
    data: {
      reqConfig: {
        data: {
          credentials: {
            settings: {
              password: 'real-secret-6-levels'
            }
          }
        }
      }
    }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'real-secret-6-levels')
})

test('fast-redact: *.password (2 levels)', () => {
  const result = testFastRedactDirect('*.password', {
    simple: { password: 'secret-2-levels' }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-2-levels')
})

test('fast-redact: *.*.password (3 levels)', () => {
  const result = testFastRedactDirect('*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-3-levels')
})

test('fast-redact: *.*.*.password (4 levels)', () => {
  const result = testFastRedactDirect('*.*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-4-levels')
})

test('fast-redact: *.*.*.*.password (5 levels)', () => {
  const result = testFastRedactDirect('*.*.*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: {
      user: { auth: { settings: { password: 'secret-5-levels' } } }
    }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'secret-5-levels')
})

test('fast-redact: *.*.*.*.*.password (6 levels)', () => {
  const result = testFastRedactDirect('*.*.*.*.*.password', {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: {
      user: { auth: { settings: { password: 'secret-5-levels' } } }
    },
    data: {
      reqConfig: {
        data: {
          credentials: {
            settings: {
              password: 'real-secret-6-levels'
            }
          }
        }
      }
    }
  })

  assert.strictEqual(result.success, true)
  assert.strictEqual(result.matches.length, 1)
  assert.strictEqual(result.matches[0].value, 'real-secret-6-levels')
})
824 node_modules/@pinojs/redact/test/index.test.js generated vendored
@ -1,824 +0,0 @@
|
||||
const { test } = require('node:test')
|
||||
const { strict: assert } = require('node:assert')
|
||||
const slowRedact = require('../index.js')
|
||||
|
||||
test('basic path redaction', () => {
|
||||
const obj = {
|
||||
headers: {
|
||||
cookie: 'secret-cookie',
|
||||
authorization: 'Bearer token'
|
||||
},
|
||||
body: { message: 'hello' }
|
||||
}
|
||||
|
||||
const redact = slowRedact({ paths: ['headers.cookie'] })
|
||||
const result = redact(obj)
|
||||
|
||||
// Original object should remain unchanged
|
||||
assert.strictEqual(obj.headers.cookie, 'secret-cookie')
|
||||
|
||||
// Result should have redacted path
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.headers.cookie, '[REDACTED]')
|
||||
assert.strictEqual(parsed.headers.authorization, 'Bearer token')
|
||||
assert.strictEqual(parsed.body.message, 'hello')
|
||||
})
|
||||
|
||||
test('multiple paths redaction', () => {
|
||||
const obj = {
|
||||
user: { name: 'john', password: 'secret' },
|
||||
session: { token: 'abc123' }
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['user.password', 'session.token']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
// Original unchanged
|
||||
assert.strictEqual(obj.user.password, 'secret')
|
||||
assert.strictEqual(obj.session.token, 'abc123')
|
||||
|
||||
// Result redacted
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.user.password, '[REDACTED]')
|
||||
assert.strictEqual(parsed.session.token, '[REDACTED]')
|
||||
assert.strictEqual(parsed.user.name, 'john')
|
||||
})
|
||||
|
||||
test('custom censor value', () => {
|
||||
const obj = { secret: 'hidden' }
|
||||
const redact = slowRedact({
|
||||
paths: ['secret'],
|
||||
censor: '***'
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.secret, '***')
|
||||
})
|
||||
|
||||
test('serialize: false returns object with restore method', () => {
|
||||
const obj = { secret: 'hidden' }
|
||||
const redact = slowRedact({
|
||||
paths: ['secret'],
|
||||
serialize: false
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
// Should be object, not string
|
||||
assert.strictEqual(typeof result, 'object')
|
||||
assert.strictEqual(result.secret, '[REDACTED]')
|
||||
|
||||
// Should have restore method
|
||||
assert.strictEqual(typeof result.restore, 'function')
|
||||
|
||||
const restored = result.restore()
|
||||
assert.strictEqual(restored.secret, 'hidden')
|
||||
})
|
||||
|
||||
test('bracket notation paths', () => {
|
||||
const obj = {
|
||||
'weird-key': { 'another-weird': 'secret' },
|
||||
normal: 'public'
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['["weird-key"]["another-weird"]']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed['weird-key']['another-weird'], '[REDACTED]')
|
||||
assert.strictEqual(parsed.normal, 'public')
|
||||
})
|
||||
|
||||
test('array paths', () => {
|
||||
const obj = {
|
||||
users: [
|
||||
{ name: 'john', password: 'secret1' },
|
||||
{ name: 'jane', password: 'secret2' }
|
||||
]
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['users[0].password', 'users[1].password']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.users[0].password, '[REDACTED]')
|
||||
assert.strictEqual(parsed.users[1].password, '[REDACTED]')
|
||||
assert.strictEqual(parsed.users[0].name, 'john')
|
||||
assert.strictEqual(parsed.users[1].name, 'jane')
|
||||
})
|
||||
|
||||
test('wildcard at end of path', () => {
|
||||
const obj = {
|
||||
secrets: {
|
||||
key1: 'secret1',
|
||||
key2: 'secret2'
|
||||
},
|
||||
public: 'data'
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['secrets.*']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.secrets.key1, '[REDACTED]')
|
||||
assert.strictEqual(parsed.secrets.key2, '[REDACTED]')
|
||||
assert.strictEqual(parsed.public, 'data')
|
||||
})
|
||||
|
||||
test('wildcard with arrays', () => {
|
||||
const obj = {
|
||||
items: ['secret1', 'secret2', 'secret3']
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['items.*']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.items[0], '[REDACTED]')
|
||||
assert.strictEqual(parsed.items[1], '[REDACTED]')
|
||||
assert.strictEqual(parsed.items[2], '[REDACTED]')
|
||||
})
|
||||
|
||||
test('intermediate wildcard', () => {
|
||||
const obj = {
|
||||
users: {
|
||||
user1: { password: 'secret1' },
|
||||
user2: { password: 'secret2' }
|
||||
}
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['users.*.password']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.users.user1.password, '[REDACTED]')
|
||||
assert.strictEqual(parsed.users.user2.password, '[REDACTED]')
|
||||
})
|
||||
|
||||
test('censor function', () => {
|
||||
const obj = { secret: 'hidden' }
|
||||
const redact = slowRedact({
|
||||
paths: ['secret'],
|
||||
censor: (value, path) => `REDACTED:${path.join('.')}`
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.secret, 'REDACTED:secret')
|
||||
})
|
||||
|
||||
test('custom serialize function', () => {
|
||||
const obj = { secret: 'hidden', public: 'data' }
|
||||
const redact = slowRedact({
|
||||
paths: ['secret'],
|
||||
serialize: (obj) => `custom:${JSON.stringify(obj)}`
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
assert(result.startsWith('custom:'))
|
||||
const parsed = JSON.parse(result.slice(7))
|
||||
assert.strictEqual(parsed.secret, '[REDACTED]')
|
||||
assert.strictEqual(parsed.public, 'data')
|
||||
})
|
||||
|
||||
test('nested paths', () => {
|
||||
const obj = {
|
||||
level1: {
|
||||
level2: {
|
||||
level3: {
|
||||
secret: 'hidden'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['level1.level2.level3.secret']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.level1.level2.level3.secret, '[REDACTED]')
|
||||
})
|
||||
|
||||
test('non-existent paths are ignored', () => {
|
||||
const obj = { existing: 'value' }
|
||||
const redact = slowRedact({
|
||||
paths: ['nonexistent.path']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.existing, 'value')
|
||||
assert.strictEqual(parsed.nonexistent, undefined)
|
||||
})
|
||||
|
||||
test('null and undefined handling', () => {
|
||||
const obj = {
|
||||
nullValue: null,
|
||||
undefinedValue: undefined,
|
||||
nested: {
|
||||
nullValue: null
|
||||
}
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['nullValue', 'nested.nullValue']
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.nullValue, '[REDACTED]')
|
||||
assert.strictEqual(parsed.nested.nullValue, '[REDACTED]')
|
||||
})
|
||||
|
||||
test('original object remains unchanged', () => {
|
||||
const original = {
|
||||
secret: 'hidden',
|
||||
nested: { secret: 'hidden2' }
|
||||
}
|
||||
const copy = JSON.parse(JSON.stringify(original))
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['secret', 'nested.secret']
|
||||
})
|
||||
redact(original)
|
||||
|
||||
// Original should be completely unchanged
|
||||
assert.deepStrictEqual(original, copy)
|
||||
})
|
||||
|
||||
test('strict mode with primitives', () => {
|
||||
const redact = slowRedact({
|
||||
paths: ['test'],
|
||||
strict: true
|
||||
})
|
||||
|
||||
const stringResult = redact('primitive')
|
||||
assert.strictEqual(stringResult, '"primitive"')
|
||||
|
||||
const numberResult = redact(42)
|
||||
assert.strictEqual(numberResult, '42')
|
||||
})
|
||||
|
||||
// Path validation tests to match fast-redact behavior
|
||||
test('path validation - non-string paths should throw', () => {
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: [123] })
|
||||
}, {
|
||||
message: 'Paths must be (non-empty) strings'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: [null] })
|
||||
}, {
|
||||
message: 'Paths must be (non-empty) strings'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: [undefined] })
|
||||
}, {
|
||||
message: 'Paths must be (non-empty) strings'
|
||||
})
|
||||
})
|
||||
|
||||
test('path validation - empty string should throw', () => {
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: [''] })
|
||||
}, {
|
||||
message: 'Invalid redaction path ()'
|
||||
})
|
||||
})
|
||||
|
||||
test('path validation - double dots should throw', () => {
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['invalid..path'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (invalid..path)'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['a..b..c'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (a..b..c)'
|
||||
})
|
||||
})
|
||||
|
||||
test('path validation - unmatched brackets should throw', () => {
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['invalid[unclosed'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (invalid[unclosed)'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['invalid]unopened'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (invalid]unopened)'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['nested[a[b]'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (nested[a[b])'
|
||||
})
|
||||
})
|
||||
|
||||
test('path validation - comma-separated paths should throw', () => {
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['req,headers.cookie'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (req,headers.cookie)'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['user,profile,name'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (user,profile,name)'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['a,b'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (a,b)'
|
||||
})
|
||||
})
|
||||
|
||||
test('path validation - mixed valid and invalid should throw', () => {
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['valid.path', 123, 'another.valid'] })
|
||||
}, {
|
||||
message: 'Paths must be (non-empty) strings'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['valid.path', 'invalid..path'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (invalid..path)'
|
||||
})
|
||||
|
||||
assert.throws(() => {
|
||||
slowRedact({ paths: ['valid.path', 'req,headers.cookie'] })
|
||||
}, {
|
||||
message: 'Invalid redaction path (req,headers.cookie)'
|
||||
})
|
||||
})
|
||||
|
||||
test('path validation - valid paths should work', () => {
|
||||
// These should not throw
|
||||
assert.doesNotThrow(() => {
|
||||
slowRedact({ paths: [] })
|
||||
})
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
slowRedact({ paths: ['valid.path'] })
|
||||
})
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
slowRedact({ paths: ['user.password', 'data[0].secret'] })
|
||||
})
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
slowRedact({ paths: ['["quoted-key"].value'] })
|
||||
})
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
slowRedact({ paths: ["['single-quoted'].value"] })
|
||||
})
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
slowRedact({ paths: ['array[0]', 'object.property', 'wildcard.*'] })
|
||||
})
|
||||
})
|
||||
|
||||
// fast-redact compatibility tests
|
||||
test('censor function receives path as array (fast-redact compatibility)', () => {
|
||||
const obj = {
|
||||
headers: {
|
||||
authorization: 'Bearer token',
|
||||
'x-api-key': 'secret-key'
|
||||
}
|
||||
}
|
||||
|
||||
const pathsReceived = []
|
||||
const redact = slowRedact({
|
||||
paths: ['headers.authorization', 'headers["x-api-key"]'],
|
||||
censor: (value, path) => {
|
||||
pathsReceived.push(path)
|
||||
assert(Array.isArray(path), 'Path should be an array')
|
||||
return '[REDACTED]'
|
||||
}
|
||||
})
|
||||
|
||||
redact(obj)
|
||||
|
||||
// Verify paths are arrays
|
||||
assert.strictEqual(pathsReceived.length, 2)
|
||||
assert.deepStrictEqual(pathsReceived[0], ['headers', 'authorization'])
|
||||
assert.deepStrictEqual(pathsReceived[1], ['headers', 'x-api-key'])
|
||||
})
|
||||
|
||||
test('censor function with nested paths receives correct array', () => {
|
||||
const obj = {
|
||||
user: {
|
||||
profile: {
|
||||
credentials: {
|
||||
password: 'secret123'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let receivedPath
|
||||
const redact = slowRedact({
|
||||
paths: ['user.profile.credentials.password'],
|
||||
censor: (value, path) => {
|
||||
receivedPath = path
|
||||
assert.strictEqual(value, 'secret123')
|
||||
assert(Array.isArray(path))
|
||||
return '[REDACTED]'
|
||||
}
|
||||
})
|
||||
|
||||
redact(obj)
|
||||
|
||||
assert.deepStrictEqual(receivedPath, ['user', 'profile', 'credentials', 'password'])
|
||||
})
|
||||
|
||||
test('censor function with wildcards receives correct array paths', () => {
|
||||
const obj = {
|
||||
users: {
|
||||
user1: { password: 'secret1' },
|
||||
user2: { password: 'secret2' }
|
||||
}
|
||||
}
|
||||
|
||||
const pathsReceived = []
|
||||
const redact = slowRedact({
|
||||
paths: ['users.*.password'],
|
||||
censor: (value, path) => {
|
||||
pathsReceived.push([...path]) // copy the array
|
||||
assert(Array.isArray(path))
|
||||
return '[REDACTED]'
|
||||
}
|
||||
})
|
||||
|
||||
redact(obj)
|
||||
|
||||
assert.strictEqual(pathsReceived.length, 2)
|
||||
assert.deepStrictEqual(pathsReceived[0], ['users', 'user1', 'password'])
|
||||
assert.deepStrictEqual(pathsReceived[1], ['users', 'user2', 'password'])
|
||||
})
|
||||
|
||||
test('censor function with array wildcard receives correct array paths', () => {
|
||||
const obj = {
|
||||
items: [
|
||||
{ secret: 'value1' },
|
||||
{ secret: 'value2' }
|
||||
]
|
||||
}
|
||||
|
||||
const pathsReceived = []
|
||||
const redact = slowRedact({
|
||||
paths: ['items.*.secret'],
|
||||
censor: (value, path) => {
|
||||
pathsReceived.push([...path])
|
||||
assert(Array.isArray(path))
|
||||
return '[REDACTED]'
|
||||
}
|
||||
})
|
||||
|
||||
redact(obj)
|
||||
|
||||
assert.strictEqual(pathsReceived.length, 2)
|
||||
assert.deepStrictEqual(pathsReceived[0], ['items', '0', 'secret'])
|
||||
assert.deepStrictEqual(pathsReceived[1], ['items', '1', 'secret'])
|
||||
})
|
||||
|
||||
test('censor function with end wildcard receives correct array paths', () => {
|
||||
const obj = {
|
||||
secrets: {
|
||||
key1: 'secret1',
|
||||
key2: 'secret2'
|
||||
}
|
||||
}
|
||||
|
||||
const pathsReceived = []
|
||||
const redact = slowRedact({
|
||||
paths: ['secrets.*'],
|
||||
censor: (value, path) => {
|
||||
pathsReceived.push([...path])
|
||||
assert(Array.isArray(path))
|
||||
return '[REDACTED]'
|
||||
}
|
||||
})
|
||||
|
||||
redact(obj)
|
||||
|
||||
assert.strictEqual(pathsReceived.length, 2)
|
||||
// Sort paths for consistent testing since object iteration order isn't guaranteed
|
||||
pathsReceived.sort((a, b) => a[1].localeCompare(b[1]))
|
||||
assert.deepStrictEqual(pathsReceived[0], ['secrets', 'key1'])
|
||||
assert.deepStrictEqual(pathsReceived[1], ['secrets', 'key2'])
|
||||
})
|
||||
|
||||
test('type safety: accessing properties on primitive values should not throw', () => {
|
||||
// Test case from GitHub issue #5
|
||||
const redactor = slowRedact({ paths: ['headers.authorization'] })
|
||||
const data = {
|
||||
headers: 123 // primitive value
|
||||
}
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
const result = redactor(data)
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.headers, 123) // Should remain unchanged
|
||||
})
|
||||
|
||||
// Test wildcards with primitives
|
||||
const redactor2 = slowRedact({ paths: ['data.*.nested'] })
|
||||
const data2 = {
|
||||
data: {
|
||||
item1: 123, // primitive, trying to access .nested on it
|
||||
item2: { nested: 'secret' }
|
||||
}
|
||||
}
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
const result2 = redactor2(data2)
|
||||
const parsed2 = JSON.parse(result2)
|
||||
assert.strictEqual(parsed2.data.item1, 123) // Primitive unchanged
|
||||
assert.strictEqual(parsed2.data.item2.nested, '[REDACTED]') // Object property redacted
|
||||
})
|
||||
|
||||
// Test deep nested access on primitives
|
||||
const redactor3 = slowRedact({ paths: ['user.name.first.charAt'] })
|
||||
const data3 = {
|
||||
user: {
|
||||
name: 'John' // string primitive
|
||||
}
|
||||
}
|
||||
|
||||
assert.doesNotThrow(() => {
|
||||
const result3 = redactor3(data3)
|
||||
const parsed3 = JSON.parse(result3)
|
||||
assert.strictEqual(parsed3.user.name, 'John') // Should remain unchanged
|
||||
})
|
||||
})
|
||||
|
||||
// Remove option tests
|
||||
test('remove option: basic key removal', () => {
|
||||
const obj = { username: 'john', password: 'secret123' }
|
||||
const redact = slowRedact({ paths: ['password'], remove: true })
|
||||
const result = redact(obj)
|
||||
|
||||
// Original object should remain unchanged
|
||||
assert.strictEqual(obj.password, 'secret123')
|
||||
|
||||
// Result should have password completely removed
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.username, 'john')
|
||||
assert.strictEqual('password' in parsed, false)
|
||||
assert.strictEqual(parsed.password, undefined)
|
||||
})
|
||||
|
||||
test('remove option: multiple paths removal', () => {
|
||||
const obj = {
|
||||
user: { name: 'john', password: 'secret' },
|
||||
session: { token: 'abc123', id: 'session1' }
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['user.password', 'session.token'],
|
||||
remove: true
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
// Original unchanged
|
||||
assert.strictEqual(obj.user.password, 'secret')
|
||||
assert.strictEqual(obj.session.token, 'abc123')
|
||||
|
||||
// Result has keys completely removed
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.user.name, 'john')
|
||||
assert.strictEqual(parsed.session.id, 'session1')
|
||||
assert.strictEqual('password' in parsed.user, false)
|
||||
assert.strictEqual('token' in parsed.session, false)
|
||||
})
|
||||
|
||||
test('remove option: wildcard removal', () => {
|
||||
const obj = {
|
||||
secrets: {
|
||||
key1: 'secret1',
|
||||
key2: 'secret2'
|
||||
},
|
||||
public: 'data'
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['secrets.*'],
|
||||
remove: true
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.public, 'data')
|
||||
assert.deepStrictEqual(parsed.secrets, {}) // All keys removed
|
||||
})
|
||||
|
||||
test('remove option: array wildcard removal', () => {
|
||||
const obj = {
|
||||
items: ['secret1', 'secret2', 'secret3'],
|
||||
meta: 'data'
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['items.*'],
|
||||
remove: true
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.meta, 'data')
|
||||
// Array items set to undefined are omitted by JSON.stringify
|
||||
assert.deepStrictEqual(parsed.items, [null, null, null])
|
||||
})
|
||||
|
||||
test('remove option: intermediate wildcard removal', () => {
|
||||
const obj = {
|
||||
users: {
|
||||
user1: { password: 'secret1', name: 'john' },
|
||||
user2: { password: 'secret2', name: 'jane' }
|
||||
}
|
||||
}
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['users.*.password'],
|
||||
remove: true
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.users.user1.name, 'john')
|
||||
assert.strictEqual(parsed.users.user2.name, 'jane')
|
||||
assert.strictEqual('password' in parsed.users.user1, false)
|
||||
assert.strictEqual('password' in parsed.users.user2, false)
|
||||
})
|
||||
|
||||
test('remove option: serialize false returns object with removed keys', () => {
|
||||
const obj = { secret: 'hidden', public: 'data' }
|
||||
const redact = slowRedact({
|
||||
paths: ['secret'],
|
||||
remove: true,
|
||||
serialize: false
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
// Should be object, not string
|
||||
assert.strictEqual(typeof result, 'object')
|
||||
assert.strictEqual(result.public, 'data')
|
||||
assert.strictEqual('secret' in result, false)
|
||||
|
||||
// Should have restore method
|
||||
assert.strictEqual(typeof result.restore, 'function')
|
||||
|
||||
const restored = result.restore()
|
||||
assert.strictEqual(restored.secret, 'hidden')
|
||||
})
|
||||
|
||||
test('remove option: non-existent paths are ignored', () => {
|
||||
const obj = { existing: 'value' }
|
||||
const redact = slowRedact({
|
||||
paths: ['nonexistent.path'],
|
||||
remove: true
|
||||
})
|
||||
const result = redact(obj)
|
||||
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed.existing, 'value')
|
||||
assert.strictEqual(parsed.nonexistent, undefined)
|
||||
})
|
||||
|
||||
// Test for Issue #13: Empty string bracket notation paths not being redacted correctly
|
||||
test('empty string bracket notation path', () => {
|
||||
const obj = { '': { c: 'sensitive-data' } }
|
||||
const redact = slowRedact({ paths: ["[''].c"] })
|
||||
const result = redact(obj)
|
||||
|
||||
// Original object should remain unchanged
|
||||
assert.strictEqual(obj[''].c, 'sensitive-data')
|
||||
|
||||
// Result should have redacted path
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed[''].c, '[REDACTED]')
|
||||
})
|
||||
|
||||
test('empty string bracket notation with double quotes', () => {
|
||||
const obj = { '': { c: 'sensitive-data' } }
|
||||
const redact = slowRedact({ paths: ['[""].c'] })
|
||||
const result = redact(obj)
|
||||
|
||||
// Original object should remain unchanged
|
||||
assert.strictEqual(obj[''].c, 'sensitive-data')
|
||||
|
||||
// Result should have redacted path
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed[''].c, '[REDACTED]')
|
||||
})
|
||||
|
||||
test('empty string key with nested bracket notation', () => {
|
||||
const obj = { '': { '': { secret: 'value' } } }
|
||||
const redact = slowRedact({ paths: ["[''][''].secret"] })
|
||||
const result = redact(obj)
|
||||
|
||||
// Original object should remain unchanged
|
||||
assert.strictEqual(obj[''][''].secret, 'value')
|
||||
|
||||
// Result should have redacted path
|
||||
const parsed = JSON.parse(result)
|
||||
assert.strictEqual(parsed[''][''].secret, '[REDACTED]')
|
||||
})
|
||||
|
||||
// Test for Pino issue #2313: censor should only be called when path exists
|
||||
test('censor function not called for non-existent paths', () => {
|
||||
let censorCallCount = 0
|
||||
const censorCalls = []
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['a.b.c', 'req.authorization', 'url'],
|
||||
serialize: false,
|
||||
censor (value, path) {
|
||||
censorCallCount++
|
||||
censorCalls.push({ value, path: path.slice() })
|
||||
return '***'
|
||||
}
|
||||
})
|
||||
|
||||
// Test case 1: { req: { id: 'test' } }
|
||||
// req.authorization doesn't exist, censor should not be called for it
|
||||
censorCallCount = 0
|
||||
censorCalls.length = 0
|
||||
redact({ req: { id: 'test' } })
|
||||
|
||||
// Should not have been called for any path since none exist
|
||||
assert.strictEqual(censorCallCount, 0, 'censor should not be called when paths do not exist')
|
||||
|
||||
// Test case 2: { a: { d: 'test' } }
|
||||
// a.b.c doesn't exist (a.d exists, but not a.b.c)
|
||||
censorCallCount = 0
|
||||
redact({ a: { d: 'test' } })
|
||||
assert.strictEqual(censorCallCount, 0)
|
||||
|
||||
// Test case 3: paths that do exist should still call censor
|
||||
censorCallCount = 0
|
||||
censorCalls.length = 0
|
||||
const result = redact({ req: { authorization: 'bearer token' } })
|
||||
assert.strictEqual(censorCallCount, 1, 'censor should be called when path exists')
|
||||
assert.deepStrictEqual(censorCalls[0].path, ['req', 'authorization'])
|
||||
assert.strictEqual(censorCalls[0].value, 'bearer token')
|
||||
assert.strictEqual(result.req.authorization, '***')
|
||||
})
|
||||
|
||||
test('censor function not called for non-existent nested paths', () => {
|
||||
let censorCallCount = 0
|
||||
|
||||
const redact = slowRedact({
|
||||
paths: ['headers.authorization'],
|
||||
serialize: false,
|
||||
censor (value, path) {
|
||||
censorCallCount++
|
||||
return '[REDACTED]'
|
||||
}
|
||||
})
|
||||
|
||||
// headers exists but authorization doesn't
|
||||
censorCallCount = 0
|
||||
const result1 = redact({ headers: { 'content-type': 'application/json' } })
|
||||
assert.strictEqual(censorCallCount, 0)
|
||||
assert.deepStrictEqual(result1.headers, { 'content-type': 'application/json' })
|
||||
|
||||
// headers doesn't exist at all
|
||||
censorCallCount = 0
|
||||
const result2 = redact({ body: 'data' })
|
||||
assert.strictEqual(censorCallCount, 0)
|
||||
assert.strictEqual(result2.body, 'data')
|
||||
assert.strictEqual(typeof result2.restore, 'function')
|
||||
|
||||
// headers.authorization exists - should call censor
|
||||
censorCallCount = 0
|
||||
const result3 = redact({ headers: { authorization: 'Bearer token' } })
|
||||
assert.strictEqual(censorCallCount, 1)
|
||||
assert.strictEqual(result3.headers.authorization, '[REDACTED]')
|
||||
})
|
||||
390 node_modules/@pinojs/redact/test/integration.test.js generated vendored
@ -1,390 +0,0 @@
|
||||
const { test } = require('node:test')
|
||||
const { strict: assert } = require('node:assert')
|
||||
const slowRedact = require('../index.js')
|
||||
const fastRedact = require('fast-redact')
|
||||
|
||||
test('integration: basic path redaction matches fast-redact', () => {
|
||||
const obj = {
|
||||
headers: {
|
||||
cookie: 'secret-cookie',
|
||||
authorization: 'Bearer token'
|
||||
},
|
||||
body: { message: 'hello' }
|
||||
}
|
||||
|
||||
const slowResult = slowRedact({ paths: ['headers.cookie'] })(obj)
|
||||
const fastResult = fastRedact({ paths: ['headers.cookie'] })(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: multiple paths match fast-redact', () => {
|
||||
const obj = {
|
||||
user: { name: 'john', password: 'secret' },
|
||||
session: { token: 'abc123' }
|
||||
}
|
||||
|
||||
const paths = ['user.password', 'session.token']
|
||||
const slowResult = slowRedact({ paths })(obj)
|
||||
const fastResult = fastRedact({ paths })(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: custom censor value matches fast-redact', () => {
|
||||
const obj = { secret: 'hidden' }
|
||||
const options = { paths: ['secret'], censor: '***' }
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: bracket notation matches fast-redact', () => {
|
||||
const obj = {
|
||||
'weird-key': { 'another-weird': 'secret' },
|
||||
normal: 'public'
|
||||
}
|
||||
|
||||
const options = { paths: ['["weird-key"]["another-weird"]'] }
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: array paths match fast-redact', () => {
|
||||
const obj = {
|
||||
users: [
|
||||
{ name: 'john', password: 'secret1' },
|
||||
{ name: 'jane', password: 'secret2' }
|
||||
]
|
||||
}
|
||||
|
||||
const options = { paths: ['users[0].password', 'users[1].password'] }
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: wildcard at end matches fast-redact', () => {
|
||||
const obj = {
|
||||
secrets: {
|
||||
key1: 'secret1',
|
||||
key2: 'secret2'
|
||||
},
|
||||
public: 'data'
|
||||
}
|
||||
|
||||
const options = { paths: ['secrets.*'] }
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: wildcard with arrays matches fast-redact', () => {
|
||||
const obj = {
|
||||
items: ['secret1', 'secret2', 'secret3']
|
||||
}
|
||||
|
||||
const options = { paths: ['items.*'] }
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: intermediate wildcard matches fast-redact', () => {
|
||||
const obj = {
|
||||
users: {
|
||||
user1: { password: 'secret1' },
|
||||
user2: { password: 'secret2' }
|
||||
}
|
||||
}
|
||||
|
||||
const options = { paths: ['users.*.password'] }
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: custom serialize function matches fast-redact', () => {
|
||||
const obj = { secret: 'hidden', public: 'data' }
|
||||
const options = {
|
||||
paths: ['secret'],
|
||||
serialize: (obj) => `custom:${JSON.stringify(obj)}`
|
||||
}
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: nested paths match fast-redact', () => {
|
||||
const obj = {
|
||||
level1: {
|
||||
level2: {
|
||||
level3: {
|
||||
secret: 'hidden'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const options = { paths: ['level1.level2.level3.secret'] }
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: non-existent paths match fast-redact', () => {
|
||||
const obj = { existing: 'value' }
|
||||
const options = { paths: ['nonexistent.path'] }
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: null and undefined handling - legitimate difference', () => {
|
||||
const obj = {
|
||||
nullValue: null,
|
||||
undefinedValue: undefined,
|
||||
nested: {
|
||||
nullValue: null
|
||||
}
|
||||
}
|
||||
|
||||
const options = { paths: ['nullValue', 'nested.nullValue'] }
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
// This is a legitimate behavioral difference:
|
||||
// @pinojs/redact redacts null values, fast-redact doesn't
|
||||
const slowParsed = JSON.parse(slowResult)
|
||||
const fastParsed = JSON.parse(fastResult)
|
||||
|
||||
// @pinojs/redact redacts nulls
|
||||
assert.strictEqual(slowParsed.nullValue, '[REDACTED]')
|
||||
assert.strictEqual(slowParsed.nested.nullValue, '[REDACTED]')
|
||||
|
||||
// fast-redact preserves nulls
|
||||
assert.strictEqual(fastParsed.nullValue, null)
|
||||
assert.strictEqual(fastParsed.nested.nullValue, null)
|
||||
})
|
||||
|
||||
test('integration: strict mode with primitives - different error handling', () => {
|
||||
const options = { paths: ['test'], strict: true }
|
||||
|
||||
const slowRedactFn = slowRedact(options)
|
||||
const fastRedactFn = fastRedact(options)
|
||||
|
||||
// @pinojs/redact handles primitives gracefully
|
||||
const stringSlowResult = slowRedactFn('primitive')
|
||||
assert.strictEqual(stringSlowResult, '"primitive"')
|
||||
|
||||
const numberSlowResult = slowRedactFn(42)
|
||||
assert.strictEqual(numberSlowResult, '42')
|
||||
|
||||
// fast-redact throws an error for primitives in strict mode
|
||||
assert.throws(() => {
|
||||
fastRedactFn('primitive')
|
||||
}, /primitives cannot be redacted/)
|
||||
|
||||
assert.throws(() => {
|
||||
fastRedactFn(42)
|
||||
}, /primitives cannot be redacted/)
|
||||
})
|
||||
|
||||
test('integration: serialize false behavior difference', () => {
|
||||
const slowObj = { secret: 'hidden' }
|
||||
const fastObj = { secret: 'hidden' }
|
||||
const options = { paths: ['secret'], serialize: false }
|
||||
|
||||
const slowResult = slowRedact(options)(slowObj)
|
||||
const fastResult = fastRedact(options)(fastObj)
|
||||
|
||||
// Both should redact the secret
|
||||
assert.strictEqual(slowResult.secret, '[REDACTED]')
|
||||
assert.strictEqual(fastResult.secret, '[REDACTED]')
|
||||
|
||||
// @pinojs/redact always has restore method
|
||||
assert.strictEqual(typeof slowResult.restore, 'function')
|
||||
|
||||
// @pinojs/redact should restore to original value
|
||||
assert.strictEqual(slowResult.restore().secret, 'hidden')
|
||||
|
||||
// Key difference: original object state
|
||||
// fast-redact mutates the original, @pinojs/redact doesn't
|
||||
assert.strictEqual(slowObj.secret, 'hidden') // @pinojs/redact preserves original
|
||||
assert.strictEqual(fastObj.secret, '[REDACTED]') // fast-redact mutates original
|
||||
})
|
||||
|
||||
test('integration: censor function behavior', () => {
|
||||
const obj = { secret: 'hidden' }
|
||||
const options = {
|
||||
paths: ['secret'],
|
||||
censor: (value, path) => `REDACTED:${path}`
|
||||
}
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: complex object with mixed patterns', () => {
|
||||
const obj = {
|
||||
users: [
|
||||
{
|
||||
id: 1,
|
||||
name: 'john',
|
||||
credentials: { password: 'secret1', apiKey: 'key1' }
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'jane',
|
||||
credentials: { password: 'secret2', apiKey: 'key2' }
|
||||
}
|
||||
],
|
||||
config: {
|
||||
database: { password: 'db-secret' },
|
||||
api: { keys: ['key1', 'key2', 'key3'] }
|
||||
}
|
||||
}
|
||||
|
||||
const options = {
|
||||
paths: [
|
||||
'users.*.credentials.password',
|
||||
'users.*.credentials.apiKey',
|
||||
'config.database.password',
|
||||
'config.api.keys.*'
|
||||
]
|
||||
}
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
// Remove option integration tests - comparing with fast-redact
|
||||
test('integration: remove option basic comparison with fast-redact', () => {
|
||||
const obj = { username: 'john', password: 'secret123' }
|
||||
const options = { paths: ['password'], remove: true }
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
|
||||
// Verify the key is actually removed
|
||||
const parsed = JSON.parse(slowResult)
|
||||
assert.strictEqual(parsed.username, 'john')
|
||||
assert.strictEqual('password' in parsed, false)
|
||||
})
|
||||
|
||||
test('integration: remove option multiple paths comparison with fast-redact', () => {
|
||||
const obj = {
|
||||
user: { name: 'john', password: 'secret' },
|
||||
session: { token: 'abc123', id: 'session1' }
|
||||
}
|
||||
|
||||
const options = {
|
||||
paths: ['user.password', 'session.token'],
|
||||
remove: true
|
||||
}
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: remove option wildcard comparison with fast-redact', () => {
|
||||
const obj = {
|
||||
secrets: {
|
||||
key1: 'secret1',
|
||||
key2: 'secret2'
|
||||
},
|
||||
public: 'data'
|
||||
}
|
||||
|
||||
const options = {
|
||||
paths: ['secrets.*'],
|
||||
remove: true
|
||||
}
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: remove option intermediate wildcard comparison with fast-redact', () => {
|
||||
const obj = {
|
||||
users: {
|
||||
user1: { password: 'secret1', name: 'john' },
|
||||
user2: { password: 'secret2', name: 'jane' }
|
||||
}
|
||||
}
|
||||
|
||||
const options = {
|
||||
paths: ['users.*.password'],
|
||||
remove: true
|
||||
}
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
})
|
||||
|
||||
test('integration: remove option with custom censor comparison with fast-redact', () => {
|
||||
const obj = { secret: 'hidden', public: 'data' }
|
||||
const options = {
|
||||
paths: ['secret'],
|
||||
censor: '***',
|
||||
remove: true
|
||||
}
|
||||
|
||||
const slowResult = slowRedact(options)(obj)
|
||||
const fastResult = fastRedact(options)(obj)
|
||||
|
||||
assert.strictEqual(slowResult, fastResult)
|
||||
|
||||
// With remove: true, censor value should be ignored
|
||||
const parsed = JSON.parse(slowResult)
|
||||
assert.strictEqual('secret' in parsed, false)
|
||||
assert.strictEqual(parsed.public, 'data')
|
||||
})
|
||||
|
||||
test('integration: remove option serialize false behavior - @pinojs/redact only', () => {
|
||||
// fast-redact doesn't support remove option with serialize: false
|
||||
// so we test @pinojs/redact's behavior only
|
||||
const obj = { secret: 'hidden', public: 'data' }
|
||||
const options = { paths: ['secret'], remove: true, serialize: false }
|
||||
|
||||
const result = slowRedact(options)(obj)
|
||||
|
||||
// Should have the key removed
|
||||
assert.strictEqual('secret' in result, false)
|
||||
assert.strictEqual(result.public, 'data')
|
||||
|
||||
// Should have restore method
|
||||
assert.strictEqual(typeof result.restore, 'function')
|
||||
|
||||
// Original object should be preserved
|
||||
assert.strictEqual(obj.secret, 'hidden')
|
||||
|
||||
// Restore should bring back the removed key
|
||||
const restored = result.restore()
|
||||
assert.strictEqual(restored.secret, 'hidden')
|
||||
})
|
||||
227 node_modules/@pinojs/redact/test/multiple-wildcards.test.js generated vendored
@ -1,227 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const { test } = require('node:test')
|
||||
const { strict: assert } = require('node:assert')
|
||||
const slowRedact = require('../index.js')
|
||||
|
||||
// Tests for Issue #2319: @pinojs/redact fails to redact patterns with 3+ consecutive wildcards
test('three consecutive wildcards: *.*.*.password (4 levels deep)', () => {
  const obj = {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } }
  }

  const redact = slowRedact({
    paths: ['*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Only the 4-level deep password should be redacted
  assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted')
  assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted')
  assert.strictEqual(parsed.nested.deep.auth.password, '[REDACTED]', '4-level password SHOULD be redacted')
})

test('four consecutive wildcards: *.*.*.*.password (5 levels deep)', () => {
  const obj = {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: { user: { auth: { settings: { password: 'secret-5-levels' } } } }
  }

  const redact = slowRedact({
    paths: ['*.*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Only the 5-level deep password should be redacted
  assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted')
  assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted')
  assert.strictEqual(parsed.nested.deep.auth.password, 'secret-4-levels', '4-level password should NOT be redacted')
  assert.strictEqual(parsed.config.user.auth.settings.password, '[REDACTED]', '5-level password SHOULD be redacted')
})

test('five consecutive wildcards: *.*.*.*.*.password (6 levels deep)', () => {
  const obj = {
    simple: { password: 'secret-2-levels' },
    user: { auth: { password: 'secret-3-levels' } },
    nested: { deep: { auth: { password: 'secret-4-levels' } } },
    config: { user: { auth: { settings: { password: 'secret-5-levels' } } } },
    data: {
      reqConfig: {
        data: {
          credentials: {
            settings: {
              password: 'secret-6-levels'
            }
          }
        }
      }
    }
  }

  const redact = slowRedact({
    paths: ['*.*.*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Only the 6-level deep password should be redacted
  assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted')
  assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted')
  assert.strictEqual(parsed.nested.deep.auth.password, 'secret-4-levels', '4-level password should NOT be redacted')
  assert.strictEqual(parsed.config.user.auth.settings.password, 'secret-5-levels', '5-level password should NOT be redacted')
  assert.strictEqual(parsed.data.reqConfig.data.credentials.settings.password, '[REDACTED]', '6-level password SHOULD be redacted')
})

test('three wildcards with censor function receives correct values', () => {
  const obj = {
    nested: { deep: { auth: { password: 'secret-value' } } }
  }

  const censorCalls = []
  const redact = slowRedact({
    paths: ['*.*.*.password'],
    censor: (value, path) => {
      censorCalls.push({ value, path: [...path] })
      return '[REDACTED]'
    }
  })

  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Should have been called exactly once with the correct value
  assert.strictEqual(censorCalls.length, 1, 'censor should be called once')
  assert.strictEqual(censorCalls[0].value, 'secret-value', 'censor should receive the actual value')
  assert.deepStrictEqual(censorCalls[0].path, ['nested', 'deep', 'auth', 'password'], 'censor should receive correct path')
  assert.strictEqual(parsed.nested.deep.auth.password, '[REDACTED]')
})

test('three wildcards with multiple matches', () => {
  const obj = {
    api1: { v1: { auth: { token: 'token1' } } },
    api2: { v2: { auth: { token: 'token2' } } },
    api3: { v1: { auth: { token: 'token3' } } }
  }

  const redact = slowRedact({
    paths: ['*.*.*.token']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // All three tokens should be redacted
  assert.strictEqual(parsed.api1.v1.auth.token, '[REDACTED]')
  assert.strictEqual(parsed.api2.v2.auth.token, '[REDACTED]')
  assert.strictEqual(parsed.api3.v1.auth.token, '[REDACTED]')
})

test('three wildcards with remove option', () => {
  const obj = {
    nested: { deep: { auth: { password: 'secret', username: 'admin' } } }
  }

  const redact = slowRedact({
    paths: ['*.*.*.password'],
    remove: true
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Password should be removed entirely
  assert.strictEqual('password' in parsed.nested.deep.auth, false, 'password key should be removed')
  assert.strictEqual(parsed.nested.deep.auth.username, 'admin', 'username should remain')
})

test('mixed: two and three wildcards in same redactor', () => {
  const obj = {
    user: { auth: { password: 'secret-3-levels' } },
    config: { deep: { auth: { password: 'secret-4-levels' } } }
  }

  const redact = slowRedact({
    paths: ['*.*.password', '*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Both should be redacted
  assert.strictEqual(parsed.user.auth.password, '[REDACTED]', '3-level should be redacted by *.*.password')
  assert.strictEqual(parsed.config.deep.auth.password, '[REDACTED]', '4-level should be redacted by *.*.*.password')
})

test('three wildcards should not call censor for non-existent paths', () => {
  const obj = {
    shallow: { data: 'value' },
    nested: { deep: { auth: { password: 'secret' } } }
  }

  let censorCallCount = 0
  const redact = slowRedact({
    paths: ['*.*.*.password'],
    censor: (value, path) => {
      censorCallCount++
      return '[REDACTED]'
    }
  })

  redact(obj)

  // Should only be called once for the path that exists
  assert.strictEqual(censorCallCount, 1, 'censor should only be called for existing paths')
})

test('three wildcards with arrays', () => {
  const obj = {
    users: [
      { auth: { password: 'secret1' } },
      { auth: { password: 'secret2' } }
    ]
  }

  const redact = slowRedact({
    paths: ['*.*.*.password']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Both passwords should be redacted (users[0].auth.password is 4 levels)
  assert.strictEqual(parsed.users[0].auth.password, '[REDACTED]')
  assert.strictEqual(parsed.users[1].auth.password, '[REDACTED]')
})

test('four wildcards with authorization header (real-world case)', () => {
  const obj = {
    requests: {
      api1: {
        config: {
          headers: {
            authorization: 'Bearer secret-token'
          }
        }
      },
      api2: {
        config: {
          headers: {
            authorization: 'Bearer another-token'
          }
        }
      }
    }
  }

  const redact = slowRedact({
    paths: ['*.*.*.*.authorization']
  })
  const result = redact(obj)
  const parsed = JSON.parse(result)

  // Both authorization headers should be redacted
  assert.strictEqual(parsed.requests.api1.config.headers.authorization, '[REDACTED]')
  assert.strictEqual(parsed.requests.api2.config.headers.authorization, '[REDACTED]')
})
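The wildcard paths exercised above are the same shape of paths an application would hand to pino's `redact` option. A minimal usage sketch, illustrative only and assuming a pino version whose redaction backend accepts consecutive wildcards (the case these tests cover); the logger configuration and payload are not taken from this repository:

```js
const pino = require('pino')

// Redact secrets sitting three or four property levels deep, matching the
// '*.*.*.password' and '*.*.*.*.authorization' patterns tested above.
const logger = pino({
  redact: {
    paths: ['*.*.*.password', '*.*.*.*.authorization'],
    censor: '[REDACTED]'
  }
})

logger.info({
  nested: { deep: { auth: { password: 'hunter2' } } }
}, 'login attempt')
```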
223 node_modules/@pinojs/redact/test/prototype-pollution.test.js generated vendored
@ -1,223 +0,0 @@
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const slowRedact = require('../index.js')

/* eslint-disable no-proto */

test('prototype pollution: __proto__ path should not pollute Object prototype', () => {
  const obj = {
    user: { name: 'john' },
    __proto__: { isAdmin: true }
  }

  const redact = slowRedact({
    paths: ['__proto__.isAdmin'],
    serialize: false
  })

  const result = redact(obj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)

  // Should redact the __proto__ property if it exists as a regular property
  assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]')
})

test('prototype pollution: constructor.prototype path should not pollute', () => {
  const obj = {
    user: { name: 'john' },
    constructor: {
      prototype: { isAdmin: true }
    }
  }

  const redact = slowRedact({
    paths: ['constructor.prototype.isAdmin'],
    serialize: false
  })

  const result = redact(obj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)

  // Should redact the constructor.prototype property if it exists as a regular property
  assert.strictEqual(result.constructor.prototype.isAdmin, '[REDACTED]')
})

test('prototype pollution: nested __proto__ should not pollute', () => {
  const obj = {
    user: {
      settings: {
        __proto__: { isAdmin: true }
      }
    }
  }

  const redact = slowRedact({
    paths: ['user.settings.__proto__.isAdmin'],
    serialize: false
  })

  const result = redact(obj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)

  // Should redact the nested __proto__ property
  assert.strictEqual(result.user.settings.__proto__.isAdmin, '[REDACTED]')
})

test('prototype pollution: bracket notation __proto__ should not pollute', () => {
  const obj = {
    user: { name: 'john' },
    __proto__: { isAdmin: true }
  }

  const redact = slowRedact({
    paths: ['["__proto__"]["isAdmin"]'],
    serialize: false
  })

  const result = redact(obj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)

  // Should redact the __proto__ property when accessed via bracket notation
  assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]')
})

test('prototype pollution: wildcard with __proto__ should not pollute', () => {
  const obj = {
    users: {
      __proto__: { isAdmin: true },
      user1: { name: 'john' },
      user2: { name: 'jane' }
    }
  }

  const redact = slowRedact({
    paths: ['users.*'],
    serialize: false
  })

  const result = redact(obj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)

  // Should redact only own properties
  assert.strictEqual(result.users.user1, '[REDACTED]')
  assert.strictEqual(result.users.user2, '[REDACTED]')

  // __proto__ should only be redacted if it's an own property, not inherited
  if (Object.prototype.hasOwnProperty.call(obj.users, '__proto__')) {
    assert.strictEqual(result.users.__proto__, '[REDACTED]')
  }
})

test('prototype pollution: malicious JSON payload should not pollute', () => {
  // Simulate a malicious payload that might come from JSON.parse
  const maliciousObj = JSON.parse('{"user": {"name": "john"}, "__proto__": {"isAdmin": true}}')

  const redact = slowRedact({
    paths: ['__proto__.isAdmin'],
    serialize: false
  })

  const result = redact(maliciousObj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)

  // The malicious payload should have been redacted
  assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]')
})

test('prototype pollution: verify prototype chain is preserved', () => {
  function CustomClass () {
    this.data = 'test'
  }
  CustomClass.prototype.method = function () { return 'original' }

  const obj = new CustomClass()

  const redact = slowRedact({
    paths: ['data'],
    serialize: false
  })

  const result = redact(obj)

  // Should redact the data property
  assert.strictEqual(result.data, '[REDACTED]')

  // Should preserve the original prototype chain
  assert.strictEqual(result.method(), 'original')
  assert.strictEqual(Object.getPrototypeOf(result), CustomClass.prototype)
})

test('prototype pollution: setValue should not create prototype pollution', () => {
  const obj = { user: { name: 'john' } }

  // Try to pollute via non-existent path that could create __proto__
  const redact = slowRedact({
    paths: ['__proto__.isAdmin'],
    serialize: false
  })

  const result = redact(obj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual({}.isAdmin, undefined)

  // Should not create the path if it doesn't exist
  // The __proto__ property may exist due to Object.create, but should not contain our redacted value
  if (result.__proto__) {
    assert.strictEqual(result.__proto__.isAdmin, undefined)
  }
})

test('prototype pollution: deep nested prototype properties should not pollute', () => {
  const obj = {
    level1: {
      level2: {
        level3: {
          __proto__: { isAdmin: true },
          constructor: {
            prototype: { isEvil: true }
          }
        }
      }
    }
  }

  const redact = slowRedact({
    paths: [
      'level1.level2.level3.__proto__.isAdmin',
      'level1.level2.level3.constructor.prototype.isEvil'
    ],
    serialize: false
  })

  const result = redact(obj)

  // Should not pollute Object.prototype
  assert.strictEqual(Object.prototype.isAdmin, undefined)
  assert.strictEqual(Object.prototype.isEvil, undefined)
  assert.strictEqual({}.isAdmin, undefined)
  assert.strictEqual({}.isEvil, undefined)

  // Should redact the deep nested properties
  assert.strictEqual(result.level1.level2.level3.__proto__.isAdmin, '[REDACTED]')
  assert.strictEqual(result.level1.level2.level3.constructor.prototype.isEvil, '[REDACTED]')
})
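The invariant these tests pin down is the standard defence against prototype pollution: property writes must only ever go through own properties, so a path such as `__proto__.isAdmin` can never reach `Object.prototype`. A rough sketch of that guard, illustrative only and not the module's actual implementation:

```js
// Walk `path` through own properties only and assign `value` at the end.
// Inherited keys such as __proto__ or constructor.prototype are never
// followed, so Object.prototype cannot be reached or modified.
function safeSet (obj, path, value) {
  let target = obj
  for (let i = 0; i < path.length - 1; i++) {
    const key = path[i]
    if (!Object.prototype.hasOwnProperty.call(target, key)) return false
    target = target[key]
    if (target === null || typeof target !== 'object') return false
  }
  const last = path[path.length - 1]
  if (!Object.prototype.hasOwnProperty.call(target, last)) return false
  target[last] = value
  return true
}
```

With an object built from `JSON.parse('{"__proto__": {"isAdmin": true}}')`, the write lands on the own `__proto__` data property rather than the real prototype, which is exactly the behaviour the tests above assert.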
115 node_modules/@pinojs/redact/test/selective-clone.test.js generated vendored
@ -1,115 +0,0 @@
const { test } = require('node:test')
const { strict: assert } = require('node:assert')
const slowRedact = require('../index.js')

test('selective cloning shares references for non-redacted paths', () => {
  const sharedObject = { unchanged: 'data' }
  const obj = {
    toRedact: 'secret',
    shared: sharedObject,
    nested: {
      toRedact: 'secret2',
      shared: sharedObject
    }
  }

  const redact = slowRedact({
    paths: ['toRedact', 'nested.toRedact'],
    serialize: false
  })

  const result = redact(obj)

  // Redacted values should be different
  assert.strictEqual(result.toRedact, '[REDACTED]')
  assert.strictEqual(result.nested.toRedact, '[REDACTED]')

  // Non-redacted references should be shared (same object reference)
  assert.strictEqual(result.shared, obj.shared)
  assert.strictEqual(result.nested.shared, obj.nested.shared)

  // The shared object should be the exact same reference
  assert.strictEqual(result.shared, sharedObject)
  assert.strictEqual(result.nested.shared, sharedObject)
})

test('selective cloning works with arrays', () => {
  const sharedItem = { unchanged: 'data' }
  const obj = {
    items: [
      { secret: 'hidden1', shared: sharedItem },
      { secret: 'hidden2', shared: sharedItem },
      sharedItem
    ]
  }

  const redact = slowRedact({
    paths: ['items.*.secret'],
    serialize: false
  })

  const result = redact(obj)

  // Secrets should be redacted
  assert.strictEqual(result.items[0].secret, '[REDACTED]')
  assert.strictEqual(result.items[1].secret, '[REDACTED]')

  // Shared references should be preserved where possible
  // Note: array items with secrets will be cloned, but their shared properties should still reference the original
  assert.strictEqual(result.items[0].shared, sharedItem)
  assert.strictEqual(result.items[1].shared, sharedItem)

  // The third item gets cloned due to wildcard, but should have the same content
  assert.deepStrictEqual(result.items[2], sharedItem)
  // Note: Due to wildcard '*', all array items are cloned, even if they don't need redaction
  // This is still a significant optimization for object properties that aren't in wildcard paths
})

test('selective cloning with no paths returns original object', () => {
  const obj = { data: 'unchanged' }
  const redact = slowRedact({
    paths: [],
    serialize: false
  })

  const result = redact(obj)

  // Should return the exact same object reference
  assert.strictEqual(result, obj)
})

test('selective cloning performance - large objects with minimal redaction', () => {
  // Create a large object with mostly shared data
  const sharedData = { large: 'data'.repeat(1000) }
  const obj = {
    secret: 'hidden',
    shared1: sharedData,
    shared2: sharedData,
    nested: {
      secret: 'hidden2',
      shared3: sharedData,
      deep: {
        shared4: sharedData,
        moreShared: sharedData
      }
    }
  }

  const redact = slowRedact({
    paths: ['secret', 'nested.secret'],
    serialize: false
  })

  const result = redact(obj)

  // Verify redaction worked
  assert.strictEqual(result.secret, '[REDACTED]')
  assert.strictEqual(result.nested.secret, '[REDACTED]')

  // Verify shared references are preserved
  assert.strictEqual(result.shared1, sharedData)
  assert.strictEqual(result.shared2, sharedData)
  assert.strictEqual(result.nested.shared3, sharedData)
  assert.strictEqual(result.nested.deep.shared4, sharedData)
  assert.strictEqual(result.nested.deep.moreShared, sharedData)
})
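The reference-sharing these tests assert falls out of a copy-on-write strategy: only the containers that sit on a redacted path are cloned, everything else is reused by reference. A compact, hypothetical illustration of the idea (not the module's code):

```js
// Shallow-copy only the objects along `path`; every other property keeps
// pointing at the original input.
function cloneAlongPath (obj, path) {
  const root = Array.isArray(obj) ? obj.slice() : { ...obj }
  let src = obj
  let dst = root
  for (const key of path.slice(0, -1)) {
    const next = src[key]
    if (next === null || typeof next !== 'object') return root
    dst[key] = Array.isArray(next) ? next.slice() : { ...next }
    src = next
    dst = dst[key]
  }
  return root
}

// cloneAlongPath(obj, ['nested', 'toRedact']) copies obj and obj.nested,
// while obj.shared and obj.nested.shared remain the original references.
```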
19 node_modules/@pinojs/redact/tsconfig.json generated vendored
@ -1,19 +0,0 @@
{
  "compilerOptions": {
    "module": "commonjs",
    "lib": [
      "es6"
    ],
    "noImplicitAny": true,
    "noImplicitThis": true,
    "strictFunctionTypes": true,
    "strictNullChecks": true,
    "types": [],
    "noEmit": true,
    "forceConsistentCasingInFileNames": true
  },
  "files": [
    "index.d.ts",
    "index.test-d.ts"
  ]
}
11 node_modules/atomic-sleep/.travis.yml generated vendored
@ -1,11 +0,0 @@
language: node_js
sudo: false
node_js:
- 6
- 8
- 10
- 11
- 12
- 13
script:
- npm run ci
22 node_modules/atomic-sleep/LICENSE generated vendored
@ -1,22 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2020 David Mark Clements
|
||||
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
|
||||
OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
38 node_modules/atomic-sleep/index.js generated vendored
@ -1,38 +0,0 @@
'use strict'

/* global SharedArrayBuffer, Atomics */

if (typeof SharedArrayBuffer !== 'undefined' && typeof Atomics !== 'undefined') {
  const nil = new Int32Array(new SharedArrayBuffer(4))

  function sleep (ms) {
    // also filters out NaN, non-number types, including empty strings, but allows bigints
    const valid = ms > 0 && ms < Infinity
    if (valid === false) {
      if (typeof ms !== 'number' && typeof ms !== 'bigint') {
        throw TypeError('sleep: ms must be a number')
      }
      throw RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity')
    }

    Atomics.wait(nil, 0, 0, Number(ms))
  }
  module.exports = sleep
} else {

  function sleep (ms) {
    // also filters out NaN, non-number types, including empty strings, but allows bigints
    const valid = ms > 0 && ms < Infinity
    if (valid === false) {
      if (typeof ms !== 'number' && typeof ms !== 'bigint') {
        throw TypeError('sleep: ms must be a number')
      }
      throw RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity')
    }
    const target = Date.now() + Number(ms)
    while (target > Date.now()) {}
  }

  module.exports = sleep

}
37 node_modules/atomic-sleep/package.json generated vendored
@ -1,37 +0,0 @@
|
||||
{
|
||||
"name": "atomic-sleep",
|
||||
"version": "1.0.0",
|
||||
"description": "Zero CPU overhead, zero dependency, true event-loop blocking sleep",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "tap -R classic- -j1 test",
|
||||
"lint": "standard",
|
||||
"ci": "npm run lint && npm test"
|
||||
},
|
||||
"keywords": [
|
||||
"sleep",
|
||||
"pause",
|
||||
"wait",
|
||||
"performance",
|
||||
"atomics"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
},
|
||||
"author": "David Mark Clements (@davidmarkclem)",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"standard": "^14.3.1",
|
||||
"tap": "^14.10.6",
|
||||
"tape": "^4.13.2"
|
||||
},
|
||||
"dependencies": {},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/davidmarkclements/atomic-sleep.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/davidmarkclements/atomic-sleep/issues"
|
||||
},
|
||||
"homepage": "https://github.com/davidmarkclements/atomic-sleep#readme"
|
||||
}
|
||||
58 node_modules/atomic-sleep/readme.md generated vendored
@ -1,58 +0,0 @@
|
||||
<h1 align="center">Welcome to atomic-sleep ⏱️</h1>
|
||||
<p>
|
||||
<img alt="Version" src="https://img.shields.io/badge/version-1.0.0-blue.svg?cacheSeconds=2592000" />
|
||||
<a href="#" target="_blank">
|
||||
<img alt="License: MIT" src="https://img.shields.io/badge/License-MIT-yellow.svg" />
|
||||
</a>
|
||||
<a href="https://twitter.com/davidmarkclem" target="_blank">
|
||||
<img alt="Twitter: davidmarkclem" src="https://img.shields.io/twitter/follow/davidmarkclem.svg?style=social" />
|
||||
</a>
|
||||
</p>
|
||||
|
||||
> Zero CPU overhead, zero dependency, true event-loop blocking sleep
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const sleep = require('atomic-sleep')
|
||||
|
||||
console.time('sleep')
|
||||
setTimeout(() => { console.timeEnd('sleep') }, 100)
|
||||
sleep(1000)
|
||||
```
|
||||
|
||||
The `console.time` will report a time of just over 1000ms despite the `setTimeout`
|
||||
being 100ms. This is because the event loop is paused for 1000ms and the setTimeout
|
||||
fires immediately after the event loop is no longer blocked (as more than 100ms have passed).
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
npm install
|
||||
```
|
||||
|
||||
## Run tests
|
||||
|
||||
```sh
|
||||
npm test
|
||||
```
|
||||
|
||||
## Support
|
||||
|
||||
Node and Browser versions that support both `SharedArrayBuffer` and `Atomics` will have (virtually) zero CPU overhead sleep.
|
||||
|
||||
For Node, Atomic Sleep can provide zero CPU overhead sleep from Node 8 and up.
|
||||
|
||||
For browser support see https://caniuse.com/#feat=sharedarraybuffer and https://caniuse.com/#feat=mdn-javascript_builtins_atomics.
|
||||
|
||||
|
||||
For older Node versions and older browsers we fall back to blocking the event loop in a way that will cause a CPU spike.
|
||||
|
||||
|
||||
|
||||
## Author
|
||||
|
||||
👤 **David Mark Clements (@davidmarkclem)**
|
||||
|
||||
* Twitter: [@davidmarkclem](https://twitter.com/davidmarkclem)
|
||||
* Github: [@davidmarkclements](https://github.com/davidmarkclements)
|
||||
47 node_modules/atomic-sleep/test.js generated vendored
@ -1,47 +0,0 @@
|
||||
'use strict'
|
||||
const test = require('tape')
|
||||
const sleep = require('.')
|
||||
|
||||
test('blocks event loop for given amount of milliseconds', ({ is, end }) => {
|
||||
const now = Date.now()
|
||||
setTimeout(() => {
|
||||
const delta = Date.now() - now
|
||||
const fuzzyDelta = Math.floor(delta / 10) * 10 // allow up to 10ms of execution lag
|
||||
is(fuzzyDelta, 1000)
|
||||
end()
|
||||
}, 100)
|
||||
sleep(1000)
|
||||
})
|
||||
|
||||
if (typeof BigInt !== 'undefined') {
|
||||
|
||||
test('allows ms to be supplied as a BigInt number', ({ is, end }) => {
|
||||
const now = Date.now()
|
||||
setTimeout(() => {
|
||||
const delta = Date.now() - now
|
||||
const fuzzyDelta = Math.floor(delta / 10) * 10 // allow up to 10ms of execution lag
|
||||
is(fuzzyDelta, 1000)
|
||||
end()
|
||||
}, 100)
|
||||
sleep(BigInt(1000)) // avoiding n notation as this will error on legacy node/browsers
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
test('throws range error if ms less than 0', ({ throws, end }) => {
|
||||
throws(() => sleep(-1), RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity'))
|
||||
end()
|
||||
})
|
||||
|
||||
test('throws range error if ms is Infinity', ({ throws, end }) => {
|
||||
throws(() => sleep(Infinity), RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity'))
|
||||
end()
|
||||
})
|
||||
|
||||
test('throws range error if ms is not a number or bigint', ({ throws, end }) => {
|
||||
throws(() => sleep('Infinity'), TypeError('sleep: ms must be a number'))
|
||||
throws(() => sleep('foo'), TypeError('sleep: ms must be a number'))
|
||||
throws(() => sleep({a: 1}), TypeError('sleep: ms must be a number'))
|
||||
throws(() => sleep([1,2,3]), TypeError('sleep: ms must be a number'))
|
||||
end()
|
||||
})
|
||||
7 node_modules/colorette/LICENSE.md generated vendored
@ -1,7 +0,0 @@
|
||||
Copyright © Jorge Bucaran <<https://jorgebucaran.com>>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
134 node_modules/colorette/README.md generated vendored
@ -1,134 +0,0 @@
|
||||
# 🌈Colorette
|
||||
|
||||
> Easily set your terminal text color & styles.
|
||||
|
||||
- No dependencies
|
||||
- Automatic color support detection
|
||||
- Up to [2x faster](#benchmarks) than alternatives
|
||||
- TypeScript support
|
||||
- [`NO_COLOR`](https://no-color.org) friendly
|
||||
- Node >= `10`
|
||||
|
||||
> [**Upgrading from Colorette `1.x`?**](https://github.com/jorgebucaran/colorette/issues/70)
|
||||
|
||||
## Quickstart
|
||||
|
||||
```js
|
||||
import { blue, bold, underline } from "colorette"
|
||||
|
||||
console.log(
|
||||
blue("I'm blue"),
|
||||
bold(blue("da ba dee")),
|
||||
underline(bold(blue("da ba daa")))
|
||||
)
|
||||
```
|
||||
|
||||
Here's an example using [template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals).
|
||||
|
||||
```js
|
||||
console.log(`
|
||||
There's a ${underline(blue("house"))},
|
||||
With a ${bold(blue("window"))},
|
||||
And a ${blue("corvette")}
|
||||
And everything is blue
|
||||
`)
|
||||
```
|
||||
|
||||
You can also nest styles without breaking existing color sequences.
|
||||
|
||||
```js
|
||||
console.log(bold(`I'm ${blue(`da ba ${underline("dee")} da ba`)} daa`))
|
||||
```
|
||||
|
||||
Need to override terminal color detection? You can do that too.
|
||||
|
||||
```js
|
||||
import { createColors } from "colorette"
|
||||
|
||||
const { blue } = createColors({ useColor: false })
|
||||
|
||||
console.log(blue("Blue? Nope, nah"))
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
```console
|
||||
npm install colorette
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### \<color\>()
|
||||
|
||||
> See all [supported colors](#supported-colors).
|
||||
|
||||
```js
|
||||
import { blue } from "colorette"
|
||||
|
||||
blue("I'm blue") //=> \x1b[34mI'm blue\x1b[39m
|
||||
```
|
||||
|
||||
### createColors()
|
||||
|
||||
Override terminal color detection via `createColors({ useColor })`.
|
||||
|
||||
```js
|
||||
import { createColors } from "colorette"
|
||||
|
||||
const { blue } = createColors({ useColor: false })
|
||||
```
|
||||
|
||||
### isColorSupported
|
||||
|
||||
`true` if your terminal supports color, `false` otherwise. Used internally, but exposed for convenience.
|
||||
|
||||
## Environment
|
||||
|
||||
You can override color detection from the CLI by setting the `--no-color` or `--color` flags.
|
||||
|
||||
```console
|
||||
$ ./example.js --no-color | ./consumer.js
|
||||
```
|
||||
|
||||
Or if you can't use CLI flags, by setting the `NO_COLOR=` or `FORCE_COLOR=` environment variables.
|
||||
|
||||
```console
|
||||
$ NO_COLOR= ./example.js | ./consumer.js
|
||||
```
|
||||
|
||||
## Supported colors
|
||||
|
||||
| Colors | Background Colors | Bright Colors | Bright Background Colors | Modifiers |
|
||||
| ------- | ----------------- | ------------- | ------------------------ | ----------------- |
|
||||
| black | bgBlack | blackBright | bgBlackBright | dim |
|
||||
| red | bgRed | redBright | bgRedBright | **bold** |
|
||||
| green | bgGreen | greenBright | bgGreenBright | hidden |
|
||||
| yellow | bgYellow | yellowBright | bgYellowBright | _italic_ |
|
||||
| blue | bgBlue | blueBright | bgBlueBright | <u>underline</u> |
|
||||
| magenta | bgMagenta | magentaBright | bgMagentaBright | ~~strikethrough~~ |
|
||||
| cyan | bgCyan | cyanBright | bgCyanBright | reset |
|
||||
| white | bgWhite | whiteBright | bgWhiteBright | |
|
||||
| gray | | | | |
|
||||
|
||||
## [Benchmarks](https://github.com/jorgebucaran/colorette/actions/workflows/bench.yml)
|
||||
|
||||
```console
|
||||
npm --prefix bench start
|
||||
```
|
||||
|
||||
```diff
|
||||
chalk 1,786,703 ops/sec
|
||||
kleur 1,618,960 ops/sec
|
||||
colors 646,823 ops/sec
|
||||
ansi-colors 786,149 ops/sec
|
||||
picocolors 2,871,758 ops/sec
|
||||
+ colorette 3,002,751 ops/sec
|
||||
```
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
Colorette started out in 2015 by [@jorgebucaran](https://github.com/jorgebucaran) as a lightweight alternative to [Chalk](https://github.com/chalk/chalk) and was introduced originally as [Clor](https://github.com/jorgebucaran/colorette/commit/b01b5b9961ceb7df878583a3002e836fae9e37ce). Our terminal color detection logic borrows heavily from [@sindresorhus](https://github.com/sindresorhus) and [@Qix-](https://github.com/Qix-) work on Chalk. The idea of slicing strings to clear bleeding sequences was adapted from a similar technique used by [@alexeyraspopov](https://github.com/alexeyraspopov) in [picocolors](https://github.com/alexeyraspopov/picocolors). Thank you to all our contributors! <3
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE.md)
|
||||
218 node_modules/colorette/index.cjs generated vendored
@ -1,218 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var tty = require('tty');
|
||||
|
||||
function _interopNamespace(e) {
|
||||
if (e && e.__esModule) return e;
|
||||
var n = Object.create(null);
|
||||
if (e) {
|
||||
Object.keys(e).forEach(function (k) {
|
||||
if (k !== 'default') {
|
||||
var d = Object.getOwnPropertyDescriptor(e, k);
|
||||
Object.defineProperty(n, k, d.get ? d : {
|
||||
enumerable: true,
|
||||
get: function () { return e[k]; }
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
n["default"] = e;
|
||||
return Object.freeze(n);
|
||||
}
|
||||
|
||||
var tty__namespace = /*#__PURE__*/_interopNamespace(tty);
|
||||
|
||||
const {
|
||||
env = {},
|
||||
argv = [],
|
||||
platform = "",
|
||||
} = typeof process === "undefined" ? {} : process;
|
||||
|
||||
const isDisabled = "NO_COLOR" in env || argv.includes("--no-color");
|
||||
const isForced = "FORCE_COLOR" in env || argv.includes("--color");
|
||||
const isWindows = platform === "win32";
|
||||
const isDumbTerminal = env.TERM === "dumb";
|
||||
|
||||
const isCompatibleTerminal =
|
||||
tty__namespace && tty__namespace.isatty && tty__namespace.isatty(1) && env.TERM && !isDumbTerminal;
|
||||
|
||||
const isCI =
|
||||
"CI" in env &&
|
||||
("GITHUB_ACTIONS" in env || "GITLAB_CI" in env || "CIRCLECI" in env);
|
||||
|
||||
const isColorSupported =
|
||||
!isDisabled &&
|
||||
(isForced || (isWindows && !isDumbTerminal) || isCompatibleTerminal || isCI);
|
||||
|
||||
const replaceClose = (
|
||||
index,
|
||||
string,
|
||||
close,
|
||||
replace,
|
||||
head = string.substring(0, index) + replace,
|
||||
tail = string.substring(index + close.length),
|
||||
next = tail.indexOf(close)
|
||||
) => head + (next < 0 ? tail : replaceClose(next, tail, close, replace));
|
||||
|
||||
const clearBleed = (index, string, open, close, replace) =>
|
||||
index < 0
|
||||
? open + string + close
|
||||
: open + replaceClose(index, string, close, replace) + close;
|
||||
|
||||
const filterEmpty =
|
||||
(open, close, replace = open, at = open.length + 1) =>
|
||||
(string) =>
|
||||
string || !(string === "" || string === undefined)
|
||||
? clearBleed(
|
||||
("" + string).indexOf(close, at),
|
||||
string,
|
||||
open,
|
||||
close,
|
||||
replace
|
||||
)
|
||||
: "";
|
||||
|
||||
const init = (open, close, replace) =>
|
||||
filterEmpty(`\x1b[${open}m`, `\x1b[${close}m`, replace);
|
||||
|
||||
const colors = {
|
||||
reset: init(0, 0),
|
||||
bold: init(1, 22, "\x1b[22m\x1b[1m"),
|
||||
dim: init(2, 22, "\x1b[22m\x1b[2m"),
|
||||
italic: init(3, 23),
|
||||
underline: init(4, 24),
|
||||
inverse: init(7, 27),
|
||||
hidden: init(8, 28),
|
||||
strikethrough: init(9, 29),
|
||||
black: init(30, 39),
|
||||
red: init(31, 39),
|
||||
green: init(32, 39),
|
||||
yellow: init(33, 39),
|
||||
blue: init(34, 39),
|
||||
magenta: init(35, 39),
|
||||
cyan: init(36, 39),
|
||||
white: init(37, 39),
|
||||
gray: init(90, 39),
|
||||
bgBlack: init(40, 49),
|
||||
bgRed: init(41, 49),
|
||||
bgGreen: init(42, 49),
|
||||
bgYellow: init(43, 49),
|
||||
bgBlue: init(44, 49),
|
||||
bgMagenta: init(45, 49),
|
||||
bgCyan: init(46, 49),
|
||||
bgWhite: init(47, 49),
|
||||
blackBright: init(90, 39),
|
||||
redBright: init(91, 39),
|
||||
greenBright: init(92, 39),
|
||||
yellowBright: init(93, 39),
|
||||
blueBright: init(94, 39),
|
||||
magentaBright: init(95, 39),
|
||||
cyanBright: init(96, 39),
|
||||
whiteBright: init(97, 39),
|
||||
bgBlackBright: init(100, 49),
|
||||
bgRedBright: init(101, 49),
|
||||
bgGreenBright: init(102, 49),
|
||||
bgYellowBright: init(103, 49),
|
||||
bgBlueBright: init(104, 49),
|
||||
bgMagentaBright: init(105, 49),
|
||||
bgCyanBright: init(106, 49),
|
||||
bgWhiteBright: init(107, 49),
|
||||
};
|
||||
|
||||
const createColors = ({ useColor = isColorSupported } = {}) =>
|
||||
useColor
|
||||
? colors
|
||||
: Object.keys(colors).reduce(
|
||||
(colors, key) => ({ ...colors, [key]: String }),
|
||||
{}
|
||||
);
|
||||
|
||||
const {
|
||||
reset,
|
||||
bold,
|
||||
dim,
|
||||
italic,
|
||||
underline,
|
||||
inverse,
|
||||
hidden,
|
||||
strikethrough,
|
||||
black,
|
||||
red,
|
||||
green,
|
||||
yellow,
|
||||
blue,
|
||||
magenta,
|
||||
cyan,
|
||||
white,
|
||||
gray,
|
||||
bgBlack,
|
||||
bgRed,
|
||||
bgGreen,
|
||||
bgYellow,
|
||||
bgBlue,
|
||||
bgMagenta,
|
||||
bgCyan,
|
||||
bgWhite,
|
||||
blackBright,
|
||||
redBright,
|
||||
greenBright,
|
||||
yellowBright,
|
||||
blueBright,
|
||||
magentaBright,
|
||||
cyanBright,
|
||||
whiteBright,
|
||||
bgBlackBright,
|
||||
bgRedBright,
|
||||
bgGreenBright,
|
||||
bgYellowBright,
|
||||
bgBlueBright,
|
||||
bgMagentaBright,
|
||||
bgCyanBright,
|
||||
bgWhiteBright,
|
||||
} = createColors();
|
||||
|
||||
exports.bgBlack = bgBlack;
|
||||
exports.bgBlackBright = bgBlackBright;
|
||||
exports.bgBlue = bgBlue;
|
||||
exports.bgBlueBright = bgBlueBright;
|
||||
exports.bgCyan = bgCyan;
|
||||
exports.bgCyanBright = bgCyanBright;
|
||||
exports.bgGreen = bgGreen;
|
||||
exports.bgGreenBright = bgGreenBright;
|
||||
exports.bgMagenta = bgMagenta;
|
||||
exports.bgMagentaBright = bgMagentaBright;
|
||||
exports.bgRed = bgRed;
|
||||
exports.bgRedBright = bgRedBright;
|
||||
exports.bgWhite = bgWhite;
|
||||
exports.bgWhiteBright = bgWhiteBright;
|
||||
exports.bgYellow = bgYellow;
|
||||
exports.bgYellowBright = bgYellowBright;
|
||||
exports.black = black;
|
||||
exports.blackBright = blackBright;
|
||||
exports.blue = blue;
|
||||
exports.blueBright = blueBright;
|
||||
exports.bold = bold;
|
||||
exports.createColors = createColors;
|
||||
exports.cyan = cyan;
|
||||
exports.cyanBright = cyanBright;
|
||||
exports.dim = dim;
|
||||
exports.gray = gray;
|
||||
exports.green = green;
|
||||
exports.greenBright = greenBright;
|
||||
exports.hidden = hidden;
|
||||
exports.inverse = inverse;
|
||||
exports.isColorSupported = isColorSupported;
|
||||
exports.italic = italic;
|
||||
exports.magenta = magenta;
|
||||
exports.magentaBright = magentaBright;
|
||||
exports.red = red;
|
||||
exports.redBright = redBright;
|
||||
exports.reset = reset;
|
||||
exports.strikethrough = strikethrough;
|
||||
exports.underline = underline;
|
||||
exports.white = white;
|
||||
exports.whiteBright = whiteBright;
|
||||
exports.yellow = yellow;
|
||||
exports.yellowBright = yellowBright;
|
||||
93 node_modules/colorette/index.d.ts generated vendored
@ -1,93 +0,0 @@
|
||||
declare module "colorette" {
|
||||
type Color = (text: string | number) => string
|
||||
|
||||
interface Colorette {
|
||||
reset: Color
|
||||
bold: Color
|
||||
dim: Color
|
||||
italic: Color
|
||||
underline: Color
|
||||
inverse: Color
|
||||
hidden: Color
|
||||
strikethrough: Color
|
||||
black: Color
|
||||
red: Color
|
||||
green: Color
|
||||
yellow: Color
|
||||
blue: Color
|
||||
magenta: Color
|
||||
cyan: Color
|
||||
white: Color
|
||||
gray: Color
|
||||
bgBlack: Color
|
||||
bgRed: Color
|
||||
bgGreen: Color
|
||||
bgYellow: Color
|
||||
bgBlue: Color
|
||||
bgMagenta: Color
|
||||
bgCyan: Color
|
||||
bgWhite: Color
|
||||
blackBright: Color
|
||||
redBright: Color
|
||||
greenBright: Color
|
||||
yellowBright: Color
|
||||
blueBright: Color
|
||||
magentaBright: Color
|
||||
cyanBright: Color
|
||||
whiteBright: Color
|
||||
bgBlackBright: Color
|
||||
bgRedBright: Color
|
||||
bgGreenBright: Color
|
||||
bgYellowBright: Color
|
||||
bgBlueBright: Color
|
||||
bgMagentaBright: Color
|
||||
bgCyanBright: Color
|
||||
bgWhiteBright: Color
|
||||
}
|
||||
|
||||
const reset: Color
|
||||
const bold: Color
|
||||
const dim: Color
|
||||
const italic: Color
|
||||
const underline: Color
|
||||
const inverse: Color
|
||||
const hidden: Color
|
||||
const strikethrough: Color
|
||||
const black: Color
|
||||
const red: Color
|
||||
const green: Color
|
||||
const yellow: Color
|
||||
const blue: Color
|
||||
const magenta: Color
|
||||
const cyan: Color
|
||||
const white: Color
|
||||
const gray: Color
|
||||
const bgBlack: Color
|
||||
const bgRed: Color
|
||||
const bgGreen: Color
|
||||
const bgYellow: Color
|
||||
const bgBlue: Color
|
||||
const bgMagenta: Color
|
||||
const bgCyan: Color
|
||||
const bgWhite: Color
|
||||
const blackBright: Color
|
||||
const redBright: Color
|
||||
const greenBright: Color
|
||||
const yellowBright: Color
|
||||
const blueBright: Color
|
||||
const magentaBright: Color
|
||||
const cyanBright: Color
|
||||
const whiteBright: Color
|
||||
const bgBlackBright: Color
|
||||
const bgRedBright: Color
|
||||
const bgGreenBright: Color
|
||||
const bgYellowBright: Color
|
||||
const bgBlueBright: Color
|
||||
const bgMagentaBright: Color
|
||||
const bgCyanBright: Color
|
||||
const bgWhiteBright: Color
|
||||
|
||||
const isColorSupported: boolean
|
||||
|
||||
function createColors(options?: { useColor: boolean }): Colorette
|
||||
}
|
||||
150 node_modules/colorette/index.js generated vendored
@ -1,150 +0,0 @@
|
||||
import * as tty from "tty"
|
||||
|
||||
const {
|
||||
env = {},
|
||||
argv = [],
|
||||
platform = "",
|
||||
} = typeof process === "undefined" ? {} : process
|
||||
|
||||
const isDisabled = "NO_COLOR" in env || argv.includes("--no-color")
|
||||
const isForced = "FORCE_COLOR" in env || argv.includes("--color")
|
||||
const isWindows = platform === "win32"
|
||||
const isDumbTerminal = env.TERM === "dumb"
|
||||
|
||||
const isCompatibleTerminal =
|
||||
tty && tty.isatty && tty.isatty(1) && env.TERM && !isDumbTerminal
|
||||
|
||||
const isCI =
|
||||
"CI" in env &&
|
||||
("GITHUB_ACTIONS" in env || "GITLAB_CI" in env || "CIRCLECI" in env)
|
||||
|
||||
export const isColorSupported =
|
||||
!isDisabled &&
|
||||
(isForced || (isWindows && !isDumbTerminal) || isCompatibleTerminal || isCI)
|
||||
|
||||
const replaceClose = (
|
||||
index,
|
||||
string,
|
||||
close,
|
||||
replace,
|
||||
head = string.substring(0, index) + replace,
|
||||
tail = string.substring(index + close.length),
|
||||
next = tail.indexOf(close)
|
||||
) => head + (next < 0 ? tail : replaceClose(next, tail, close, replace))
|
||||
|
||||
const clearBleed = (index, string, open, close, replace) =>
|
||||
index < 0
|
||||
? open + string + close
|
||||
: open + replaceClose(index, string, close, replace) + close
|
||||
|
||||
const filterEmpty =
|
||||
(open, close, replace = open, at = open.length + 1) =>
|
||||
(string) =>
|
||||
string || !(string === "" || string === undefined)
|
||||
? clearBleed(
|
||||
("" + string).indexOf(close, at),
|
||||
string,
|
||||
open,
|
||||
close,
|
||||
replace
|
||||
)
|
||||
: ""
|
||||
|
||||
const init = (open, close, replace) =>
|
||||
filterEmpty(`\x1b[${open}m`, `\x1b[${close}m`, replace)
|
||||
|
||||
const colors = {
|
||||
reset: init(0, 0),
|
||||
bold: init(1, 22, "\x1b[22m\x1b[1m"),
|
||||
dim: init(2, 22, "\x1b[22m\x1b[2m"),
|
||||
italic: init(3, 23),
|
||||
underline: init(4, 24),
|
||||
inverse: init(7, 27),
|
||||
hidden: init(8, 28),
|
||||
strikethrough: init(9, 29),
|
||||
black: init(30, 39),
|
||||
red: init(31, 39),
|
||||
green: init(32, 39),
|
||||
yellow: init(33, 39),
|
||||
blue: init(34, 39),
|
||||
magenta: init(35, 39),
|
||||
cyan: init(36, 39),
|
||||
white: init(37, 39),
|
||||
gray: init(90, 39),
|
||||
bgBlack: init(40, 49),
|
||||
bgRed: init(41, 49),
|
||||
bgGreen: init(42, 49),
|
||||
bgYellow: init(43, 49),
|
||||
bgBlue: init(44, 49),
|
||||
bgMagenta: init(45, 49),
|
||||
bgCyan: init(46, 49),
|
||||
bgWhite: init(47, 49),
|
||||
blackBright: init(90, 39),
|
||||
redBright: init(91, 39),
|
||||
greenBright: init(92, 39),
|
||||
yellowBright: init(93, 39),
|
||||
blueBright: init(94, 39),
|
||||
magentaBright: init(95, 39),
|
||||
cyanBright: init(96, 39),
|
||||
whiteBright: init(97, 39),
|
||||
bgBlackBright: init(100, 49),
|
||||
bgRedBright: init(101, 49),
|
||||
bgGreenBright: init(102, 49),
|
||||
bgYellowBright: init(103, 49),
|
||||
bgBlueBright: init(104, 49),
|
||||
bgMagentaBright: init(105, 49),
|
||||
bgCyanBright: init(106, 49),
|
||||
bgWhiteBright: init(107, 49),
|
||||
}
|
||||
|
||||
export const createColors = ({ useColor = isColorSupported } = {}) =>
|
||||
useColor
|
||||
? colors
|
||||
: Object.keys(colors).reduce(
|
||||
(colors, key) => ({ ...colors, [key]: String }),
|
||||
{}
|
||||
)
|
||||
|
||||
export const {
|
||||
reset,
|
||||
bold,
|
||||
dim,
|
||||
italic,
|
||||
underline,
|
||||
inverse,
|
||||
hidden,
|
||||
strikethrough,
|
||||
black,
|
||||
red,
|
||||
green,
|
||||
yellow,
|
||||
blue,
|
||||
magenta,
|
||||
cyan,
|
||||
white,
|
||||
gray,
|
||||
bgBlack,
|
||||
bgRed,
|
||||
bgGreen,
|
||||
bgYellow,
|
||||
bgBlue,
|
||||
bgMagenta,
|
||||
bgCyan,
|
||||
bgWhite,
|
||||
blackBright,
|
||||
redBright,
|
||||
greenBright,
|
||||
yellowBright,
|
||||
blueBright,
|
||||
magentaBright,
|
||||
cyanBright,
|
||||
whiteBright,
|
||||
bgBlackBright,
|
||||
bgRedBright,
|
||||
bgGreenBright,
|
||||
bgYellowBright,
|
||||
bgBlueBright,
|
||||
bgMagentaBright,
|
||||
bgCyanBright,
|
||||
bgWhiteBright,
|
||||
} = createColors()
|
||||
40 node_modules/colorette/package.json generated vendored
@ -1,40 +0,0 @@
|
||||
{
|
||||
"name": "colorette",
|
||||
"version": "2.0.20",
|
||||
"type": "module",
|
||||
"main": "index.cjs",
|
||||
"module": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"description": "🌈Easily set your terminal text color & styles.",
|
||||
"repository": "jorgebucaran/colorette",
|
||||
"license": "MIT",
|
||||
"exports": {
|
||||
"./package.json": "./package.json",
|
||||
".": {
|
||||
"require": "./index.cjs",
|
||||
"import": "./index.js",
|
||||
"types": "./index.d.ts"
|
||||
}
|
||||
},
|
||||
"files": [
|
||||
"*.*(c)[tj]s*"
|
||||
],
|
||||
"author": "Jorge Bucaran",
|
||||
"keywords": [
|
||||
"terminal",
|
||||
"styles",
|
||||
"color",
|
||||
"ansi"
|
||||
],
|
||||
"scripts": {
|
||||
"test": "c8 twist tests/*.js",
|
||||
"build": "npx rollup --format cjs --input index.js --file index.cjs",
|
||||
"deploy": "npm test && git commit --all --message $tag && git tag --sign $tag --message $tag && git push && git push --tags",
|
||||
"release": "tag=$npm_package_version npm run deploy && npm publish --access public",
|
||||
"prepare": "npm run build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"c8": "*",
|
||||
"twist": "*"
|
||||
}
|
||||
}
|
||||
20 node_modules/dateformat/LICENSE generated vendored
@ -1,20 +0,0 @@
|
||||
(c) 2007-2009 Steven Levithan <stevenlevithan.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
204 node_modules/dateformat/Readme.md generated vendored
@ -1,204 +0,0 @@
|
||||
# dateformat
|
||||
|
||||
A node.js package for Steven Levithan's excellent [dateFormat()][dateformat] function.
|
||||
|
||||
[](https://travis-ci.org/felixge/node-dateformat)
|
||||
|
||||
## Modifications
|
||||
|
||||
- Removed the `Date.prototype.format` method. Sorry folks, but extending native prototypes is for suckers.
|
||||
- Added a `module.exports = dateFormat;` statement at the bottom
|
||||
- Added the placeholder `N` to get the ISO 8601 numeric representation of the day of the week
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
$ npm install dateformat
|
||||
$ dateformat --help
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
As taken from Steven's post, modified to match the Modifications listed above:
|
||||
|
||||
```js
|
||||
var dateFormat = require("dateformat");
|
||||
var now = new Date();
|
||||
|
||||
// Basic usage
|
||||
dateFormat(now, "dddd, mmmm dS, yyyy, h:MM:ss TT");
|
||||
// Saturday, June 9th, 2007, 5:46:21 PM
|
||||
|
||||
// You can use one of several named masks
|
||||
dateFormat(now, "isoDateTime");
|
||||
// 2007-06-09T17:46:21
|
||||
|
||||
// ...Or add your own
|
||||
dateFormat.masks.hammerTime = 'HH:MM! "Can\'t touch this!"';
|
||||
dateFormat(now, "hammerTime");
|
||||
// 17:46! Can't touch this!
|
||||
|
||||
// You can also provide the date as a string
|
||||
dateFormat("Jun 9 2007", "fullDate");
|
||||
// Saturday, June 9, 2007
|
||||
|
||||
// Note that if you don't include the mask argument,
|
||||
// dateFormat.masks.default is used
|
||||
dateFormat(now);
|
||||
// Sat Jun 09 2007 17:46:21
|
||||
|
||||
// And if you don't include the date argument,
|
||||
// the current date and time is used
|
||||
dateFormat();
|
||||
// Sat Jun 09 2007 17:46:22
|
||||
|
||||
// You can also skip the date argument (as long as your mask doesn't
|
||||
// contain any numbers), in which case the current date/time is used
|
||||
dateFormat("longTime");
|
||||
// 5:46:22 PM EST
|
||||
|
||||
// And finally, you can convert local time to UTC time. Simply pass in
|
||||
// true as an additional argument (no argument skipping allowed in this case):
|
||||
dateFormat(now, "longTime", true);
|
||||
// 10:46:21 PM UTC
|
||||
|
||||
// ...Or add the prefix "UTC:" or "GMT:" to your mask.
|
||||
dateFormat(now, "UTC:h:MM:ss TT Z");
|
||||
// 10:46:21 PM UTC
|
||||
|
||||
// You can also get the ISO 8601 week of the year:
|
||||
dateFormat(now, "W");
|
||||
// 42
|
||||
|
||||
// and also get the ISO 8601 numeric representation of the day of the week:
|
||||
dateFormat(now, "N");
|
||||
// 6
|
||||
```
|
||||
|
||||
### Mask options
|
||||
|
||||
| Mask | Description |
|
||||
| ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `d` | Day of the month as digits; no leading zero for single-digit days. |
|
||||
| `dd` | Day of the month as digits; leading zero for single-digit days. |
|
||||
| `ddd` | Day of the week as a three-letter abbreviation. |
|
||||
| `DDD` | "Ysd", "Tdy" or "Tmw" if date lies within these three days. Else fall back to ddd. |
|
||||
| `dddd` | Day of the week as its full name. |
|
||||
| `DDDD` | "Yesterday", "Today" or "Tomorrow" if date lies within these three days. Else fall back to dddd. |
|
||||
| `m` | Month as digits; no leading zero for single-digit months. |
|
||||
| `mm` | Month as digits; leading zero for single-digit months. |
|
||||
| `mmm` | Month as a three-letter abbreviation. |
|
||||
| `mmmm` | Month as its full name. |
|
||||
| `yy` | Year as last two digits; leading zero for years less than 10. |
|
||||
| `yyyy` | Year represented by four digits. |
|
||||
| `h` | Hours; no leading zero for single-digit hours (12-hour clock). |
|
||||
| `hh` | Hours; leading zero for single-digit hours (12-hour clock). |
|
||||
| `H` | Hours; no leading zero for single-digit hours (24-hour clock). |
|
||||
| `HH` | Hours; leading zero for single-digit hours (24-hour clock). |
|
||||
| `M` | Minutes; no leading zero for single-digit minutes. |
|
||||
| `MM` | Minutes; leading zero for single-digit minutes. |
|
||||
| `N` | ISO 8601 numeric representation of the day of the week. |
|
||||
| `o` | GMT/UTC timezone offset, e.g. -0500 or +0230. |
|
||||
| `p` | GMT/UTC timezone offset, e.g. -05:00 or +02:30. |
|
||||
| `s` | Seconds; no leading zero for single-digit seconds. |
|
||||
| `ss` | Seconds; leading zero for single-digit seconds. |
|
||||
| `S` | The date's ordinal suffix (st, nd, rd, or th). Works well with `d`. |
|
||||
| `l` | Milliseconds; gives 3 digits. |
|
||||
| `L` | Milliseconds; gives 2 digits. |
|
||||
| `t` | Lowercase, single-character time marker string: a or p. |
|
||||
| `tt` | Lowercase, two-character time marker string: am or pm. |
|
||||
| `T` | Uppercase, single-character time marker string: A or P. |
|
||||
| `TT` | Uppercase, two-character time marker string: AM or PM. |
|
||||
| `W` | ISO 8601 week number of the year, e.g. 4, 42 |
|
||||
| `WW` | ISO 8601 week number of the year, leading zero for single-digit, e.g. 04, 42 |
|
||||
| `Z` | US timezone abbreviation, e.g. EST or MDT. For non-US timezones, the GMT/UTC offset is returned, e.g. GMT-0500 |
|
||||
| `'...'`, `"..."` | Literal character sequence. Surrounding quotes are removed. |
|
||||
| `UTC:` | Must be the first four characters of the mask. Converts the date from local time to UTC/GMT/Zulu time before applying the mask. The "UTC:" prefix is removed. |
|
||||
|
||||
### Named Formats
|
||||
|
||||
| Name | Mask | Example |
|
||||
| ----------------- | ------------------------------ | ------------------------ |
|
||||
| `default` | `ddd mmm dd yyyy HH:MM:ss` | Sat Jun 09 2007 17:46:21 |
|
||||
| `shortDate` | `m/d/yy` | 6/9/07 |
|
||||
| `paddedShortDate` | `mm/dd/yyyy` | 06/09/2007 |
|
||||
| `mediumDate` | `mmm d, yyyy` | Jun 9, 2007 |
|
||||
| `longDate` | `mmmm d, yyyy` | June 9, 2007 |
|
||||
| `fullDate` | `dddd, mmmm d, yyyy` | Saturday, June 9, 2007 |
|
||||
| `shortTime` | `h:MM TT` | 5:46 PM |
|
||||
| `mediumTime` | `h:MM:ss TT` | 5:46:21 PM |
|
||||
| `longTime` | `h:MM:ss TT Z` | 5:46:21 PM EST |
|
||||
| `isoDate` | `yyyy-mm-dd` | 2007-06-09 |
|
||||
| `isoTime` | `HH:MM:ss` | 17:46:21 |
|
||||
| `isoDateTime` | `yyyy-mm-dd'T'HH:MM:sso` | 2007-06-09T17:46:21+0700 |
|
||||
| `isoUtcDateTime` | `UTC:yyyy-mm-dd'T'HH:MM:ss'Z'` | 2007-06-09T22:46:21Z |
|
||||
|
||||
### Localization
|
||||
|
||||
Day names, month names and the AM/PM indicators can be localized by
|
||||
passing an object with the necessary strings. For example:
|
||||
|
||||
```js
|
||||
var dateFormat = require("dateformat");
|
||||
dateFormat.i18n = {
|
||||
dayNames: [
|
||||
"Sun",
|
||||
"Mon",
|
||||
"Tue",
|
||||
"Wed",
|
||||
"Thu",
|
||||
"Fri",
|
||||
"Sat",
|
||||
"Sunday",
|
||||
"Monday",
|
||||
"Tuesday",
|
||||
"Wednesday",
|
||||
"Thursday",
|
||||
"Friday",
|
||||
"Saturday",
|
||||
],
|
||||
monthNames: [
|
||||
"Jan",
|
||||
"Feb",
|
||||
"Mar",
|
||||
"Apr",
|
||||
"May",
|
||||
"Jun",
|
||||
"Jul",
|
||||
"Aug",
|
||||
"Sep",
|
||||
"Oct",
|
||||
"Nov",
|
||||
"Dec",
|
||||
"January",
|
||||
"February",
|
||||
"March",
|
||||
"April",
|
||||
"May",
|
||||
"June",
|
||||
"July",
|
||||
"August",
|
||||
"September",
|
||||
"October",
|
||||
"November",
|
||||
"December",
|
||||
],
|
||||
timeNames: ["a", "p", "am", "pm", "A", "P", "AM", "PM"],
|
||||
};
|
||||
```
|
||||
|
||||
> Notice that only one language is supported at a time and all strings
|
||||
> _must_ be present in the new value.
|
||||
|
||||
### Breaking change in 2.1.0
|
||||
|
||||
- 2.1.0 was published with a breaking change, for those using localized strings.
|
||||
- 2.2.0 has been published without the change, to keep packages referring to ^2.0.0 working. This is now branch v2_2.
|
||||
- 3.0.\* contains the localized AM/PM change.
|
||||
|
||||
## License
|
||||
|
||||
(c) 2007-2009 Steven Levithan [stevenlevithan.com][stevenlevithan], MIT license.
|
||||
|
||||
[dateformat]: http://blog.stevenlevithan.com/archives/date-time-format
|
||||
[stevenlevithan]: http://stevenlevithan.com/
|
||||
1 node_modules/dateformat/lib/dateformat.js generated vendored
File diff suppressed because one or more lines are too long
38 node_modules/dateformat/package.json generated vendored
@ -1,38 +0,0 @@
|
||||
{
|
||||
"name": "dateformat",
|
||||
"description": "A node.js package for Steven Levithan's excellent dateFormat() function.",
|
||||
"maintainers": [
|
||||
"Felix Geisendörfer <felix@debuggable.com>"
|
||||
],
|
||||
"homepage": "https://github.com/felixge/node-dateformat",
|
||||
"author": "Steven Levithan",
|
||||
"contributors": [
|
||||
"Steven Levithan",
|
||||
"Felix Geisendörfer <felix@debuggable.com>",
|
||||
"Christoph Tavan <dev@tavan.de>",
|
||||
"Jon Schlinkert (https://github.com/jonschlinkert)"
|
||||
],
|
||||
"version": "4.6.3",
|
||||
"license": "MIT",
|
||||
"main": "lib/dateformat",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.12.10",
|
||||
"@babel/core": "^7.12.10",
|
||||
"@babel/preset-env": "^7.12.11",
|
||||
"mocha": "^8.2.1",
|
||||
"uglify-js": "^3.12.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "./node_modules/.bin/babel src --out-dir lib && uglifyjs lib/dateformat.js -o lib/dateformat.js",
|
||||
"test": "npm run build && mocha",
|
||||
"benchmark": "npm run build && node ./benchmark/benchmark.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/felixge/node-dateformat.git"
|
||||
},
|
||||
"dependencies": {}
|
||||
}
|
||||
21
node_modules/end-of-stream/LICENSE
generated
vendored
21
node_modules/end-of-stream/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Mathias Buus
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
54
node_modules/end-of-stream/README.md
generated
vendored
54
node_modules/end-of-stream/README.md
generated
vendored
@ -1,54 +0,0 @@
|
||||
# end-of-stream

A node module that calls a callback when a readable/writable/duplex stream has completed or failed.

npm install end-of-stream

[](https://travis-ci.org/mafintosh/end-of-stream)

## Usage

Simply pass a stream and a callback to `eos`.
Legacy streams, streams2 and streams3 are all supported.

``` js
var eos = require('end-of-stream');

eos(readableStream, function(err) {
  // this will be set to the stream instance
  if (err) return console.log('stream had an error or closed early');
  console.log('stream has ended', this === readableStream);
});

eos(writableStream, function(err) {
  if (err) return console.log('stream had an error or closed early');
  console.log('stream has finished', this === writableStream);
});

eos(duplexStream, function(err) {
  if (err) return console.log('stream had an error or closed early');
  console.log('stream has ended and finished', this === duplexStream);
});

eos(duplexStream, {readable:false}, function(err) {
  if (err) return console.log('stream had an error or closed early');
  console.log('stream has finished but might still be readable');
});

eos(duplexStream, {writable:false}, function(err) {
  if (err) return console.log('stream had an error or closed early');
  console.log('stream has ended but might still be writable');
});

eos(readableStream, {error:false}, function(err) {
  // do not treat emit('error', err) as an end-of-stream
});
```
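
Not documented in the README above, but visible in `index.js` further down: `eos` returns a function that removes every listener it attached. A minimal sketch based on that source (the file path is illustrative):

```js
var fs = require('fs');
var eos = require('end-of-stream');

var readableStream = fs.createReadStream('some-file.txt'); // illustrative path

var detach = eos(readableStream, function(err) {
  // never invoked once detach() has been called
});

// later, if the outcome is no longer interesting:
detach();
```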
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
## Related
|
||||
|
||||
`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
|
||||
96
node_modules/end-of-stream/index.js
generated
vendored
96
node_modules/end-of-stream/index.js
generated
vendored
@ -1,96 +0,0 @@
|
||||
var once = require('once');
|
||||
|
||||
var noop = function() {};
|
||||
|
||||
var qnt = global.Bare ? queueMicrotask : process.nextTick.bind(process);
|
||||
|
||||
var isRequest = function(stream) {
|
||||
return stream.setHeader && typeof stream.abort === 'function';
|
||||
};
|
||||
|
||||
var isChildProcess = function(stream) {
|
||||
return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
|
||||
};
|
||||
|
||||
var eos = function(stream, opts, callback) {
|
||||
if (typeof opts === 'function') return eos(stream, null, opts);
|
||||
if (!opts) opts = {};
|
||||
|
||||
callback = once(callback || noop);
|
||||
|
||||
var ws = stream._writableState;
|
||||
var rs = stream._readableState;
|
||||
var readable = opts.readable || (opts.readable !== false && stream.readable);
|
||||
var writable = opts.writable || (opts.writable !== false && stream.writable);
|
||||
var cancelled = false;
|
||||
|
||||
var onlegacyfinish = function() {
|
||||
if (!stream.writable) onfinish();
|
||||
};
|
||||
|
||||
var onfinish = function() {
|
||||
writable = false;
|
||||
if (!readable) callback.call(stream);
|
||||
};
|
||||
|
||||
var onend = function() {
|
||||
readable = false;
|
||||
if (!writable) callback.call(stream);
|
||||
};
|
||||
|
||||
var onexit = function(exitCode) {
|
||||
callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
|
||||
};
|
||||
|
||||
var onerror = function(err) {
|
||||
callback.call(stream, err);
|
||||
};
|
||||
|
||||
var onclose = function() {
|
||||
qnt(onclosenexttick);
|
||||
};
|
||||
|
||||
var onclosenexttick = function() {
|
||||
if (cancelled) return;
|
||||
if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
|
||||
if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
|
||||
};
|
||||
|
||||
var onrequest = function() {
|
||||
stream.req.on('finish', onfinish);
|
||||
};
|
||||
|
||||
if (isRequest(stream)) {
|
||||
stream.on('complete', onfinish);
|
||||
stream.on('abort', onclose);
|
||||
if (stream.req) onrequest();
|
||||
else stream.on('request', onrequest);
|
||||
} else if (writable && !ws) { // legacy streams
|
||||
stream.on('end', onlegacyfinish);
|
||||
stream.on('close', onlegacyfinish);
|
||||
}
|
||||
|
||||
if (isChildProcess(stream)) stream.on('exit', onexit);
|
||||
|
||||
stream.on('end', onend);
|
||||
stream.on('finish', onfinish);
|
||||
if (opts.error !== false) stream.on('error', onerror);
|
||||
stream.on('close', onclose);
|
||||
|
||||
return function() {
|
||||
cancelled = true;
|
||||
stream.removeListener('complete', onfinish);
|
||||
stream.removeListener('abort', onclose);
|
||||
stream.removeListener('request', onrequest);
|
||||
if (stream.req) stream.req.removeListener('finish', onfinish);
|
||||
stream.removeListener('end', onlegacyfinish);
|
||||
stream.removeListener('close', onlegacyfinish);
|
||||
stream.removeListener('finish', onfinish);
|
||||
stream.removeListener('exit', onexit);
|
||||
stream.removeListener('end', onend);
|
||||
stream.removeListener('error', onerror);
|
||||
stream.removeListener('close', onclose);
|
||||
};
|
||||
};
|
||||
|
||||
module.exports = eos;
|
||||
37
node_modules/end-of-stream/package.json
generated
vendored
37
node_modules/end-of-stream/package.json
generated
vendored
@ -1,37 +0,0 @@
|
||||
{
|
||||
"name": "end-of-stream",
|
||||
"version": "1.4.5",
|
||||
"description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/mafintosh/end-of-stream.git"
|
||||
},
|
||||
"dependencies": {
|
||||
"once": "^1.4.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node test.js"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"stream",
|
||||
"streams",
|
||||
"callback",
|
||||
"finish",
|
||||
"close",
|
||||
"end",
|
||||
"wait"
|
||||
],
|
||||
"bugs": {
|
||||
"url": "https://github.com/mafintosh/end-of-stream/issues"
|
||||
},
|
||||
"homepage": "https://github.com/mafintosh/end-of-stream",
|
||||
"main": "index.js",
|
||||
"author": "Mathias Buus <mathiasbuus@gmail.com>",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"tape": "^4.11.0"
|
||||
}
|
||||
}
|
||||
171
node_modules/fast-copy/CHANGELOG.md
generated
vendored
171
node_modules/fast-copy/CHANGELOG.md
generated
vendored
@ -1,171 +0,0 @@
|
||||
# fast-copy CHANGELOG
|
||||
|
||||
## 4.0.2
|
||||
|
||||
- [#112](https://github.com/planttheidea/fast-copy/pull/112) - Prevent generators from attempting to be copied (fixes
|
||||
[#111](https://github.com/planttheidea/fast-copy/issues/111))
|
||||
|
||||
## 4.0.1
|
||||
|
||||
- [#110](https://github.com/planttheidea/fast-copy/pull/110) - Fix legacy types not aligning with types from build
|
||||
package
|
||||
|
||||
## 4.0.0
|
||||
|
||||
### BREAKING CHANGES
|
||||
|
||||
- The default `copy` method is now a named export, and the default export has been removed.
|
||||
- Legacy environment support has been removed; `Symbol`, `WeakMap`, and `RegExp.prototype.flags` are now expected to be
|
||||
present.
|
||||
- `createCopier` now receives an object of options. The methods passed previously are namespaced under the `methods` key
|
||||
in that options object.
|
||||
- `createStrictCopier` has been removed; please use the `strict` option passed to `createCopier`
|
||||
|
||||
## 3.0.2
|
||||
|
||||
- [#95](https://github.com/planttheidea/fast-copy/pull/95) - Add support for objects that have a prototype with no
|
||||
constructor
|
||||
|
||||
## 3.0.1
|
||||
|
||||
- [#78](https://github.com/planttheidea/fast-copy/pull/78) - Work when running Node process with `--disable-proto=throw`
|
||||
(thanks [@castarco](https://github.com/castarco))
|
||||
|
||||
## 3.0.0
|
||||
|
||||
**Breaking changes**
|
||||
|
||||
- Exports are now always named, so the `.default` suffix is required when accessing
|
||||
- CommonJS in Node => `const copy = require('fast-copy').default;`
|
||||
- UMD global via CDN => `const copy = globalThis['fast-copy'].default;`
|
||||
- `copy.strict` is no longer available; it is now available as the explicit `copyStrict` named import
|
||||
- Options have been removed
|
||||
- `isStrict` option has been replaced with importing the separate `copyStrict` method
|
||||
- `realm` has been removed entirely, as `instanceof` is no longer used internally
|
||||
- The `FastCopy` namespace in typings has been removed in favor of explicit import of available types
|
||||
|
||||
**Enhancements**
|
||||
|
||||
- Support `exports` option, to have better handling for different environments (ESM vs CJS vs UMD) and improve
|
||||
tree-shaking when supported
|
||||
- Can now create a custom copier (either standard or strict), allowing maximum performance for specific use-cases
|
||||
- Small speed improvements when handling certain object types
|
||||
|
||||
**Bug fixes**
|
||||
|
||||
- Correctly handle primitive wrappers, e.g. `new String('foo')`
|
||||
|
||||
## 2.1.7
|
||||
|
||||
- Republish of [`2.1.6`](#216), as the release process failed mid-publish
|
||||
|
||||
## 2.1.6
|
||||
|
||||
- Revert [#69](https://github.com/planttheidea/fast-copy/pull/69) and
|
||||
[#71](https://github.com/planttheidea/fast-copy/pull/71), as they broke the package for NodeJS consumption (will be
|
||||
reintroduced in v3, as breaking changes are required)
|
||||
|
||||
## 2.1.5 - DO NOT USE
|
||||
|
||||
- Ensure `"type": "module"` is set to allow ESM in NodeJS to work
|
||||
[#71](https://github.com/planttheidea/fast-copy/pull/71)
|
||||
|
||||
## 2.1.4 - DO NOT USE
|
||||
|
||||
- Provide `"exports"` definition in `package.json` [#69](https://github.com/planttheidea/fast-copy/pull/69) (thanks
|
||||
[@ilteoood](https://github.com/ilteoood))
|
||||
|
||||
## 2.1.3
|
||||
|
||||
- Fix source maps not referencing source code [#65](https://github.com/planttheidea/fast-copy/pull/65)
|
||||
|
||||
## 2.1.2
|
||||
|
||||
- Support `constructor` property override on object [#60](https://github.com/planttheidea/fast-copy/pull/60)
|
||||
- Provide better support for `constructor` override on non-plain object types
|
||||
[#61](https://github.com/planttheidea/fast-copy/pull/61)
|
||||
- Remove `tslint` in favor of `@typescript-eslint` [#62](https://github.com/planttheidea/fast-copy/pull/62)
|
||||
|
||||
## 2.1.1
|
||||
|
||||
- Fix ESM-to-CommonJS issue when using TSC to consume [#37](https://github.com/planttheidea/fast-copy/issues/37)
|
||||
- Modify `Blob` cloning to use `blob.slice()` instead of `new Blob()` for speed
|
||||
|
||||
## 2.1.0
|
||||
|
||||
- Support cloning `Blob` [#31](https://github.com/planttheidea/fast-copy/pull/31) (thanks
|
||||
[@fratzinger](https://github.com/fratzinger))
|
||||
- Fix cloning descriptors that only are getters / setters in strict mode
|
||||
- Handle errors when defining properties in strict mode
|
||||
|
||||
## 2.0.5
|
||||
|
||||
- Fix issue copying objects referenced multiple times in source [#28](https://github.com/planttheidea/fast-copy/pull/28)
|
||||
(thanks [@darkowic](https://github.com/darkowic))
|
||||
|
||||
## 2.0.4
|
||||
|
||||
- Cache length of arrays for faster iteration [#22](https://github.com/planttheidea/fast-copy/pull/22)
|
||||
- Update dev dependencies and types
|
||||
|
||||
## 2.0.3
|
||||
|
||||
- Add safety to constructing native objects (fixes #19)
|
||||
|
||||
## 2.0.2
|
||||
|
||||
- Manually coalesce options instead of use destructuring (performance)
|
||||
|
||||
## 2.0.1
|
||||
|
||||
- Fix typings declarations - [#17](https://github.com/planttheidea/fast-copy/pull/17)
|
||||
|
||||
## 2.0.0
|
||||
|
||||
- Rewrite in TypeScript
|
||||
- Add strict mode (for more accurate and thorough copying, at the expense of less performance)
|
||||
|
||||
#### BREAKING CHANGES
|
||||
|
||||
- Second parameter is now an object of [options](README.md#options)
|
||||
|
||||
## 1.2.4
|
||||
|
||||
- Ensure `Date` copy uses realm-specific constructor
|
||||
|
||||
## 1.2.3
|
||||
|
||||
- Support custom prototype applied to plain object via `Object.create()`
|
||||
|
||||
## 1.2.2
|
||||
|
||||
- Support copy of extensions of native `Array` with alternative `push()` method
|
||||
|
||||
## 1.2.1
|
||||
|
||||
- Under-the-hood optimizations per recommendations from #7
|
||||
|
||||
## 1.2.0
|
||||
|
||||
- Add support for multiple realms
|
||||
|
||||
## 1.1.2
|
||||
|
||||
- Optimize order of operations for common use cases
|
||||
|
||||
## 1.1.1
|
||||
|
||||
- Fix cache using `WeakSet` when there was support for `WeakMap`s instead of `WeakSet`s (in case one was polyfilled but
|
||||
not the other)
|
||||
|
||||
## 1.1.0
|
||||
|
||||
- Add TypeScript and FlowType bindings
|
||||
|
||||
## 1.0.1
|
||||
|
||||
- Activate tree-shaking
|
||||
|
||||
## 1.0.0
|
||||
|
||||
- Initial release
|
||||
21
node_modules/fast-copy/LICENSE
generated
vendored
21
node_modules/fast-copy/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2025 Tony Quetano
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
437
node_modules/fast-copy/README.md
generated
vendored
437
node_modules/fast-copy/README.md
generated
vendored
@ -1,437 +0,0 @@
|
||||
# fast-copy
|
||||
|
||||
<img src="https://img.shields.io/badge/build-passing-brightgreen.svg"/>
|
||||
<img src="https://img.shields.io/badge/coverage-100%25-brightgreen.svg"/>
|
||||
<img src="https://img.shields.io/badge/license-MIT-blue.svg"/>
|
||||
|
||||
A [blazing fast](#benchmarks) deep object copier
|
||||
|
||||
## Table of contents
|
||||
|
||||
- [fast-copy](#fast-copy)
|
||||
- [Table of contents](#table-of-contents)
|
||||
- [Usage](#usage)
|
||||
- [API](#api)
|
||||
- [`copy`](#copy)
|
||||
- [`copyStrict`](#copystrict)
|
||||
- [`createCopier`](#createcopier)
|
||||
- [`createCache`](#createcache)
|
||||
- [`methods`](#methods)
|
||||
- [Copier state](#copier-state)
|
||||
- [`cache`](#cache)
|
||||
- [`copier`](#copier)
|
||||
- [`Constructor` / `prototype`](#constructor--prototype)
|
||||
- [`strict`](#strict)
|
||||
- [Types supported](#types-supported)
|
||||
- [Aspects of default copiers](#aspects-of-default-copiers)
|
||||
- [Error references are copied directly, instead of creating a new `*Error` object](#error-references-are-copied-directly-instead-of-creating-a-new-error-object)
|
||||
- [The constructor of the original object is used, instead of using known globals](#the-constructor-of-the-original-object-is-used-instead-of-using-known-globals)
|
||||
- [Generator objects are copied, but still reference the original generator's state](#generator-objects-are-copied-but-still-reference-the-original-generators-state)
|
||||
- [Benchmarks](#benchmarks)
|
||||
- [Simple objects](#simple-objects)
|
||||
- [Complex objects](#complex-objects)
|
||||
- [Big data](#big-data)
|
||||
- [Circular objects](#circular-objects)
|
||||
- [Special objects](#special-objects)
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
import { copy } from 'fast-copy';
|
||||
import { deepEqual } from 'fast-equals';
|
||||
|
||||
const object = {
|
||||
array: [123, { deep: 'value' }],
|
||||
map: new Map([
|
||||
['foo', {}],
|
||||
[{ bar: 'baz' }, 'quz'],
|
||||
]),
|
||||
};
|
||||
|
||||
const copiedObject = copy(object);
|
||||
|
||||
console.log(copiedObject === object); // false
|
||||
console.log(deepEqual(copiedObject, object)); // true
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### `copy`
|
||||
|
||||
Deeply copy the object passed.
|
||||
|
||||
```js
|
||||
import { copy } from 'fast-copy';
|
||||
|
||||
const copied = copy({ foo: 'bar' });
|
||||
```
|
||||
|
||||
### `copyStrict`
|
||||
|
||||
Deeply copy the object passed, but with additional strictness when replicating the original object:
|
||||
|
||||
- Properties retain their original property descriptor
|
||||
- Non-enumerable keys are copied
|
||||
- Non-standard properties (e.g., keys on arrays / maps / sets) are copied
|
||||
|
||||
```js
|
||||
import { copyStrict } from 'fast-copy';
|
||||
|
||||
const object = { foo: 'bar' };
|
||||
Object.defineProperty(object, 'bar', {
|
||||
enumerable: false,
|
||||
value: 'baz',
|
||||
});
|
||||
|
||||
const copied = copyStrict(object);
|
||||
```
|
||||
|
||||
**NOTE**: This method is significantly slower than [`copy`](#copy), so it is recommended to only use this when you have
|
||||
specific use-cases that require it.
|
||||
|
||||
### `createCopier`
|
||||
|
||||
Create a custom copier based on the type-specific method overrides passed, as well as configuration options for how
|
||||
copies should be performed. This is useful if you want to squeeze out maximum performance, or perform something other
|
||||
than a standard deep copy.
|
||||
|
||||
```js
|
||||
import { createCopier } from 'fast-copy';
|
||||
import { LRUCache } from 'lru-cache';
|
||||
|
||||
const copyShallowStrict = createCopier({
|
||||
createCache: () => new LRUCache(),
|
||||
methods: {
|
||||
array: (array) => [...array],
|
||||
map: (map) => new Map(map.entries()),
|
||||
object: (object) => ({ ...object }),
|
||||
set: (set) => new Set(set.values()),
|
||||
},
|
||||
strict: true,
|
||||
});
|
||||
```
|
||||
|
||||
#### `createCache`
|
||||
|
||||
Method that creates the internal [`cache`](#cache) in the [Copier state](#copier-state). Defaults to creating a new
|
||||
`WeakMap` instance.
|
||||
|
||||
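
The cache only needs `has`, `set` and `get` methods (see the `Cache` type in `utils.d.cts` later in this diff), so a plain `Map` can stand in for the default `WeakMap`. A hedged sketch, not taken from the original README:

```js
import { createCopier } from 'fast-copy';

// A Map satisfies the has/set/get contract expected of the cache.
const copyWithMapCache = createCopier({
  createCache: () => new Map(),
});

const copied = copyWithMapCache({ nested: { deeply: true } });
```
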
#### `methods`
|
||||
|
||||
Methods used for copying specific object types. A list of the methods and which object types they handle:
|
||||
|
||||
- `array` => `Array`
|
||||
- `arrayBuffer`=> `ArrayBuffer`, `Float32Array`, `Float64Array`, `Int8Array`, `Int16Array`, `Int32Array`, `Uint8Array`,
|
||||
`Uint8ClampedArray`, `Uint16Array`, `Uint32Array`, `Uint64Array`
|
||||
- `blob` => `Blob`
|
||||
- `dataView` => `DataView`
|
||||
- `date` => `Date`
|
||||
- `error` => `Error`, `AggregateError`, `EvalError`, `RangeError`, `ReferenceError`, `SyntaxError`, `TypeError`,
|
||||
`URIError`
|
||||
- `map` => `Map`
|
||||
- `object` => `Object`, or any custom constructor
|
||||
- `regExp` => `RegExp`
|
||||
- `set` => `Set`
|
||||
|
||||
Each method has the following contract:
|
||||
|
||||
```js
|
||||
type InternalCopier<Value> = (value: Value, state: State) => Value;
|
||||
|
||||
interface State {
|
||||
Constructor: any;
|
||||
cache: WeakMap;
|
||||
copier: InternalCopier<any>;
|
||||
prototype: any;
|
||||
}
|
||||
```
|
||||
|
||||
##### Copier state
|
||||
|
||||
###### `cache`
|
||||
|
||||
If you want to maintain circular reference handling, then you'll need the methods to handle cache population for future
|
||||
lookups:
|
||||
|
||||
```js
|
||||
function shallowlyCloneArray<Value extends any[]>(
|
||||
value: Value,
|
||||
state: State
|
||||
): Value {
|
||||
const clone = [...value];
|
||||
|
||||
state.cache.set(value, clone);
|
||||
|
||||
return clone;
|
||||
}
|
||||
```
|
||||
|
||||
###### `copier`
|
||||
|
||||
`copier` is provided for recursive calls with deeply-nested objects.
|
||||
|
||||
```js
|
||||
function deeplyCloneArray<Value extends any[]>(
|
||||
value: Value,
|
||||
state: State
|
||||
): Value {
|
||||
const clone = [];
|
||||
|
||||
state.cache.set(value, clone);
|
||||
|
||||
value.forEach((item) => state.copier(item, state));
|
||||
|
||||
return clone;
|
||||
}
|
||||
```
|
||||
|
||||
Note that the example above uses `forEach` instead of a simple `map`. This is because it is highly recommended to store the clone
|
||||
in [`cache`](#cache) eagerly when deeply copying, so that nested circular references are handled correctly.
|
||||
|
||||
###### `Constructor` / `prototype`
|
||||
|
||||
Both `Constructor` and `prototype` properties are only populated with complex objects that are not standard objects or
|
||||
arrays. This is mainly useful for custom subclasses of these globals, or maintaining custom prototypes of objects.
|
||||
|
||||
```js
|
||||
function deeplyCloneSubclassArray<Value extends CustomArray>(
|
||||
value: Value,
|
||||
state: State
|
||||
): Value {
|
||||
const clone = new state.Constructor();
|
||||
|
||||
state.cache.set(value, clone);
|
||||
|
||||
value.forEach((item) => clone.push(item));
|
||||
|
||||
return clone;
|
||||
}
|
||||
|
||||
function deeplyCloneCustomObject<Value extends CustomObject>(
|
||||
value: Value,
|
||||
state: State
|
||||
): Value {
|
||||
const clone = Object.create(state.prototype);
|
||||
|
||||
state.cache.set(value, clone);
|
||||
|
||||
Object.entries(value).forEach(([k, v]) => (clone[k] = v));
|
||||
|
||||
return clone;
|
||||
}
|
||||
```
|
||||
|
||||
#### `strict`
|
||||
|
||||
Enforces strict copying of properties, which includes properties that are not standard for that object. An example would
|
||||
be a named key on an array.
|
||||
|
||||
**NOTE**: This creates a copier that is significantly slower than "loose" mode, so it is recommended to only use this
|
||||
when you have specific use-cases that require it.
|
||||
|
||||
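
To make the named-key example concrete, here is a small sketch comparing a strict copier against the default loose behavior (not part of the original README; the property name `meta` is illustrative):

```js
import { copy, createCopier } from 'fast-copy';

const strictCopy = createCopier({ strict: true });

const array = [1, 2, 3];
array.meta = 'custom'; // a named key, which is non-standard for arrays

copy(array).meta;       // undefined: loose mode only copies indexed values
strictCopy(array).meta; // 'custom': strict mode copies custom properties as well
```
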
## Types supported
|
||||
|
||||
The following object types are deeply cloned when they are either properties on the object passed, or the object itself:
|
||||
|
||||
- `Array`
|
||||
- `ArrayBuffer`
|
||||
- `Boolean` primitive wrappers (e.g., `new Boolean(true)`)
|
||||
- `Blob`
|
||||
- `Buffer`
|
||||
- `DataView`
|
||||
- `Date`
|
||||
- `Float32Array`
|
||||
- `Float64Array`
|
||||
- `Int8Array`
|
||||
- `Int16Array`
|
||||
- `Int32Array`
|
||||
- `Map`
|
||||
- `Number` primitive wrappers (e.g., `new Number(123)`)
|
||||
- `Object`
|
||||
- `RegExp`
|
||||
- `Set`
|
||||
- `String` primitive wrappers (e.g., `new String('foo')`)
|
||||
- `Uint8Array`
|
||||
- `Uint8ClampedArray`
|
||||
- `Uint16Array`
|
||||
- `Uint32Array`
|
||||
- `React` components
|
||||
- Custom constructors
|
||||
|
||||
The following object types are copied directly, as they are either primitives, cannot be cloned, or the common use-case
|
||||
implementation does not expect cloning:
|
||||
|
||||
- `AsyncFunction`
|
||||
- `AsyncGenerator`
|
||||
- `Boolean` primitives
|
||||
- `Error`
|
||||
- `Function`
|
||||
- `Generator`
|
||||
- `GeneratorFunction`
|
||||
- `Number` primitives
|
||||
- `Null`
|
||||
- `Promise`
|
||||
- `String` primitives
|
||||
- `Symbol`
|
||||
- `Undefined`
|
||||
- `WeakMap`
|
||||
- `WeakSet`
|
||||
|
||||
Circular objects are supported out of the box. By default, a cache based on `WeakSet` is used, but if `WeakSet` is not
|
||||
available then a fallback is used. The benchmarks quoted below are based on use of `WeakSet`.
|
||||
|
||||
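
As a quick sketch of what "supported out of the box" means here (not part of the original README):

```js
import { copy } from 'fast-copy';

const node = { name: 'root' };
node.self = node; // circular reference back to itself

const cloned = copy(node);
console.log(cloned.self === cloned); // true: the cycle now points at the clone
console.log(cloned === node);        // false
```
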
## Aspects of default copiers
|
||||
|
||||
Inherently, what is considered a valid copy is subjective because of different requirements and use-cases. For this
|
||||
library, some decisions were explicitly made for the default copiers of specific object types, and those decisions are
|
||||
detailed below. If your use-cases require different handling, you can always create your own custom copier with
|
||||
[`createCopier`](#createcopier).
|
||||
|
||||
### Error references are copied directly, instead of creating a new `*Error` object
|
||||
|
||||
While it would be relatively trivial to copy over the message and stack to a new object of the same `Error` subclass, it
|
||||
is a common practice to "override" the message or stack, and copies would not retain this mutation. As such, the
|
||||
original reference is copied.
|
||||
|
||||
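
In other words, a small sketch of the behavior described above (not part of the original README):

```js
import { copy } from 'fast-copy';

const error = new Error('boom');
error.stack = 'overridden stack';

const cloned = copy({ error });

console.log(cloned.error === error); // true: the same Error reference is reused
console.log(cloned.error.stack);     // 'overridden stack' is preserved
```
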
### The constructor of the original object is used, instead of using known globals
|
||||
|
||||
Starting in ES2015, native globals can be subclassed like any custom class. When copying, we explicitly reuse the
|
||||
constructor of the original object. However, the expectation is that these subclasses would have the same constructor
|
||||
signature as their native base class. This is a common community practice, but there is the possibility of inaccuracy if
|
||||
the contract differs.
|
||||
|
||||
### Generator objects are copied, but still reference the original generator's state
|
||||
|
||||
[Generator objects](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Generator) are
|
||||
specific types of iterators, but appear like standard objects that just have a few methods (`next`, `throw`, `return`).
|
||||
These methods are bound to the internal state of the generator, which cannot be copied effectively. Normally this would
|
||||
be treated like other "uncopiable" objects by simply passing the reference through; however, the "validation" of whether it
is a generator object or a standard object is not guaranteed (duck-typing) and carries a runtime cost.
|
||||
Therefore, the simplest path of treating it like a standard object (copying methods to a new object) was taken.
|
||||
|
||||
## Benchmarks
|
||||
|
||||
#### Simple objects
|
||||
|
||||
_Small number of properties, all values are primitives_
|
||||
|
||||
```bash
|
||||
┌────────────────────┬────────────────┐
|
||||
│ Name │ Ops / sec │
|
||||
├────────────────────┼────────────────┤
|
||||
│ fast-copy │ 4606103.720559 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ lodash.cloneDeep │ 2575175.39241 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ clone │ 2172921.6353 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ ramda │ 1919715.448951 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ fast-clone │ 1576610.693318 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ deepclone │ 1173500.05884 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ fast-copy (strict) │ 1049310.47701 │
|
||||
└────────────────────┴────────────────┘
|
||||
Fastest was "fast-copy".
|
||||
```
|
||||
|
||||
#### Complex objects
|
||||
|
||||
_Large number of properties, values are a combination of primitives and complex objects_
|
||||
|
||||
```bash
|
||||
┌────────────────────┬───────────────┐
|
||||
│ Name │ Ops / sec │
|
||||
├────────────────────┼───────────────┤
|
||||
│ fast-copy │ 235511.4532 │
|
||||
├────────────────────┼───────────────┤
|
||||
│ deepclone │ 142976.849406 │
|
||||
├────────────────────┼───────────────┤
|
||||
│ clone │ 125026.837887 │
|
||||
├────────────────────┼───────────────┤
|
||||
│ ramda │ 114216.98158 │
|
||||
├────────────────────┼───────────────┤
|
||||
│ fast-clone │ 111388.215547 │
|
||||
├────────────────────┼───────────────┤
|
||||
│ fast-copy (strict) │ 77683.900047 │
|
||||
├────────────────────┼───────────────┤
|
||||
│ lodash.cloneDeep │ 71343.431983 │
|
||||
└────────────────────┴───────────────┘
|
||||
Fastest was "fast-copy".
|
||||
```
|
||||
|
||||
#### Big data
|
||||
|
||||
_Very large number of properties with high amount of nesting, mainly objects and arrays_
|
||||
|
||||
```bash
|
||||
Testing big data object...
|
||||
┌────────────────────┬────────────┐
|
||||
│ Name │ Ops / sec │
|
||||
├────────────────────┼────────────┤
|
||||
│ fast-copy │ 325.548627 │
|
||||
├────────────────────┼────────────┤
|
||||
│ fast-clone │ 257.913886 │
|
||||
├────────────────────┼────────────┤
|
||||
│ deepclone │ 158.228042 │
|
||||
├────────────────────┼────────────┤
|
||||
│ lodash.cloneDeep │ 153.520966 │
|
||||
├────────────────────┼────────────┤
|
||||
│ fast-copy (strict) │ 126.027381 │
|
||||
├────────────────────┼────────────┤
|
||||
│ clone │ 123.383641 │
|
||||
├────────────────────┼────────────┤
|
||||
│ ramda │ 35.507959 │
|
||||
└────────────────────┴────────────┘
|
||||
Fastest was "fast-copy".
|
||||
```
|
||||
|
||||
#### Circular objects
|
||||
|
||||
```bash
|
||||
Testing circular object...
|
||||
┌────────────────────┬────────────────┐
|
||||
│ Name │ Ops / sec │
|
||||
├────────────────────┼────────────────┤
|
||||
│ fast-copy │ 1344790.296938 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ deepclone │ 1127781.641192 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ lodash.cloneDeep │ 894679.711048 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ clone │ 892911.50594 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ fast-copy (strict) │ 821339.44828 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ ramda │ 615222.946985 │
|
||||
├────────────────────┼────────────────┤
|
||||
│ fast-clone │ 0 │
|
||||
└────────────────────┴────────────────┘
|
||||
Fastest was "fast-copy".
|
||||
```
|
||||
|
||||
#### Special objects
|
||||
|
||||
_Custom constructors, React components, etc_
|
||||
|
||||
```bash
|
||||
┌────────────────────┬──────────────┐
|
||||
│ Name │ Ops / sec │
|
||||
├────────────────────┼──────────────┤
|
||||
│ fast-copy │ 86875.694416 │
|
||||
├────────────────────┼──────────────┤
|
||||
│ clone │ 73525.671381 │
|
||||
├────────────────────┼──────────────┤
|
||||
│ lodash.cloneDeep │ 63280.563976 │
|
||||
├────────────────────┼──────────────┤
|
||||
│ fast-clone │ 52991.064016 │
|
||||
├────────────────────┼──────────────┤
|
||||
│ ramda │ 31770.652317 │
|
||||
├────────────────────┼──────────────┤
|
||||
│ deepclone │ 24253.795114 │
|
||||
├────────────────────┼──────────────┤
|
||||
│ fast-copy (strict) │ 19112.538416 │
|
||||
└────────────────────┴──────────────┘
|
||||
Fastest was "fast-copy".
|
||||
```
|
||||
75
node_modules/fast-copy/dist/cjs/copier.d.cts
generated
vendored
75
node_modules/fast-copy/dist/cjs/copier.d.cts
generated
vendored
@ -1,75 +0,0 @@
|
||||
import type { Cache } from './utils.d.cts';
|
||||
export type InternalCopier<Value> = (value: Value, state: State) => Value;
|
||||
export interface State {
|
||||
Constructor: any;
|
||||
cache: Cache;
|
||||
copier: InternalCopier<any>;
|
||||
prototype: any;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array.
|
||||
*/
|
||||
export declare function copyArrayLoose(array: any[], state: State): any;
|
||||
/**
|
||||
* Deeply copy the indexed values in the array, as well as any custom properties.
|
||||
*/
|
||||
export declare function copyArrayStrict<Value extends any[]>(array: Value, state: State): Value;
|
||||
/**
|
||||
* Copy the contents of the ArrayBuffer.
|
||||
*/
|
||||
export declare function copyArrayBuffer<Value extends ArrayBufferLike>(arrayBuffer: Value, _state: State): Value;
|
||||
/**
|
||||
* Create a new Blob with the contents of the original.
|
||||
*/
|
||||
export declare function copyBlob<Value extends Blob>(blob: Value, _state: State): Value;
|
||||
/**
|
||||
* Create a new DataView with the contents of the original.
|
||||
*/
|
||||
export declare function copyDataView<Value extends DataView>(dataView: Value, state: State): Value;
|
||||
/**
|
||||
* Create a new Date based on the time of the original.
|
||||
*/
|
||||
export declare function copyDate<Value extends Date>(date: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the keys and values of the original.
|
||||
*/
|
||||
export declare function copyMapLoose<Value extends Map<any, any>>(map: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the keys and values of the original, as well as any custom properties.
|
||||
*/
|
||||
export declare function copyMapStrict<Value extends Map<any, any>>(map: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original.
|
||||
*/
|
||||
export declare function copyObjectLoose<Value extends Record<string, any>>(object: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original, as well
|
||||
* as any hidden or non-enumerable properties.
|
||||
*/
|
||||
export declare function copyObjectStrict<Value extends Record<string, any>>(object: Value, state: State): Value;
|
||||
/**
|
||||
* Create a new primitive wrapper from the value of the original.
|
||||
*/
|
||||
export declare function copyPrimitiveWrapper<Value extends Boolean | Number | String>(
|
||||
primitiveObject: Value,
|
||||
state: State,
|
||||
): Value;
|
||||
/**
|
||||
* Create a new RegExp based on the value and flags of the original.
|
||||
*/
|
||||
export declare function copyRegExp<Value extends RegExp>(regExp: Value, state: State): Value;
|
||||
/**
|
||||
* Return the original value (an identity function).
|
||||
*
|
||||
* @note
|
||||
* This is used for objects that cannot be copied, such as WeakMap.
|
||||
*/
|
||||
export declare function copySelf<Value>(value: Value, _state: State): Value;
|
||||
/**
|
||||
* Deeply copy the values of the original.
|
||||
*/
|
||||
export declare function copySetLoose<Value extends Set<any>>(set: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the values of the original, as well as any custom properties.
|
||||
*/
|
||||
export declare function copySetStrict<Value extends Set<any>>(set: Value, state: State): Value;
|
||||
340
node_modules/fast-copy/dist/cjs/index.cjs
generated
vendored
340
node_modules/fast-copy/dist/cjs/index.cjs
generated
vendored
@ -1,340 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const toStringFunction = Function.prototype.toString;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const toStringObject = Object.prototype.toString;
|
||||
/**
|
||||
* Get an empty version of the object with the same prototype it has.
|
||||
*/
|
||||
function getCleanClone(prototype) {
|
||||
if (!prototype) {
|
||||
return Object.create(null);
|
||||
}
|
||||
const Constructor = prototype.constructor;
|
||||
if (Constructor === Object) {
|
||||
return prototype === Object.prototype ? {} : Object.create(prototype);
|
||||
}
|
||||
if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) {
|
||||
try {
|
||||
return new Constructor();
|
||||
}
|
||||
catch (_a) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
return Object.create(prototype);
|
||||
}
|
||||
/**
|
||||
* Get the tag of the value passed, so that the correct copier can be used.
|
||||
*/
|
||||
function getTag(value) {
|
||||
const stringTag = value[Symbol.toStringTag];
|
||||
if (stringTag) {
|
||||
return stringTag;
|
||||
}
|
||||
const type = toStringObject.call(value);
|
||||
return type.substring(8, type.length - 1);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const { hasOwnProperty, propertyIsEnumerable } = Object.prototype;
|
||||
function copyOwnDescriptor(original, clone, property, state) {
|
||||
const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: original[property],
|
||||
writable: true,
|
||||
};
|
||||
const descriptor = ownDescriptor.get || ownDescriptor.set
|
||||
? ownDescriptor
|
||||
: {
|
||||
configurable: ownDescriptor.configurable,
|
||||
enumerable: ownDescriptor.enumerable,
|
||||
value: state.copier(ownDescriptor.value, state),
|
||||
writable: ownDescriptor.writable,
|
||||
};
|
||||
try {
|
||||
Object.defineProperty(clone, property, descriptor);
|
||||
}
|
||||
catch (_a) {
|
||||
// The above can fail on node in extreme edge cases, so fall back to the loose assignment.
|
||||
clone[property] = descriptor.get ? descriptor.get() : descriptor.value;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Strictly copy all properties contained on the object.
|
||||
*/
|
||||
function copyOwnPropertiesStrict(value, clone, state) {
|
||||
const names = Object.getOwnPropertyNames(value);
|
||||
for (let index = 0; index < names.length; ++index) {
|
||||
copyOwnDescriptor(value, clone, names[index], state);
|
||||
}
|
||||
const symbols = Object.getOwnPropertySymbols(value);
|
||||
for (let index = 0; index < symbols.length; ++index) {
|
||||
copyOwnDescriptor(value, clone, symbols[index], state);
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array.
|
||||
*/
|
||||
function copyArrayLoose(array, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(array, clone);
|
||||
for (let index = 0; index < array.length; ++index) {
|
||||
clone[index] = state.copier(array[index], state);
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array, as well as any custom properties.
|
||||
*/
|
||||
function copyArrayStrict(array, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(array, clone);
|
||||
return copyOwnPropertiesStrict(array, clone, state);
|
||||
}
|
||||
/**
|
||||
* Copy the contents of the ArrayBuffer.
|
||||
*/
|
||||
function copyArrayBuffer(arrayBuffer, _state) {
|
||||
return arrayBuffer.slice(0);
|
||||
}
|
||||
/**
|
||||
* Create a new Blob with the contents of the original.
|
||||
*/
|
||||
function copyBlob(blob, _state) {
|
||||
return blob.slice(0, blob.size, blob.type);
|
||||
}
|
||||
/**
|
||||
* Create a new DataView with the contents of the original.
|
||||
*/
|
||||
function copyDataView(dataView, state) {
|
||||
return new state.Constructor(copyArrayBuffer(dataView.buffer));
|
||||
}
|
||||
/**
|
||||
* Create a new Date based on the time of the original.
|
||||
*/
|
||||
function copyDate(date, state) {
|
||||
return new state.Constructor(date.getTime());
|
||||
}
|
||||
/**
|
||||
* Deeply copy the keys and values of the original.
|
||||
*/
|
||||
function copyMapLoose(map, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(map, clone);
|
||||
map.forEach((value, key) => {
|
||||
clone.set(key, state.copier(value, state));
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the keys and values of the original, as well as any custom properties.
|
||||
*/
|
||||
function copyMapStrict(map, state) {
|
||||
return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state);
|
||||
}
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original.
|
||||
*/
|
||||
function copyObjectLoose(object, state) {
|
||||
const clone = getCleanClone(state.prototype);
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(object, clone);
|
||||
for (const key in object) {
|
||||
if (hasOwnProperty.call(object, key)) {
|
||||
clone[key] = state.copier(object[key], state);
|
||||
}
|
||||
}
|
||||
const symbols = Object.getOwnPropertySymbols(object);
|
||||
for (let index = 0; index < symbols.length; ++index) {
|
||||
const symbol = symbols[index];
|
||||
if (propertyIsEnumerable.call(object, symbol)) {
|
||||
clone[symbol] = state.copier(object[symbol], state);
|
||||
}
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original, as well
|
||||
* as any hidden or non-enumerable properties.
|
||||
*/
|
||||
function copyObjectStrict(object, state) {
|
||||
const clone = getCleanClone(state.prototype);
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(object, clone);
|
||||
return copyOwnPropertiesStrict(object, clone, state);
|
||||
}
|
||||
/**
|
||||
* Create a new primitive wrapper from the value of the original.
|
||||
*/
|
||||
function copyPrimitiveWrapper(primitiveObject, state) {
|
||||
return new state.Constructor(primitiveObject.valueOf());
|
||||
}
|
||||
/**
|
||||
* Create a new RegExp based on the value and flags of the original.
|
||||
*/
|
||||
function copyRegExp(regExp, state) {
|
||||
const clone = new state.Constructor(regExp.source, regExp.flags);
|
||||
clone.lastIndex = regExp.lastIndex;
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Return the original value (an identity function).
|
||||
*
|
||||
* @note
|
||||
* This is used for objects that cannot be copied, such as WeakMap.
|
||||
*/
|
||||
function copySelf(value, _state) {
|
||||
return value;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the values of the original.
|
||||
*/
|
||||
function copySetLoose(set, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(set, clone);
|
||||
set.forEach((value) => {
|
||||
clone.add(state.copier(value, state));
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the values of the original, as well as any custom properties.
|
||||
*/
|
||||
function copySetStrict(set, state) {
|
||||
return copyOwnPropertiesStrict(set, copySetLoose(set, state), state);
|
||||
}
|
||||
|
||||
function createDefaultCache() {
|
||||
return new WeakMap();
|
||||
}
|
||||
function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }) {
|
||||
const defaultMethods = {
|
||||
array: strict ? copyArrayStrict : copyArrayLoose,
|
||||
arrayBuffer: copyArrayBuffer,
|
||||
asyncGenerator: copySelf,
|
||||
blob: copyBlob,
|
||||
dataView: copyDataView,
|
||||
date: copyDate,
|
||||
error: copySelf,
|
||||
generator: copySelf,
|
||||
map: strict ? copyMapStrict : copyMapLoose,
|
||||
object: strict ? copyObjectStrict : copyObjectLoose,
|
||||
regExp: copyRegExp,
|
||||
set: strict ? copySetStrict : copySetLoose,
|
||||
};
|
||||
const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods;
|
||||
const copiers = getTagSpecificCopiers(methods);
|
||||
const createCache = createCacheOverride || createDefaultCache;
|
||||
// Extra safety check to ensure that object and array copiers are always provided,
|
||||
// avoiding runtime errors.
|
||||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||
if (!copiers.Object || !copiers.Array) {
|
||||
throw new Error('An object and array copier must be provided.');
|
||||
}
|
||||
return { createCache, copiers, methods, strict: Boolean(strict) };
|
||||
}
|
||||
/**
|
||||
* Get the copiers used for each specific object tag.
|
||||
*/
|
||||
function getTagSpecificCopiers(methods) {
|
||||
return {
|
||||
Arguments: methods.object,
|
||||
Array: methods.array,
|
||||
ArrayBuffer: methods.arrayBuffer,
|
||||
AsyncGenerator: methods.asyncGenerator,
|
||||
Blob: methods.blob,
|
||||
Boolean: copyPrimitiveWrapper,
|
||||
DataView: methods.dataView,
|
||||
Date: methods.date,
|
||||
Error: methods.error,
|
||||
Float32Array: methods.arrayBuffer,
|
||||
Float64Array: methods.arrayBuffer,
|
||||
Generator: methods.generator,
|
||||
Int8Array: methods.arrayBuffer,
|
||||
Int16Array: methods.arrayBuffer,
|
||||
Int32Array: methods.arrayBuffer,
|
||||
Map: methods.map,
|
||||
Number: copyPrimitiveWrapper,
|
||||
Object: methods.object,
|
||||
Promise: copySelf,
|
||||
RegExp: methods.regExp,
|
||||
Set: methods.set,
|
||||
String: copyPrimitiveWrapper,
|
||||
WeakMap: copySelf,
|
||||
WeakSet: copySelf,
|
||||
Uint8Array: methods.arrayBuffer,
|
||||
Uint8ClampedArray: methods.arrayBuffer,
|
||||
Uint16Array: methods.arrayBuffer,
|
||||
Uint32Array: methods.arrayBuffer,
|
||||
Uint64Array: methods.arrayBuffer,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a custom copier based on custom options for any of the following:
|
||||
* - `createCache` method to create a cache for copied objects
|
||||
* - custom copier `methods` for specific object types
|
||||
* - `strict` mode to copy all properties with their descriptors
|
||||
*/
|
||||
function createCopier(options = {}) {
|
||||
const { createCache, copiers } = getOptions(options);
|
||||
const { Array: copyArray, Object: copyObject } = copiers;
|
||||
function copier(value, state) {
|
||||
state.prototype = state.Constructor = undefined;
|
||||
if (!value || typeof value !== 'object') {
|
||||
return value;
|
||||
}
|
||||
if (state.cache.has(value)) {
|
||||
return state.cache.get(value);
|
||||
}
|
||||
state.prototype = Object.getPrototypeOf(value);
|
||||
// Using logical AND for speed, since optional chaining transforms to
|
||||
// a local variable usage.
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
|
||||
state.Constructor = state.prototype && state.prototype.constructor;
|
||||
// plain objects
|
||||
if (!state.Constructor || state.Constructor === Object) {
|
||||
return copyObject(value, state);
|
||||
}
|
||||
// arrays
|
||||
if (Array.isArray(value)) {
|
||||
return copyArray(value, state);
|
||||
}
|
||||
const tagSpecificCopier = copiers[getTag(value)];
|
||||
if (tagSpecificCopier) {
|
||||
return tagSpecificCopier(value, state);
|
||||
}
|
||||
return typeof value.then === 'function' ? value : copyObject(value, state);
|
||||
}
|
||||
return function copy(value) {
|
||||
return copier(value, {
|
||||
Constructor: undefined,
|
||||
cache: createCache(),
|
||||
copier,
|
||||
prototype: undefined,
|
||||
});
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Copy a value deeply as much as possible, where strict recreation of object properties
* is maintained. All properties (including non-enumerable ones) are copied with their
|
||||
* original property descriptors on both objects and arrays.
|
||||
*/
|
||||
const copyStrict = createCopier({ strict: true });
|
||||
/**
|
||||
* Copy a value deeply as much as possible.
|
||||
*/
|
||||
const copy = createCopier();
|
||||
|
||||
exports.copy = copy;
|
||||
exports.copyStrict = copyStrict;
|
||||
exports.createCopier = createCopier;
|
||||
//# sourceMappingURL=index.cjs.map
|
||||
1
node_modules/fast-copy/dist/cjs/index.cjs.map
generated
vendored
1
node_modules/fast-copy/dist/cjs/index.cjs.map
generated
vendored
File diff suppressed because one or more lines are too long
20
node_modules/fast-copy/dist/cjs/index.d.cts
generated
vendored
20
node_modules/fast-copy/dist/cjs/index.d.cts
generated
vendored
@ -1,20 +0,0 @@
|
||||
import type { CreateCopierOptions } from './options.d.cts';
|
||||
export type { State } from './copier.d.cts';
|
||||
export type { CreateCopierOptions } from './options.d.cts';
|
||||
/**
|
||||
* Create a custom copier based on custom options for any of the following:
|
||||
* - `createCache` method to create a cache for copied objects
|
||||
* - custom copier `methods` for specific object types
|
||||
* - `strict` mode to copy all properties with their descriptors
|
||||
*/
|
||||
export declare function createCopier(options?: CreateCopierOptions): <Value>(value: Value) => Value;
|
||||
/**
|
||||
* Copy a value deeply as much as possible, where strict recreation of object properties
* is maintained. All properties (including non-enumerable ones) are copied with their
|
||||
* original property descriptors on both objects and arrays.
|
||||
*/
|
||||
export declare const copyStrict: <Value>(value: Value) => Value;
|
||||
/**
|
||||
* Copy a value deeply as much as possible.
|
||||
*/
|
||||
export declare const copy: <Value>(value: Value) => Value;
|
||||
68
node_modules/fast-copy/dist/cjs/options.d.cts
generated
vendored
68
node_modules/fast-copy/dist/cjs/options.d.cts
generated
vendored
@ -1,68 +0,0 @@
|
||||
import type { InternalCopier } from './copier.d.cts';
|
||||
import type { Cache } from './utils.d.cts';
|
||||
export interface CopierMethods {
|
||||
array?: InternalCopier<any[]>;
|
||||
arrayBuffer?: InternalCopier<ArrayBuffer>;
|
||||
asyncGenerator?: InternalCopier<AsyncGenerator>;
|
||||
blob?: InternalCopier<Blob>;
|
||||
dataView?: InternalCopier<DataView>;
|
||||
date?: InternalCopier<Date>;
|
||||
error?: InternalCopier<Error>;
|
||||
generator?: InternalCopier<Generator>;
|
||||
map?: InternalCopier<Map<any, any>>;
|
||||
object?: InternalCopier<Record<string, any>>;
|
||||
regExp?: InternalCopier<RegExp>;
|
||||
set?: InternalCopier<Set<any>>;
|
||||
}
|
||||
interface Copiers {
|
||||
[key: string]: InternalCopier<any> | undefined;
|
||||
Arguments: InternalCopier<Record<string, any>>;
|
||||
Array: InternalCopier<any[]>;
|
||||
ArrayBuffer: InternalCopier<ArrayBuffer>;
|
||||
AsyncGenerator: InternalCopier<AsyncGenerator>;
|
||||
Blob: InternalCopier<Blob>;
|
||||
Boolean: InternalCopier<Boolean>;
|
||||
DataView: InternalCopier<DataView>;
|
||||
Date: InternalCopier<Date>;
|
||||
Error: InternalCopier<Error>;
|
||||
Float32Array: InternalCopier<ArrayBuffer>;
|
||||
Float64Array: InternalCopier<ArrayBuffer>;
|
||||
Generator: InternalCopier<Generator>;
|
||||
Int8Array: InternalCopier<ArrayBuffer>;
|
||||
Int16Array: InternalCopier<ArrayBuffer>;
|
||||
Int32Array: InternalCopier<ArrayBuffer>;
|
||||
Map: InternalCopier<Map<any, any>>;
|
||||
Number: InternalCopier<Number>;
|
||||
Object: InternalCopier<Record<string, any>>;
|
||||
Promise: InternalCopier<Promise<any>>;
|
||||
RegExp: InternalCopier<RegExp>;
|
||||
Set: InternalCopier<Set<any>>;
|
||||
String: InternalCopier<String>;
|
||||
WeakMap: InternalCopier<WeakMap<any, any>>;
|
||||
WeakSet: InternalCopier<WeakSet<any>>;
|
||||
Uint8Array: InternalCopier<ArrayBuffer>;
|
||||
Uint8ClampedArray: InternalCopier<ArrayBuffer>;
|
||||
Uint16Array: InternalCopier<ArrayBuffer>;
|
||||
Uint32Array: InternalCopier<ArrayBuffer>;
|
||||
Uint64Array: InternalCopier<ArrayBuffer>;
|
||||
}
|
||||
export interface CreateCopierOptions {
|
||||
createCache?: () => Cache;
|
||||
methods?: CopierMethods;
|
||||
strict?: boolean;
|
||||
}
|
||||
export interface RequiredCreateCopierOptions extends Omit<Required<CreateCopierOptions>, 'methods'> {
|
||||
copiers: Copiers;
|
||||
methods: Required<CopierMethods>;
|
||||
}
|
||||
export declare function createDefaultCache(): Cache;
|
||||
export declare function getOptions({
|
||||
createCache: createCacheOverride,
|
||||
methods: methodsOverride,
|
||||
strict,
|
||||
}: CreateCopierOptions): RequiredCreateCopierOptions;
|
||||
/**
|
||||
* Get the copiers used for each specific object tag.
|
||||
*/
|
||||
export declare function getTagSpecificCopiers(methods: Required<CopierMethods>): Copiers;
|
||||
export {};
|
||||
13
node_modules/fast-copy/dist/cjs/utils.d.cts
generated
vendored
13
node_modules/fast-copy/dist/cjs/utils.d.cts
generated
vendored
@ -1,13 +0,0 @@
|
||||
export interface Cache {
|
||||
has: (value: any) => boolean;
|
||||
set: (key: any, value: any) => void;
|
||||
get: (key: any) => any;
|
||||
}
|
||||
/**
|
||||
* Get an empty version of the object with the same prototype it has.
|
||||
*/
|
||||
export declare function getCleanClone(prototype: any): any;
|
||||
/**
|
||||
* Get the tag of the value passed, so that the correct copier can be used.
|
||||
*/
|
||||
export declare function getTag(value: any): string;
|
||||
75
node_modules/fast-copy/dist/es/copier.d.mts
generated
vendored
75
node_modules/fast-copy/dist/es/copier.d.mts
generated
vendored
@ -1,75 +0,0 @@
|
||||
import type { Cache } from './utils.d.mts';
|
||||
export type InternalCopier<Value> = (value: Value, state: State) => Value;
|
||||
export interface State {
|
||||
Constructor: any;
|
||||
cache: Cache;
|
||||
copier: InternalCopier<any>;
|
||||
prototype: any;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array.
|
||||
*/
|
||||
export declare function copyArrayLoose(array: any[], state: State): any;
|
||||
/**
|
||||
* Deeply copy the indexed values in the array, as well as any custom properties.
|
||||
*/
|
||||
export declare function copyArrayStrict<Value extends any[]>(array: Value, state: State): Value;
|
||||
/**
|
||||
* Copy the contents of the ArrayBuffer.
|
||||
*/
|
||||
export declare function copyArrayBuffer<Value extends ArrayBufferLike>(arrayBuffer: Value, _state: State): Value;
|
||||
/**
|
||||
* Create a new Blob with the contents of the original.
|
||||
*/
|
||||
export declare function copyBlob<Value extends Blob>(blob: Value, _state: State): Value;
|
||||
/**
|
||||
* Create a new DataView with the contents of the original.
|
||||
*/
|
||||
export declare function copyDataView<Value extends DataView>(dataView: Value, state: State): Value;
|
||||
/**
|
||||
* Create a new Date based on the time of the original.
|
||||
*/
|
||||
export declare function copyDate<Value extends Date>(date: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the keys and values of the original.
|
||||
*/
|
||||
export declare function copyMapLoose<Value extends Map<any, any>>(map: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the keys and values of the original, as well as any custom properties.
|
||||
*/
|
||||
export declare function copyMapStrict<Value extends Map<any, any>>(map: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original.
|
||||
*/
|
||||
export declare function copyObjectLoose<Value extends Record<string, any>>(object: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original, as well
|
||||
* as any hidden or non-enumerable properties.
|
||||
*/
|
||||
export declare function copyObjectStrict<Value extends Record<string, any>>(object: Value, state: State): Value;
|
||||
/**
|
||||
* Create a new primitive wrapper from the value of the original.
|
||||
*/
|
||||
export declare function copyPrimitiveWrapper<Value extends Boolean | Number | String>(
|
||||
primitiveObject: Value,
|
||||
state: State,
|
||||
): Value;
|
||||
/**
|
||||
* Create a new RegExp based on the value and flags of the original.
|
||||
*/
|
||||
export declare function copyRegExp<Value extends RegExp>(regExp: Value, state: State): Value;
|
||||
/**
|
||||
* Return the original value (an identity function).
|
||||
*
|
||||
* @note
|
||||
* This is used for objects that cannot be copied, such as WeakMap.
|
||||
*/
|
||||
export declare function copySelf<Value>(value: Value, _state: State): Value;
|
||||
/**
|
||||
* Deeply copy the values of the original.
|
||||
*/
|
||||
export declare function copySetLoose<Value extends Set<any>>(set: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the values of the original, as well as any custom properties.
|
||||
*/
|
||||
export declare function copySetStrict<Value extends Set<any>>(set: Value, state: State): Value;
|
||||
20
node_modules/fast-copy/dist/es/index.d.mts
generated
vendored
@ -1,20 +0,0 @@
import type { CreateCopierOptions } from './options.d.mts';
export type { State } from './copier.d.mts';
export type { CreateCopierOptions } from './options.d.mts';
/**
 * Create a custom copier based on custom options for any of the following:
 * - `createCache` method to create a cache for copied objects
 * - custom copier `methods` for specific object types
 * - `strict` mode to copy all properties with their descriptors
 */
export declare function createCopier(options?: CreateCopierOptions): <Value>(value: Value) => Value;
/**
 * Copy a value deeply as much as possible, where strict recreation of object properties
 * is maintained. All properties (including non-enumerable ones) are copied with their
 * original property descriptors on both objects and arrays.
 */
export declare const copyStrict: <Value>(value: Value) => Value;
/**
 * Copy a value deeply as much as possible.
 */
export declare const copy: <Value>(value: Value) => Value;
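
A brief usage sketch for the three exports declared above (example values are mine, not from the package's documentation): copy walks enumerable properties, copyStrict also recreates property descriptors, and createCopier lets individual copiers be overridden.

import { copy, copyStrict, createCopier } from 'fast-copy';

const source = Object.defineProperty({ visible: 1 }, 'hidden', {
  value: 2,
  enumerable: false,
});

const loose = copy(source);        // { visible: 1 }: the hidden property is skipped
const strict = copyStrict(source); // the hidden property is copied with its descriptor

// Hypothetical override: share Date instances by reference instead of cloning them.
const copySharingDates = createCopier({
  methods: { date: (date) => date },
});
console.log(copySharingDates({ at: new Date() }).at instanceof Date); // true
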
336
node_modules/fast-copy/dist/es/index.mjs
generated
vendored
@ -1,336 +0,0 @@
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const toStringFunction = Function.prototype.toString;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const toStringObject = Object.prototype.toString;
|
||||
/**
|
||||
* Get an empty version of the object with the same prototype it has.
|
||||
*/
|
||||
function getCleanClone(prototype) {
|
||||
if (!prototype) {
|
||||
return Object.create(null);
|
||||
}
|
||||
const Constructor = prototype.constructor;
|
||||
if (Constructor === Object) {
|
||||
return prototype === Object.prototype ? {} : Object.create(prototype);
|
||||
}
|
||||
if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) {
|
||||
try {
|
||||
return new Constructor();
|
||||
}
|
||||
catch (_a) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
return Object.create(prototype);
|
||||
}
|
||||
/**
|
||||
* Get the tag of the value passed, so that the correct copier can be used.
|
||||
*/
|
||||
function getTag(value) {
|
||||
const stringTag = value[Symbol.toStringTag];
|
||||
if (stringTag) {
|
||||
return stringTag;
|
||||
}
|
||||
const type = toStringObject.call(value);
|
||||
return type.substring(8, type.length - 1);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const { hasOwnProperty, propertyIsEnumerable } = Object.prototype;
|
||||
function copyOwnDescriptor(original, clone, property, state) {
|
||||
const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: original[property],
|
||||
writable: true,
|
||||
};
|
||||
const descriptor = ownDescriptor.get || ownDescriptor.set
|
||||
? ownDescriptor
|
||||
: {
|
||||
configurable: ownDescriptor.configurable,
|
||||
enumerable: ownDescriptor.enumerable,
|
||||
value: state.copier(ownDescriptor.value, state),
|
||||
writable: ownDescriptor.writable,
|
||||
};
|
||||
try {
|
||||
Object.defineProperty(clone, property, descriptor);
|
||||
}
|
||||
catch (_a) {
|
||||
// The above can fail on node in extreme edge cases, so fall back to the loose assignment.
|
||||
clone[property] = descriptor.get ? descriptor.get() : descriptor.value;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Strictly copy all properties contained on the object.
|
||||
*/
|
||||
function copyOwnPropertiesStrict(value, clone, state) {
|
||||
const names = Object.getOwnPropertyNames(value);
|
||||
for (let index = 0; index < names.length; ++index) {
|
||||
copyOwnDescriptor(value, clone, names[index], state);
|
||||
}
|
||||
const symbols = Object.getOwnPropertySymbols(value);
|
||||
for (let index = 0; index < symbols.length; ++index) {
|
||||
copyOwnDescriptor(value, clone, symbols[index], state);
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array.
|
||||
*/
|
||||
function copyArrayLoose(array, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(array, clone);
|
||||
for (let index = 0; index < array.length; ++index) {
|
||||
clone[index] = state.copier(array[index], state);
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array, as well as any custom properties.
|
||||
*/
|
||||
function copyArrayStrict(array, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(array, clone);
|
||||
return copyOwnPropertiesStrict(array, clone, state);
|
||||
}
|
||||
/**
|
||||
* Copy the contents of the ArrayBuffer.
|
||||
*/
|
||||
function copyArrayBuffer(arrayBuffer, _state) {
|
||||
return arrayBuffer.slice(0);
|
||||
}
|
||||
/**
|
||||
* Create a new Blob with the contents of the original.
|
||||
*/
|
||||
function copyBlob(blob, _state) {
|
||||
return blob.slice(0, blob.size, blob.type);
|
||||
}
|
||||
/**
|
||||
* Create a new DataView with the contents of the original.
|
||||
*/
|
||||
function copyDataView(dataView, state) {
|
||||
return new state.Constructor(copyArrayBuffer(dataView.buffer));
|
||||
}
|
||||
/**
|
||||
* Create a new Date based on the time of the original.
|
||||
*/
|
||||
function copyDate(date, state) {
|
||||
return new state.Constructor(date.getTime());
|
||||
}
|
||||
/**
|
||||
* Deeply copy the keys and values of the original.
|
||||
*/
|
||||
function copyMapLoose(map, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(map, clone);
|
||||
map.forEach((value, key) => {
|
||||
clone.set(key, state.copier(value, state));
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the keys and values of the original, as well as any custom properties.
|
||||
*/
|
||||
function copyMapStrict(map, state) {
|
||||
return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state);
|
||||
}
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original.
|
||||
*/
|
||||
function copyObjectLoose(object, state) {
|
||||
const clone = getCleanClone(state.prototype);
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(object, clone);
|
||||
for (const key in object) {
|
||||
if (hasOwnProperty.call(object, key)) {
|
||||
clone[key] = state.copier(object[key], state);
|
||||
}
|
||||
}
|
||||
const symbols = Object.getOwnPropertySymbols(object);
|
||||
for (let index = 0; index < symbols.length; ++index) {
|
||||
const symbol = symbols[index];
|
||||
if (propertyIsEnumerable.call(object, symbol)) {
|
||||
clone[symbol] = state.copier(object[symbol], state);
|
||||
}
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original, as well
|
||||
* as any hidden or non-enumerable properties.
|
||||
*/
|
||||
function copyObjectStrict(object, state) {
|
||||
const clone = getCleanClone(state.prototype);
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(object, clone);
|
||||
return copyOwnPropertiesStrict(object, clone, state);
|
||||
}
|
||||
/**
|
||||
* Create a new primitive wrapper from the value of the original.
|
||||
*/
|
||||
function copyPrimitiveWrapper(primitiveObject, state) {
|
||||
return new state.Constructor(primitiveObject.valueOf());
|
||||
}
|
||||
/**
|
||||
* Create a new RegExp based on the value and flags of the original.
|
||||
*/
|
||||
function copyRegExp(regExp, state) {
|
||||
const clone = new state.Constructor(regExp.source, regExp.flags);
|
||||
clone.lastIndex = regExp.lastIndex;
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Return the original value (an identity function).
|
||||
*
|
||||
* @note
|
||||
* This is used for objects that cannot be copied, such as WeakMap.
|
||||
*/
|
||||
function copySelf(value, _state) {
|
||||
return value;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the values of the original.
|
||||
*/
|
||||
function copySetLoose(set, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(set, clone);
|
||||
set.forEach((value) => {
|
||||
clone.add(state.copier(value, state));
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the values of the original, as well as any custom properties.
|
||||
*/
|
||||
function copySetStrict(set, state) {
|
||||
return copyOwnPropertiesStrict(set, copySetLoose(set, state), state);
|
||||
}
|
||||
|
||||
function createDefaultCache() {
|
||||
return new WeakMap();
|
||||
}
|
||||
function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }) {
|
||||
const defaultMethods = {
|
||||
array: strict ? copyArrayStrict : copyArrayLoose,
|
||||
arrayBuffer: copyArrayBuffer,
|
||||
asyncGenerator: copySelf,
|
||||
blob: copyBlob,
|
||||
dataView: copyDataView,
|
||||
date: copyDate,
|
||||
error: copySelf,
|
||||
generator: copySelf,
|
||||
map: strict ? copyMapStrict : copyMapLoose,
|
||||
object: strict ? copyObjectStrict : copyObjectLoose,
|
||||
regExp: copyRegExp,
|
||||
set: strict ? copySetStrict : copySetLoose,
|
||||
};
|
||||
const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods;
|
||||
const copiers = getTagSpecificCopiers(methods);
|
||||
const createCache = createCacheOverride || createDefaultCache;
|
||||
// Extra safety check to ensure that object and array copiers are always provided,
|
||||
// avoiding runtime errors.
|
||||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||
if (!copiers.Object || !copiers.Array) {
|
||||
throw new Error('An object and array copier must be provided.');
|
||||
}
|
||||
return { createCache, copiers, methods, strict: Boolean(strict) };
|
||||
}
|
||||
/**
|
||||
* Get the copiers used for each specific object tag.
|
||||
*/
|
||||
function getTagSpecificCopiers(methods) {
|
||||
return {
|
||||
Arguments: methods.object,
|
||||
Array: methods.array,
|
||||
ArrayBuffer: methods.arrayBuffer,
|
||||
AsyncGenerator: methods.asyncGenerator,
|
||||
Blob: methods.blob,
|
||||
Boolean: copyPrimitiveWrapper,
|
||||
DataView: methods.dataView,
|
||||
Date: methods.date,
|
||||
Error: methods.error,
|
||||
Float32Array: methods.arrayBuffer,
|
||||
Float64Array: methods.arrayBuffer,
|
||||
Generator: methods.generator,
|
||||
Int8Array: methods.arrayBuffer,
|
||||
Int16Array: methods.arrayBuffer,
|
||||
Int32Array: methods.arrayBuffer,
|
||||
Map: methods.map,
|
||||
Number: copyPrimitiveWrapper,
|
||||
Object: methods.object,
|
||||
Promise: copySelf,
|
||||
RegExp: methods.regExp,
|
||||
Set: methods.set,
|
||||
String: copyPrimitiveWrapper,
|
||||
WeakMap: copySelf,
|
||||
WeakSet: copySelf,
|
||||
Uint8Array: methods.arrayBuffer,
|
||||
Uint8ClampedArray: methods.arrayBuffer,
|
||||
Uint16Array: methods.arrayBuffer,
|
||||
Uint32Array: methods.arrayBuffer,
|
||||
Uint64Array: methods.arrayBuffer,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a custom copier based on custom options for any of the following:
|
||||
* - `createCache` method to create a cache for copied objects
|
||||
* - custom copier `methods` for specific object types
|
||||
* - `strict` mode to copy all properties with their descriptors
|
||||
*/
|
||||
function createCopier(options = {}) {
|
||||
const { createCache, copiers } = getOptions(options);
|
||||
const { Array: copyArray, Object: copyObject } = copiers;
|
||||
function copier(value, state) {
|
||||
state.prototype = state.Constructor = undefined;
|
||||
if (!value || typeof value !== 'object') {
|
||||
return value;
|
||||
}
|
||||
if (state.cache.has(value)) {
|
||||
return state.cache.get(value);
|
||||
}
|
||||
state.prototype = Object.getPrototypeOf(value);
|
||||
// Using logical AND for speed, since optional chaining transforms to
|
||||
// a local variable usage.
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
|
||||
state.Constructor = state.prototype && state.prototype.constructor;
|
||||
// plain objects
|
||||
if (!state.Constructor || state.Constructor === Object) {
|
||||
return copyObject(value, state);
|
||||
}
|
||||
// arrays
|
||||
if (Array.isArray(value)) {
|
||||
return copyArray(value, state);
|
||||
}
|
||||
const tagSpecificCopier = copiers[getTag(value)];
|
||||
if (tagSpecificCopier) {
|
||||
return tagSpecificCopier(value, state);
|
||||
}
|
||||
return typeof value.then === 'function' ? value : copyObject(value, state);
|
||||
}
|
||||
return function copy(value) {
|
||||
return copier(value, {
|
||||
Constructor: undefined,
|
||||
cache: createCache(),
|
||||
copier,
|
||||
prototype: undefined,
|
||||
});
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Copy a value deeply as much as possible, where strict recreation of object properties
|
||||
* is maintained. All properties (including non-enumerable ones) are copied with their
|
||||
* original property descriptors on both objects and arrays.
|
||||
*/
|
||||
const copyStrict = createCopier({ strict: true });
|
||||
/**
|
||||
* Copy a value deeply as much as possible.
|
||||
*/
|
||||
const copy = createCopier();
|
||||
|
||||
export { copy, copyStrict, createCopier };
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
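
A few consequences of the dispatch logic above, written as a small test sketch of my own (not shipped with the package): tagged values go through their tag-specific copier, and thenables with no matching tag are returned by reference.

import { copy } from 'fast-copy';

const state = {
  when: new Date(0),
  lookup: new Map([['a', { n: 1 }]]),
  pending: Promise.resolve(42),
};

const cloned = copy(state);

console.log(cloned.when !== state.when);                       // true: copyDate builds a new Date
console.log(cloned.lookup.get('a') !== state.lookup.get('a')); // true: copyMapLoose recurses into values
console.log(cloned.pending === state.pending);                 // true: the Promise tag maps to copySelf
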
1
node_modules/fast-copy/dist/es/index.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
68
node_modules/fast-copy/dist/es/options.d.mts
generated
vendored
@ -1,68 +0,0 @@
|
||||
import type { InternalCopier } from './copier.d.mts';
|
||||
import type { Cache } from './utils.d.mts';
|
||||
export interface CopierMethods {
|
||||
array?: InternalCopier<any[]>;
|
||||
arrayBuffer?: InternalCopier<ArrayBuffer>;
|
||||
asyncGenerator?: InternalCopier<AsyncGenerator>;
|
||||
blob?: InternalCopier<Blob>;
|
||||
dataView?: InternalCopier<DataView>;
|
||||
date?: InternalCopier<Date>;
|
||||
error?: InternalCopier<Error>;
|
||||
generator?: InternalCopier<Generator>;
|
||||
map?: InternalCopier<Map<any, any>>;
|
||||
object?: InternalCopier<Record<string, any>>;
|
||||
regExp?: InternalCopier<RegExp>;
|
||||
set?: InternalCopier<Set<any>>;
|
||||
}
|
||||
interface Copiers {
|
||||
[key: string]: InternalCopier<any> | undefined;
|
||||
Arguments: InternalCopier<Record<string, any>>;
|
||||
Array: InternalCopier<any[]>;
|
||||
ArrayBuffer: InternalCopier<ArrayBuffer>;
|
||||
AsyncGenerator: InternalCopier<AsyncGenerator>;
|
||||
Blob: InternalCopier<Blob>;
|
||||
Boolean: InternalCopier<Boolean>;
|
||||
DataView: InternalCopier<DataView>;
|
||||
Date: InternalCopier<Date>;
|
||||
Error: InternalCopier<Error>;
|
||||
Float32Array: InternalCopier<ArrayBuffer>;
|
||||
Float64Array: InternalCopier<ArrayBuffer>;
|
||||
Generator: InternalCopier<Generator>;
|
||||
Int8Array: InternalCopier<ArrayBuffer>;
|
||||
Int16Array: InternalCopier<ArrayBuffer>;
|
||||
Int32Array: InternalCopier<ArrayBuffer>;
|
||||
Map: InternalCopier<Map<any, any>>;
|
||||
Number: InternalCopier<Number>;
|
||||
Object: InternalCopier<Record<string, any>>;
|
||||
Promise: InternalCopier<Promise<any>>;
|
||||
RegExp: InternalCopier<RegExp>;
|
||||
Set: InternalCopier<Set<any>>;
|
||||
String: InternalCopier<String>;
|
||||
WeakMap: InternalCopier<WeakMap<any, any>>;
|
||||
WeakSet: InternalCopier<WeakSet<any>>;
|
||||
Uint8Array: InternalCopier<ArrayBuffer>;
|
||||
Uint8ClampedArray: InternalCopier<ArrayBuffer>;
|
||||
Uint16Array: InternalCopier<ArrayBuffer>;
|
||||
Uint32Array: InternalCopier<ArrayBuffer>;
|
||||
Uint64Array: InternalCopier<ArrayBuffer>;
|
||||
}
|
||||
export interface CreateCopierOptions {
|
||||
createCache?: () => Cache;
|
||||
methods?: CopierMethods;
|
||||
strict?: boolean;
|
||||
}
|
||||
export interface RequiredCreateCopierOptions extends Omit<Required<CreateCopierOptions>, 'methods'> {
|
||||
copiers: Copiers;
|
||||
methods: Required<CopierMethods>;
|
||||
}
|
||||
export declare function createDefaultCache(): Cache;
|
||||
export declare function getOptions({
|
||||
createCache: createCacheOverride,
|
||||
methods: methodsOverride,
|
||||
strict,
|
||||
}: CreateCopierOptions): RequiredCreateCopierOptions;
|
||||
/**
|
||||
* Get the copiers used for each specific object tag.
|
||||
*/
|
||||
export declare function getTagSpecificCopiers(methods: Required<CopierMethods>): Copiers;
|
||||
export {};
|
||||
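
The CopierMethods hooks declared above can replace any default copier. A hedged example (copyErrorDeep is my own name): errors are passed through unchanged by default (copySelf), but a custom error method can rebuild them.

import { createCopier } from 'fast-copy';

const copyErrorDeep = (error: Error): Error => {
  // Assumes the constructor accepts a single message argument, which holds for
  // the built-in Error subclasses.
  const clone = new (error.constructor as ErrorConstructor)(error.message);
  clone.stack = error.stack;
  return clone;
};

const copyWithErrors = createCopier({ methods: { error: copyErrorDeep } });

const failure = { error: new RangeError('out of bounds') };
const cloned = copyWithErrors(failure);
console.log(cloned.error !== failure.error);     // true
console.log(cloned.error instanceof RangeError); // true
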
13
node_modules/fast-copy/dist/es/utils.d.mts
generated
vendored
@ -1,13 +0,0 @@
|
||||
export interface Cache {
|
||||
has: (value: any) => boolean;
|
||||
set: (key: any, value: any) => void;
|
||||
get: (key: any) => any;
|
||||
}
|
||||
/**
|
||||
* Get an empty version of the object with the same prototype it has.
|
||||
*/
|
||||
export declare function getCleanClone(prototype: any): any;
|
||||
/**
|
||||
* Get the tag of the value passed, so that the correct copier can be used.
|
||||
*/
|
||||
export declare function getTag(value: any): string;
|
||||
72
node_modules/fast-copy/dist/umd/copier.d.ts
generated
vendored
@ -1,72 +0,0 @@
|
||||
import type { Cache } from './utils.ts';
|
||||
export type InternalCopier<Value> = (value: Value, state: State) => Value;
|
||||
export interface State {
|
||||
Constructor: any;
|
||||
cache: Cache;
|
||||
copier: InternalCopier<any>;
|
||||
prototype: any;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array.
|
||||
*/
|
||||
export declare function copyArrayLoose(array: any[], state: State): any;
|
||||
/**
|
||||
* Deeply copy the indexed values in the array, as well as any custom properties.
|
||||
*/
|
||||
export declare function copyArrayStrict<Value extends any[]>(array: Value, state: State): Value;
|
||||
/**
|
||||
* Copy the contents of the ArrayBuffer.
|
||||
*/
|
||||
export declare function copyArrayBuffer<Value extends ArrayBufferLike>(arrayBuffer: Value, _state: State): Value;
|
||||
/**
|
||||
* Create a new Blob with the contents of the original.
|
||||
*/
|
||||
export declare function copyBlob<Value extends Blob>(blob: Value, _state: State): Value;
|
||||
/**
|
||||
* Create a new DataView with the contents of the original.
|
||||
*/
|
||||
export declare function copyDataView<Value extends DataView>(dataView: Value, state: State): Value;
|
||||
/**
|
||||
* Create a new Date based on the time of the original.
|
||||
*/
|
||||
export declare function copyDate<Value extends Date>(date: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the keys and values of the original.
|
||||
*/
|
||||
export declare function copyMapLoose<Value extends Map<any, any>>(map: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the keys and values of the original, as well as any custom properties.
|
||||
*/
|
||||
export declare function copyMapStrict<Value extends Map<any, any>>(map: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original.
|
||||
*/
|
||||
export declare function copyObjectLoose<Value extends Record<string, any>>(object: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original, as well
|
||||
* as any hidden or non-enumerable properties.
|
||||
*/
|
||||
export declare function copyObjectStrict<Value extends Record<string, any>>(object: Value, state: State): Value;
|
||||
/**
|
||||
* Create a new primitive wrapper from the value of the original.
|
||||
*/
|
||||
export declare function copyPrimitiveWrapper<Value extends Boolean | Number | String>(primitiveObject: Value, state: State): Value;
|
||||
/**
|
||||
* Create a new RegExp based on the value and flags of the original.
|
||||
*/
|
||||
export declare function copyRegExp<Value extends RegExp>(regExp: Value, state: State): Value;
|
||||
/**
|
||||
* Return the original value (an identity function).
|
||||
*
|
||||
* @note
|
||||
* This is used for objects that cannot be copied, such as WeakMap.
|
||||
*/
|
||||
export declare function copySelf<Value>(value: Value, _state: State): Value;
|
||||
/**
|
||||
* Deeply copy the values of the original.
|
||||
*/
|
||||
export declare function copySetLoose<Value extends Set<any>>(set: Value, state: State): Value;
|
||||
/**
|
||||
* Deeply copy the values of the original, as well as any custom properties.
|
||||
*/
|
||||
export declare function copySetStrict<Value extends Set<any>>(set: Value, state: State): Value;
|
||||
20
node_modules/fast-copy/dist/umd/index.d.ts
generated
vendored
@ -1,20 +0,0 @@
import type { CreateCopierOptions } from './options.ts';
export type { State } from './copier.ts';
export type { CreateCopierOptions } from './options.ts';
/**
 * Create a custom copier based on custom options for any of the following:
 * - `createCache` method to create a cache for copied objects
 * - custom copier `methods` for specific object types
 * - `strict` mode to copy all properties with their descriptors
 */
export declare function createCopier(options?: CreateCopierOptions): <Value>(value: Value) => Value;
/**
 * Copy a value deeply as much as possible, where strict recreation of object properties
 * is maintained. All properties (including non-enumerable ones) are copied with their
 * original property descriptors on both objects and arrays.
 */
export declare const copyStrict: <Value>(value: Value) => Value;
/**
 * Copy a value deeply as much as possible.
 */
export declare const copy: <Value>(value: Value) => Value;
346
node_modules/fast-copy/dist/umd/index.js
generated
vendored
@ -1,346 +0,0 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global["fast-copy"] = {}));
|
||||
})(this, (function (exports) { 'use strict';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const toStringFunction = Function.prototype.toString;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const toStringObject = Object.prototype.toString;
|
||||
/**
|
||||
* Get an empty version of the object with the same prototype it has.
|
||||
*/
|
||||
function getCleanClone(prototype) {
|
||||
if (!prototype) {
|
||||
return Object.create(null);
|
||||
}
|
||||
const Constructor = prototype.constructor;
|
||||
if (Constructor === Object) {
|
||||
return prototype === Object.prototype ? {} : Object.create(prototype);
|
||||
}
|
||||
if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) {
|
||||
try {
|
||||
return new Constructor();
|
||||
}
|
||||
catch (_a) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
return Object.create(prototype);
|
||||
}
|
||||
/**
|
||||
* Get the tag of the value passed, so that the correct copier can be used.
|
||||
*/
|
||||
function getTag(value) {
|
||||
const stringTag = value[Symbol.toStringTag];
|
||||
if (stringTag) {
|
||||
return stringTag;
|
||||
}
|
||||
const type = toStringObject.call(value);
|
||||
return type.substring(8, type.length - 1);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
const { hasOwnProperty, propertyIsEnumerable } = Object.prototype;
|
||||
function copyOwnDescriptor(original, clone, property, state) {
|
||||
const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: original[property],
|
||||
writable: true,
|
||||
};
|
||||
const descriptor = ownDescriptor.get || ownDescriptor.set
|
||||
? ownDescriptor
|
||||
: {
|
||||
configurable: ownDescriptor.configurable,
|
||||
enumerable: ownDescriptor.enumerable,
|
||||
value: state.copier(ownDescriptor.value, state),
|
||||
writable: ownDescriptor.writable,
|
||||
};
|
||||
try {
|
||||
Object.defineProperty(clone, property, descriptor);
|
||||
}
|
||||
catch (_a) {
|
||||
// The above can fail on node in extreme edge cases, so fall back to the loose assignment.
|
||||
clone[property] = descriptor.get ? descriptor.get() : descriptor.value;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Strictly copy all properties contained on the object.
|
||||
*/
|
||||
function copyOwnPropertiesStrict(value, clone, state) {
|
||||
const names = Object.getOwnPropertyNames(value);
|
||||
for (let index = 0; index < names.length; ++index) {
|
||||
copyOwnDescriptor(value, clone, names[index], state);
|
||||
}
|
||||
const symbols = Object.getOwnPropertySymbols(value);
|
||||
for (let index = 0; index < symbols.length; ++index) {
|
||||
copyOwnDescriptor(value, clone, symbols[index], state);
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array.
|
||||
*/
|
||||
function copyArrayLoose(array, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(array, clone);
|
||||
for (let index = 0; index < array.length; ++index) {
|
||||
clone[index] = state.copier(array[index], state);
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the indexed values in the array, as well as any custom properties.
|
||||
*/
|
||||
function copyArrayStrict(array, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(array, clone);
|
||||
return copyOwnPropertiesStrict(array, clone, state);
|
||||
}
|
||||
/**
|
||||
* Copy the contents of the ArrayBuffer.
|
||||
*/
|
||||
function copyArrayBuffer(arrayBuffer, _state) {
|
||||
return arrayBuffer.slice(0);
|
||||
}
|
||||
/**
|
||||
* Create a new Blob with the contents of the original.
|
||||
*/
|
||||
function copyBlob(blob, _state) {
|
||||
return blob.slice(0, blob.size, blob.type);
|
||||
}
|
||||
/**
|
||||
* Create a new DataView with the contents of the original.
|
||||
*/
|
||||
function copyDataView(dataView, state) {
|
||||
return new state.Constructor(copyArrayBuffer(dataView.buffer));
|
||||
}
|
||||
/**
|
||||
* Create a new Date based on the time of the original.
|
||||
*/
|
||||
function copyDate(date, state) {
|
||||
return new state.Constructor(date.getTime());
|
||||
}
|
||||
/**
|
||||
* Deeply copy the keys and values of the original.
|
||||
*/
|
||||
function copyMapLoose(map, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(map, clone);
|
||||
map.forEach((value, key) => {
|
||||
clone.set(key, state.copier(value, state));
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the keys and values of the original, as well as any custom properties.
|
||||
*/
|
||||
function copyMapStrict(map, state) {
|
||||
return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state);
|
||||
}
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original.
|
||||
*/
|
||||
function copyObjectLoose(object, state) {
|
||||
const clone = getCleanClone(state.prototype);
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(object, clone);
|
||||
for (const key in object) {
|
||||
if (hasOwnProperty.call(object, key)) {
|
||||
clone[key] = state.copier(object[key], state);
|
||||
}
|
||||
}
|
||||
const symbols = Object.getOwnPropertySymbols(object);
|
||||
for (let index = 0; index < symbols.length; ++index) {
|
||||
const symbol = symbols[index];
|
||||
if (propertyIsEnumerable.call(object, symbol)) {
|
||||
clone[symbol] = state.copier(object[symbol], state);
|
||||
}
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the properties (keys and symbols) and values of the original, as well
|
||||
* as any hidden or non-enumerable properties.
|
||||
*/
|
||||
function copyObjectStrict(object, state) {
|
||||
const clone = getCleanClone(state.prototype);
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(object, clone);
|
||||
return copyOwnPropertiesStrict(object, clone, state);
|
||||
}
|
||||
/**
|
||||
* Create a new primitive wrapper from the value of the original.
|
||||
*/
|
||||
function copyPrimitiveWrapper(primitiveObject, state) {
|
||||
return new state.Constructor(primitiveObject.valueOf());
|
||||
}
|
||||
/**
|
||||
* Create a new RegExp based on the value and flags of the original.
|
||||
*/
|
||||
function copyRegExp(regExp, state) {
|
||||
const clone = new state.Constructor(regExp.source, regExp.flags);
|
||||
clone.lastIndex = regExp.lastIndex;
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Return the original value (an identity function).
|
||||
*
|
||||
* @note
|
||||
* This is used for objects that cannot be copied, such as WeakMap.
|
||||
*/
|
||||
function copySelf(value, _state) {
|
||||
return value;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the values of the original.
|
||||
*/
|
||||
function copySetLoose(set, state) {
|
||||
const clone = new state.Constructor();
|
||||
// set in the cache immediately to be able to reuse the object recursively
|
||||
state.cache.set(set, clone);
|
||||
set.forEach((value) => {
|
||||
clone.add(state.copier(value, state));
|
||||
});
|
||||
return clone;
|
||||
}
|
||||
/**
|
||||
* Deeply copy the values of the original, as well as any custom properties.
|
||||
*/
|
||||
function copySetStrict(set, state) {
|
||||
return copyOwnPropertiesStrict(set, copySetLoose(set, state), state);
|
||||
}
|
||||
|
||||
function createDefaultCache() {
|
||||
return new WeakMap();
|
||||
}
|
||||
function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }) {
|
||||
const defaultMethods = {
|
||||
array: strict ? copyArrayStrict : copyArrayLoose,
|
||||
arrayBuffer: copyArrayBuffer,
|
||||
asyncGenerator: copySelf,
|
||||
blob: copyBlob,
|
||||
dataView: copyDataView,
|
||||
date: copyDate,
|
||||
error: copySelf,
|
||||
generator: copySelf,
|
||||
map: strict ? copyMapStrict : copyMapLoose,
|
||||
object: strict ? copyObjectStrict : copyObjectLoose,
|
||||
regExp: copyRegExp,
|
||||
set: strict ? copySetStrict : copySetLoose,
|
||||
};
|
||||
const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods;
|
||||
const copiers = getTagSpecificCopiers(methods);
|
||||
const createCache = createCacheOverride || createDefaultCache;
|
||||
// Extra safety check to ensure that object and array copiers are always provided,
|
||||
// avoiding runtime errors.
|
||||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||
if (!copiers.Object || !copiers.Array) {
|
||||
throw new Error('An object and array copier must be provided.');
|
||||
}
|
||||
return { createCache, copiers, methods, strict: Boolean(strict) };
|
||||
}
|
||||
/**
|
||||
* Get the copiers used for each specific object tag.
|
||||
*/
|
||||
function getTagSpecificCopiers(methods) {
|
||||
return {
|
||||
Arguments: methods.object,
|
||||
Array: methods.array,
|
||||
ArrayBuffer: methods.arrayBuffer,
|
||||
AsyncGenerator: methods.asyncGenerator,
|
||||
Blob: methods.blob,
|
||||
Boolean: copyPrimitiveWrapper,
|
||||
DataView: methods.dataView,
|
||||
Date: methods.date,
|
||||
Error: methods.error,
|
||||
Float32Array: methods.arrayBuffer,
|
||||
Float64Array: methods.arrayBuffer,
|
||||
Generator: methods.generator,
|
||||
Int8Array: methods.arrayBuffer,
|
||||
Int16Array: methods.arrayBuffer,
|
||||
Int32Array: methods.arrayBuffer,
|
||||
Map: methods.map,
|
||||
Number: copyPrimitiveWrapper,
|
||||
Object: methods.object,
|
||||
Promise: copySelf,
|
||||
RegExp: methods.regExp,
|
||||
Set: methods.set,
|
||||
String: copyPrimitiveWrapper,
|
||||
WeakMap: copySelf,
|
||||
WeakSet: copySelf,
|
||||
Uint8Array: methods.arrayBuffer,
|
||||
Uint8ClampedArray: methods.arrayBuffer,
|
||||
Uint16Array: methods.arrayBuffer,
|
||||
Uint32Array: methods.arrayBuffer,
|
||||
Uint64Array: methods.arrayBuffer,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a custom copier based on custom options for any of the following:
|
||||
* - `createCache` method to create a cache for copied objects
|
||||
* - custom copier `methods` for specific object types
|
||||
* - `strict` mode to copy all properties with their descriptors
|
||||
*/
|
||||
function createCopier(options = {}) {
|
||||
const { createCache, copiers } = getOptions(options);
|
||||
const { Array: copyArray, Object: copyObject } = copiers;
|
||||
function copier(value, state) {
|
||||
state.prototype = state.Constructor = undefined;
|
||||
if (!value || typeof value !== 'object') {
|
||||
return value;
|
||||
}
|
||||
if (state.cache.has(value)) {
|
||||
return state.cache.get(value);
|
||||
}
|
||||
state.prototype = Object.getPrototypeOf(value);
|
||||
// Using logical AND for speed, since optional chaining transforms to
|
||||
// a local variable usage.
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
|
||||
state.Constructor = state.prototype && state.prototype.constructor;
|
||||
// plain objects
|
||||
if (!state.Constructor || state.Constructor === Object) {
|
||||
return copyObject(value, state);
|
||||
}
|
||||
// arrays
|
||||
if (Array.isArray(value)) {
|
||||
return copyArray(value, state);
|
||||
}
|
||||
const tagSpecificCopier = copiers[getTag(value)];
|
||||
if (tagSpecificCopier) {
|
||||
return tagSpecificCopier(value, state);
|
||||
}
|
||||
return typeof value.then === 'function' ? value : copyObject(value, state);
|
||||
}
|
||||
return function copy(value) {
|
||||
return copier(value, {
|
||||
Constructor: undefined,
|
||||
cache: createCache(),
|
||||
copier,
|
||||
prototype: undefined,
|
||||
});
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Copy a value deeply as much as possible, where strict recreation of object properties
|
||||
* is maintained. All properties (including non-enumerable ones) are copied with their
|
||||
* original property descriptors on both objects and arrays.
|
||||
*/
|
||||
const copyStrict = createCopier({ strict: true });
|
||||
/**
|
||||
* Copy a value deeply as much as possible.
|
||||
*/
|
||||
const copy = createCopier();
|
||||
|
||||
exports.copy = copy;
|
||||
exports.copyStrict = copyStrict;
|
||||
exports.createCopier = createCopier;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=index.js.map
|
||||
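
When the UMD bundle above is loaded with a plain script tag (no CommonJS, no AMD), the factory attaches the API to globalThis under the "fast-copy" key. A minimal typed sketch of reading it back follows; the global declaration is mine, not shipped by the package.

export {}; // keep this snippet a module so the global augmentation applies

declare global {
  interface Window {
    'fast-copy': {
      copy: <Value>(value: Value) => Value;
      copyStrict: <Value>(value: Value) => Value;
      createCopier: (options?: unknown) => <Value>(value: Value) => Value;
    };
  }
}

// <script src="node_modules/fast-copy/dist/umd/index.js"></script> must have run first.
const { copy } = window['fast-copy'];
const cloned = copy({ nested: { ok: true } });
console.log(cloned.nested.ok); // true
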
1
node_modules/fast-copy/dist/umd/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
64
node_modules/fast-copy/dist/umd/options.d.ts
generated
vendored
@ -1,64 +0,0 @@
|
||||
import type { InternalCopier } from './copier.ts';
|
||||
import type { Cache } from './utils.ts';
|
||||
export interface CopierMethods {
|
||||
array?: InternalCopier<any[]>;
|
||||
arrayBuffer?: InternalCopier<ArrayBuffer>;
|
||||
asyncGenerator?: InternalCopier<AsyncGenerator>;
|
||||
blob?: InternalCopier<Blob>;
|
||||
dataView?: InternalCopier<DataView>;
|
||||
date?: InternalCopier<Date>;
|
||||
error?: InternalCopier<Error>;
|
||||
generator?: InternalCopier<Generator>;
|
||||
map?: InternalCopier<Map<any, any>>;
|
||||
object?: InternalCopier<Record<string, any>>;
|
||||
regExp?: InternalCopier<RegExp>;
|
||||
set?: InternalCopier<Set<any>>;
|
||||
}
|
||||
interface Copiers {
|
||||
[key: string]: InternalCopier<any> | undefined;
|
||||
Arguments: InternalCopier<Record<string, any>>;
|
||||
Array: InternalCopier<any[]>;
|
||||
ArrayBuffer: InternalCopier<ArrayBuffer>;
|
||||
AsyncGenerator: InternalCopier<AsyncGenerator>;
|
||||
Blob: InternalCopier<Blob>;
|
||||
Boolean: InternalCopier<Boolean>;
|
||||
DataView: InternalCopier<DataView>;
|
||||
Date: InternalCopier<Date>;
|
||||
Error: InternalCopier<Error>;
|
||||
Float32Array: InternalCopier<ArrayBuffer>;
|
||||
Float64Array: InternalCopier<ArrayBuffer>;
|
||||
Generator: InternalCopier<Generator>;
|
||||
Int8Array: InternalCopier<ArrayBuffer>;
|
||||
Int16Array: InternalCopier<ArrayBuffer>;
|
||||
Int32Array: InternalCopier<ArrayBuffer>;
|
||||
Map: InternalCopier<Map<any, any>>;
|
||||
Number: InternalCopier<Number>;
|
||||
Object: InternalCopier<Record<string, any>>;
|
||||
Promise: InternalCopier<Promise<any>>;
|
||||
RegExp: InternalCopier<RegExp>;
|
||||
Set: InternalCopier<Set<any>>;
|
||||
String: InternalCopier<String>;
|
||||
WeakMap: InternalCopier<WeakMap<any, any>>;
|
||||
WeakSet: InternalCopier<WeakSet<any>>;
|
||||
Uint8Array: InternalCopier<ArrayBuffer>;
|
||||
Uint8ClampedArray: InternalCopier<ArrayBuffer>;
|
||||
Uint16Array: InternalCopier<ArrayBuffer>;
|
||||
Uint32Array: InternalCopier<ArrayBuffer>;
|
||||
Uint64Array: InternalCopier<ArrayBuffer>;
|
||||
}
|
||||
export interface CreateCopierOptions {
|
||||
createCache?: () => Cache;
|
||||
methods?: CopierMethods;
|
||||
strict?: boolean;
|
||||
}
|
||||
export interface RequiredCreateCopierOptions extends Omit<Required<CreateCopierOptions>, 'methods'> {
|
||||
copiers: Copiers;
|
||||
methods: Required<CopierMethods>;
|
||||
}
|
||||
export declare function createDefaultCache(): Cache;
|
||||
export declare function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }: CreateCopierOptions): RequiredCreateCopierOptions;
|
||||
/**
|
||||
* Get the copiers used for each specific object tag.
|
||||
*/
|
||||
export declare function getTagSpecificCopiers(methods: Required<CopierMethods>): Copiers;
|
||||
export {};
|
||||
13
node_modules/fast-copy/dist/umd/utils.d.ts
generated
vendored
@ -1,13 +0,0 @@
|
||||
export interface Cache {
|
||||
has: (value: any) => boolean;
|
||||
set: (key: any, value: any) => void;
|
||||
get: (key: any) => any;
|
||||
}
|
||||
/**
|
||||
* Get an empty version of the object with the same prototype it has.
|
||||
*/
|
||||
export declare function getCleanClone(prototype: any): any;
|
||||
/**
|
||||
* Get the tag of the value passed, so that the correct copier can be used.
|
||||
*/
|
||||
export declare function getTag(value: any): string;
|
||||
54
node_modules/fast-copy/index.d.ts
generated
vendored
@ -1,54 +0,0 @@
|
||||
interface Cache {
|
||||
has: (value: any) => boolean;
|
||||
set: (key: any, value: any) => void;
|
||||
get: (key: any) => any;
|
||||
}
|
||||
|
||||
type InternalCopier<Value> = (value: Value, state: State) => Value;
|
||||
interface State {
|
||||
Constructor: any;
|
||||
cache: Cache;
|
||||
copier: InternalCopier<any>;
|
||||
prototype: any;
|
||||
}
|
||||
|
||||
interface CopierMethods {
|
||||
array?: InternalCopier<any[]>;
|
||||
arrayBuffer?: InternalCopier<ArrayBuffer>;
|
||||
asyncGenerator?: InternalCopier<AsyncGenerator>;
|
||||
blob?: InternalCopier<Blob>;
|
||||
dataView?: InternalCopier<DataView>;
|
||||
date?: InternalCopier<Date>;
|
||||
error?: InternalCopier<Error>;
|
||||
generator?: InternalCopier<Generator>;
|
||||
map?: InternalCopier<Map<any, any>>;
|
||||
object?: InternalCopier<Record<string, any>>;
|
||||
regExp?: InternalCopier<RegExp>;
|
||||
set?: InternalCopier<Set<any>>;
|
||||
}
|
||||
interface CreateCopierOptions {
|
||||
createCache?: () => Cache;
|
||||
methods?: CopierMethods;
|
||||
strict?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a custom copier based on custom options for any of the following:
|
||||
* - `createCache` method to create a cache for copied objects
|
||||
* - custom copier `methods` for specific object types
|
||||
* - `strict` mode to copy all properties with their descriptors
|
||||
*/
|
||||
declare function createCopier(options?: CreateCopierOptions): <Value>(value: Value) => Value;
|
||||
/**
|
||||
* Copy a value deeply as much as possible, where strict recreation of object properties
|
||||
* is maintained. All properties (including non-enumerable ones) are copied with their
|
||||
* original property descriptors on both objects and arrays.
|
||||
*/
|
||||
declare const copyStrict: <Value>(value: Value) => Value;
|
||||
/**
|
||||
* Copy a value deeply as much as possible.
|
||||
*/
|
||||
declare const copy: <Value>(value: Value) => Value;
|
||||
|
||||
export { copy, copyStrict, createCopier };
|
||||
export type { CreateCopierOptions, State };
|
||||
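
The State type exported above is what custom copier methods receive. A hedged sketch (copySortedSet is my own helper): registering the clone in state.cache before recursing is what keeps circular references from looping, and state.copier is how nested values are handed back to the main walk.

import { createCopier, type State } from 'fast-copy';

function copySortedSet(set: Set<number>, state: State): Set<number> {
  const clone = new Set<number>();
  state.cache.set(set, clone); // register before recursing (circular-reference safety)
  for (const value of [...set].sort((a, b) => a - b)) {
    clone.add(state.copier(value, state));
  }
  return clone;
}

const copyWithSortedSets = createCopier({ methods: { set: copySortedSet } });
const cloned = copyWithSortedSets({ ids: new Set([3, 1, 2]) });
console.log([...cloned.ids]); // [1, 2, 3]
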
103
node_modules/fast-copy/package.json
generated
vendored
@ -1,103 +0,0 @@
|
||||
{
|
||||
"author": "tony_quetano@planttheidea.com",
|
||||
"browser": "dist/umd/index.js",
|
||||
"bugs": {
|
||||
"url": "https://github.com/planttheidea/fast-copy/issues"
|
||||
},
|
||||
"contributors": [
|
||||
"Dariusz Rzepka <rzepkadarek@gmail.com>"
|
||||
],
|
||||
"description": "A blazing fast deep object copier",
|
||||
"devDependencies": {
|
||||
"@planttheidea/build-tools": "^1.2.2",
|
||||
"@types/lodash": "^4.17.21",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/ramda": "^0.31.1",
|
||||
"@types/react": "^19.2.7",
|
||||
"@vitest/coverage-v8": "^4.0.15",
|
||||
"cli-table3": "^0.6.5",
|
||||
"clone": "^2.1.2",
|
||||
"deepclone": "^1.0.2",
|
||||
"eslint": "^9.39.1",
|
||||
"fast-clone": "^1.5.13",
|
||||
"lodash": "^4.17.21",
|
||||
"prettier": "^3.7.4",
|
||||
"ramda": "^0.32.0",
|
||||
"react": "^19.2.1",
|
||||
"react-dom": "^19.2.1",
|
||||
"release-it": "19.0.6",
|
||||
"rollup": "^4.53.3",
|
||||
"tinybench": "^6.0.0",
|
||||
"typescript": "^5.9.3",
|
||||
"vite": "^7.2.6",
|
||||
"vitest": "^4.0.15"
|
||||
},
|
||||
"exports": {
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./dist/es/index.d.mts",
|
||||
"default": "./dist/es/index.mjs"
|
||||
},
|
||||
"require": {
|
||||
"types": "./dist/cjs/index.d.cts",
|
||||
"default": "./dist/cjs/index.cjs"
|
||||
},
|
||||
"default": {
|
||||
"types": "./dist/umd/index.d.ts",
|
||||
"default": "./dist/umd/index.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"files": [
|
||||
"dist",
|
||||
"CHANGELOG.md",
|
||||
"LICENSE",
|
||||
"README.md",
|
||||
"index.d.ts",
|
||||
"package.json"
|
||||
],
|
||||
"homepage": "https://github.com/planttheidea/fast-copy#readme",
|
||||
"keywords": [
|
||||
"clone",
|
||||
"deep",
|
||||
"copy",
|
||||
"fast"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "dist/cjs/index.cjs",
|
||||
"module": "dist/es/index.mjs",
|
||||
"name": "fast-copy",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/planttheidea/fast-copy.git"
|
||||
},
|
||||
"scripts": {
|
||||
"benchmark": "npm run build && node benchmark/index.js",
|
||||
"build": "npm run clean && npm run build:dist && npm run build:types",
|
||||
"build:dist": "NODE_ENV=production rollup -c config/rollup.config.js",
|
||||
"build:types": "pti fix-types -l dist",
|
||||
"clean": "rm -rf dist",
|
||||
"clean:cjs": "rm -rf dist/cjs",
|
||||
"clean:es": "rm -rf dist/es",
|
||||
"clean:esm": "rm -rf dist/esm",
|
||||
"clean:min": "rm -rf dist/min",
|
||||
"dev": "vite --config=config/vite.config.ts",
|
||||
"format": "prettier . --log-level=warn --write",
|
||||
"format:check": "prettier . --log-level=warn --check",
|
||||
"lint": "eslint --max-warnings=0",
|
||||
"lint:fix": "npm run lint -- --fix",
|
||||
"release:alpha": "release-it --config=config/release-it/alpha.json",
|
||||
"release:beta": "release-it --config=config/release-it/beta.json",
|
||||
"release:dry": "release-it --dry-run",
|
||||
"release:rc": "release-it --config=config/release-it/rc.json",
|
||||
"release:scripts": "npm run format:check && npm run typecheck && npm run lint && npm run test && npm run build",
|
||||
"release:stable": "release-it --config=config/release-it/stable.json",
|
||||
"start": "npm run dev",
|
||||
"test": "vitest run --config=config/vitest.config.ts",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"types": "./index.d.ts",
|
||||
"version": "4.0.2"
|
||||
}
|
||||
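
A sketch of what the conditional "exports" map above resolves to; the paths in the comments are taken from the manifest, while the exact loader behaviour depends on the Node.js version and bundler in use.

import { copy as copyEsm } from 'fast-copy'; // resolves to dist/es/index.mjs (types: index.d.mts)
import { createRequire } from 'node:module';

const requireCjs = createRequire(import.meta.url);
const { copy: copyCjs } = requireCjs('fast-copy'); // resolves to dist/cjs/index.cjs (types: index.d.cts)

// Both entry points expose the same API surface.
console.log(typeof copyEsm === 'function' && typeof copyCjs === 'function'); // true
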
8
node_modules/fast-safe-stringify/.travis.yml
generated
vendored
@ -1,8 +0,0 @@
language: node_js
sudo: false
node_js:
  - '4'
  - '6'
  - '8'
  - '9'
  - '10'
17
node_modules/fast-safe-stringify/CHANGELOG.md
generated
vendored
@ -1,17 +0,0 @@
# Changelog

## v.2.0.0

Features

- Added stable-stringify (see documentation)
- Support replacer
- Support spacer
- toJSON support without forceDecirc property
- Improved performance

Breaking changes

- Manipulating the input value in a `toJSON` function is not possible anymore in
  all cases (see documentation)
- Dropped support for e.g. IE8 and Node.js < 4
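
A small sketch of the toJSON behaviour the changelog refers to (my example, not from the package documentation): when a value defines toJSON, JSON.stringify ultimately honours it, and no forceDecirc marker has to be placed on the value.

import stringify from 'fast-safe-stringify';

const order = {
  id: 7,
  toJSON() {
    return { id: this.id, kind: 'order' };
  },
};

console.log(stringify(order)); // {"id":7,"kind":"order"}
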
23
node_modules/fast-safe-stringify/LICENSE
generated
vendored
@ -1,23 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 David Mark Clements
|
||||
Copyright (c) 2017 David Mark Clements & Matteo Collina
|
||||
Copyright (c) 2018 David Mark Clements, Matteo Collina & Ruben Bridgewater
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
137
node_modules/fast-safe-stringify/benchmark.js
generated
vendored
@ -1,137 +0,0 @@
|
||||
const Benchmark = require('benchmark')
|
||||
const suite = new Benchmark.Suite()
|
||||
const { inspect } = require('util')
|
||||
const jsonStringifySafe = require('json-stringify-safe')
|
||||
const fastSafeStringify = require('./')
|
||||
|
||||
const array = new Array(10).fill(0).map((_, i) => i)
|
||||
const obj = { foo: array }
|
||||
const circ = JSON.parse(JSON.stringify(obj))
|
||||
circ.o = { obj: circ, array }
|
||||
const circGetters = JSON.parse(JSON.stringify(obj))
|
||||
Object.assign(circGetters, { get o () { return { obj: circGetters, array } } })
|
||||
|
||||
const deep = require('./package.json')
|
||||
deep.deep = JSON.parse(JSON.stringify(deep))
|
||||
deep.deep.deep = JSON.parse(JSON.stringify(deep))
|
||||
deep.deep.deep.deep = JSON.parse(JSON.stringify(deep))
|
||||
deep.array = array
|
||||
|
||||
const deepCirc = JSON.parse(JSON.stringify(deep))
|
||||
deepCirc.deep.deep.deep.circ = deepCirc
|
||||
deepCirc.deep.deep.circ = deepCirc
|
||||
deepCirc.deep.circ = deepCirc
|
||||
deepCirc.array = array
|
||||
|
||||
const deepCircGetters = JSON.parse(JSON.stringify(deep))
|
||||
for (let i = 0; i < 10; i++) {
|
||||
deepCircGetters[i.toString()] = {
|
||||
deep: {
|
||||
deep: {
|
||||
get circ () { return deep.deep },
|
||||
deep: { get circ () { return deep.deep.deep } }
|
||||
},
|
||||
get circ () { return deep }
|
||||
},
|
||||
get array () { return array }
|
||||
}
|
||||
}
|
||||
|
||||
const deepCircNonCongifurableGetters = JSON.parse(JSON.stringify(deep))
|
||||
Object.defineProperty(deepCircNonCongifurableGetters.deep.deep.deep, 'circ', {
|
||||
get: () => deepCircNonCongifurableGetters,
|
||||
enumerable: true,
|
||||
configurable: false
|
||||
})
|
||||
Object.defineProperty(deepCircNonCongifurableGetters.deep.deep, 'circ', {
|
||||
get: () => deepCircNonCongifurableGetters,
|
||||
enumerable: true,
|
||||
configurable: false
|
||||
})
|
||||
Object.defineProperty(deepCircNonCongifurableGetters.deep, 'circ', {
|
||||
get: () => deepCircNonCongifurableGetters,
|
||||
enumerable: true,
|
||||
configurable: false
|
||||
})
|
||||
Object.defineProperty(deepCircNonCongifurableGetters, 'array', {
|
||||
get: () => array,
|
||||
enumerable: true,
|
||||
configurable: false
|
||||
})
|
||||
|
||||
suite.add('util.inspect: simple object ', function () {
|
||||
inspect(obj, { showHidden: false, depth: null })
|
||||
})
|
||||
suite.add('util.inspect: circular ', function () {
|
||||
inspect(circ, { showHidden: false, depth: null })
|
||||
})
|
||||
suite.add('util.inspect: circular getters ', function () {
|
||||
inspect(circGetters, { showHidden: false, depth: null })
|
||||
})
|
||||
suite.add('util.inspect: deep ', function () {
|
||||
inspect(deep, { showHidden: false, depth: null })
|
||||
})
|
||||
suite.add('util.inspect: deep circular ', function () {
|
||||
inspect(deepCirc, { showHidden: false, depth: null })
|
||||
})
|
||||
suite.add('util.inspect: large deep circular getters ', function () {
|
||||
inspect(deepCircGetters, { showHidden: false, depth: null })
|
||||
})
|
||||
suite.add('util.inspect: deep non-conf circular getters', function () {
|
||||
inspect(deepCircNonCongifurableGetters, { showHidden: false, depth: null })
|
||||
})
|
||||
|
||||
suite.add('\njson-stringify-safe: simple object ', function () {
|
||||
jsonStringifySafe(obj)
|
||||
})
|
||||
suite.add('json-stringify-safe: circular ', function () {
|
||||
jsonStringifySafe(circ)
|
||||
})
|
||||
suite.add('json-stringify-safe: circular getters ', function () {
|
||||
jsonStringifySafe(circGetters)
|
||||
})
|
||||
suite.add('json-stringify-safe: deep ', function () {
|
||||
jsonStringifySafe(deep)
|
||||
})
|
||||
suite.add('json-stringify-safe: deep circular ', function () {
|
||||
jsonStringifySafe(deepCirc)
|
||||
})
|
||||
suite.add('json-stringify-safe: large deep circular getters ', function () {
|
||||
jsonStringifySafe(deepCircGetters)
|
||||
})
|
||||
suite.add('json-stringify-safe: deep non-conf circular getters', function () {
|
||||
jsonStringifySafe(deepCircNonCongifurableGetters)
|
||||
})
|
||||
|
||||
suite.add('\nfast-safe-stringify: simple object ', function () {
|
||||
fastSafeStringify(obj)
|
||||
})
|
||||
suite.add('fast-safe-stringify: circular ', function () {
|
||||
fastSafeStringify(circ)
|
||||
})
|
||||
suite.add('fast-safe-stringify: circular getters ', function () {
|
||||
fastSafeStringify(circGetters)
|
||||
})
|
||||
suite.add('fast-safe-stringify: deep ', function () {
|
||||
fastSafeStringify(deep)
|
||||
})
|
||||
suite.add('fast-safe-stringify: deep circular ', function () {
|
||||
fastSafeStringify(deepCirc)
|
||||
})
|
||||
suite.add('fast-safe-stringify: large deep circular getters ', function () {
|
||||
fastSafeStringify(deepCircGetters)
|
||||
})
|
||||
suite.add('fast-safe-stringify: deep non-conf circular getters', function () {
|
||||
fastSafeStringify(deepCircNonCongifurableGetters)
|
||||
})
|
||||
|
||||
// add listeners
|
||||
suite.on('cycle', function (event) {
|
||||
console.log(String(event.target))
|
||||
})
|
||||
|
||||
suite.on('complete', function () {
|
||||
console.log('\nFastest is ' + this.filter('fastest').map('name'))
|
||||
})
|
||||
|
||||
suite.run({ delay: 1, minSamples: 150 })
|
||||
23
node_modules/fast-safe-stringify/index.d.ts
generated
vendored
@ -1,23 +0,0 @@
declare function stringify(
  value: any,
  replacer?: (key: string, value: any) => any,
  space?: string | number,
  options?: { depthLimit: number | undefined; edgesLimit: number | undefined }
): string;

declare namespace stringify {
  export function stable(
    value: any,
    replacer?: (key: string, value: any) => any,
    space?: string | number,
    options?: { depthLimit: number | undefined; edgesLimit: number | undefined }
  ): string;
  export function stableStringify(
    value: any,
    replacer?: (key: string, value: any) => any,
    space?: string | number,
    options?: { depthLimit: number | undefined; edgesLimit: number | undefined }
  ): string;
}

export default stringify;
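
A usage sketch for the declarations above (example values are mine): the default export survives circular references, stable() additionally produces deterministic key order, and the optional fourth argument caps traversal depth and per-object edges.

import stringify from 'fast-safe-stringify';

const node: { name: string; parent?: unknown } = { name: 'child' };
node.parent = { name: 'root', child: node }; // circular reference

console.log(stringify(node));
// {"name":"child","parent":{"name":"root","child":"[Circular]"}}

// Deep or overly wide substructures are replaced with "[...]" when limits are set.
console.log(stringify.stable(node, undefined, 2, { depthLimit: 4, edgesLimit: 100 }));
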
229
node_modules/fast-safe-stringify/index.js
generated
vendored
@ -1,229 +0,0 @@
|
||||
module.exports = stringify
|
||||
stringify.default = stringify
|
||||
stringify.stable = deterministicStringify
|
||||
stringify.stableStringify = deterministicStringify
|
||||
|
||||
var LIMIT_REPLACE_NODE = '[...]'
|
||||
var CIRCULAR_REPLACE_NODE = '[Circular]'
|
||||
|
||||
var arr = []
|
||||
var replacerStack = []
|
||||
|
||||
function defaultOptions () {
|
||||
return {
|
||||
depthLimit: Number.MAX_SAFE_INTEGER,
|
||||
edgesLimit: Number.MAX_SAFE_INTEGER
|
||||
}
|
||||
}
|
||||
|
||||
// Regular stringify
|
||||
function stringify (obj, replacer, spacer, options) {
|
||||
if (typeof options === 'undefined') {
|
||||
options = defaultOptions()
|
||||
}
|
||||
|
||||
decirc(obj, '', 0, [], undefined, 0, options)
|
||||
var res
|
||||
try {
|
||||
if (replacerStack.length === 0) {
|
||||
res = JSON.stringify(obj, replacer, spacer)
|
||||
} else {
|
||||
res = JSON.stringify(obj, replaceGetterValues(replacer), spacer)
|
||||
}
|
||||
} catch (_) {
|
||||
return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]')
|
||||
} finally {
|
||||
while (arr.length !== 0) {
|
||||
var part = arr.pop()
|
||||
if (part.length === 4) {
|
||||
Object.defineProperty(part[0], part[1], part[3])
|
||||
} else {
|
||||
part[0][part[1]] = part[2]
|
||||
}
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
function setReplace (replace, val, k, parent) {
|
||||
var propertyDescriptor = Object.getOwnPropertyDescriptor(parent, k)
|
||||
if (propertyDescriptor.get !== undefined) {
|
||||
if (propertyDescriptor.configurable) {
|
||||
Object.defineProperty(parent, k, { value: replace })
|
||||
arr.push([parent, k, val, propertyDescriptor])
|
||||
} else {
|
||||
replacerStack.push([val, k, replace])
|
||||
}
|
||||
} else {
|
||||
parent[k] = replace
|
||||
arr.push([parent, k, val])
|
||||
}
|
||||
}
|
||||
|
||||
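// Depth-first walk performed before JSON.stringify: circular references and values beyond
// depthLimit/edgesLimit are swapped for marker strings, and every mutation is recorded so the
// finally block in stringify() can restore the original object.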
function decirc (val, k, edgeIndex, stack, parent, depth, options) {
|
||||
depth += 1
|
||||
var i
|
||||
if (typeof val === 'object' && val !== null) {
|
||||
for (i = 0; i < stack.length; i++) {
|
||||
if (stack[i] === val) {
|
||||
setReplace(CIRCULAR_REPLACE_NODE, val, k, parent)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
typeof options.depthLimit !== 'undefined' &&
|
||||
depth > options.depthLimit
|
||||
) {
|
||||
setReplace(LIMIT_REPLACE_NODE, val, k, parent)
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
typeof options.edgesLimit !== 'undefined' &&
|
||||
edgeIndex + 1 > options.edgesLimit
|
||||
) {
|
||||
setReplace(LIMIT_REPLACE_NODE, val, k, parent)
|
||||
return
|
||||
}
|
||||
|
||||
stack.push(val)
|
||||
// Optimize for Arrays. Big arrays could kill the performance otherwise!
|
||||
if (Array.isArray(val)) {
|
||||
for (i = 0; i < val.length; i++) {
|
||||
decirc(val[i], i, i, stack, val, depth, options)
|
||||
}
|
||||
} else {
|
||||
var keys = Object.keys(val)
|
||||
for (i = 0; i < keys.length; i++) {
|
||||
var key = keys[i]
|
||||
decirc(val[key], key, i, stack, val, depth, options)
|
||||
}
|
||||
}
|
||||
stack.pop()
|
||||
}
|
||||
}
|
||||
|
||||
// Stable-stringify
|
||||
function compareFunction (a, b) {
|
||||
if (a < b) {
|
||||
return -1
|
||||
}
|
||||
if (a > b) {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
function deterministicStringify (obj, replacer, spacer, options) {
|
||||
if (typeof options === 'undefined') {
|
||||
options = defaultOptions()
|
||||
}
|
||||
|
||||
var tmp = deterministicDecirc(obj, '', 0, [], undefined, 0, options) || obj
|
||||
var res
|
||||
try {
|
||||
if (replacerStack.length === 0) {
|
||||
res = JSON.stringify(tmp, replacer, spacer)
|
||||
} else {
|
||||
res = JSON.stringify(tmp, replaceGetterValues(replacer), spacer)
|
||||
}
|
||||
} catch (_) {
|
||||
return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]')
|
||||
} finally {
|
||||
// Ensure that we restore the object as it was.
|
||||
while (arr.length !== 0) {
|
||||
var part = arr.pop()
|
||||
if (part.length === 4) {
|
||||
Object.defineProperty(part[0], part[1], part[3])
|
||||
} else {
|
||||
part[0][part[1]] = part[2]
|
||||
}
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
function deterministicDecirc (val, k, edgeIndex, stack, parent, depth, options) {
|
||||
depth += 1
|
||||
var i
|
||||
if (typeof val === 'object' && val !== null) {
|
||||
for (i = 0; i < stack.length; i++) {
|
||||
if (stack[i] === val) {
|
||||
setReplace(CIRCULAR_REPLACE_NODE, val, k, parent)
|
||||
return
|
||||
}
|
||||
}
|
||||
try {
|
||||
if (typeof val.toJSON === 'function') {
|
||||
return
|
||||
}
|
||||
} catch (_) {
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
typeof options.depthLimit !== 'undefined' &&
|
||||
depth > options.depthLimit
|
||||
) {
|
||||
setReplace(LIMIT_REPLACE_NODE, val, k, parent)
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
typeof options.edgesLimit !== 'undefined' &&
|
||||
edgeIndex + 1 > options.edgesLimit
|
||||
) {
|
||||
setReplace(LIMIT_REPLACE_NODE, val, k, parent)
|
||||
return
|
||||
}
|
||||
|
||||
stack.push(val)
|
||||
// Optimize for Arrays. Big arrays could kill the performance otherwise!
|
||||
if (Array.isArray(val)) {
|
||||
for (i = 0; i < val.length; i++) {
|
||||
deterministicDecirc(val[i], i, i, stack, val, depth, options)
|
||||
}
|
||||
} else {
|
||||
// Create a temporary object in the required way
|
||||
var tmp = {}
|
||||
var keys = Object.keys(val).sort(compareFunction)
|
||||
for (i = 0; i < keys.length; i++) {
|
||||
var key = keys[i]
|
||||
deterministicDecirc(val[key], key, i, stack, val, depth, options)
|
||||
tmp[key] = val[key]
|
||||
}
|
||||
if (typeof parent !== 'undefined') {
|
||||
arr.push([parent, k, val])
|
||||
parent[k] = tmp
|
||||
} else {
|
||||
return tmp
|
||||
}
|
||||
}
|
||||
stack.pop()
|
||||
}
|
||||
}
|
||||
|
||||
// wraps replacer function to handle values we couldn't replace
|
||||
// and mark them as replaced value
|
||||
function replaceGetterValues (replacer) {
|
||||
replacer =
|
||||
typeof replacer !== 'undefined'
|
||||
? replacer
|
||||
: function (k, v) {
|
||||
return v
|
||||
}
|
||||
return function (key, val) {
|
||||
if (replacerStack.length > 0) {
|
||||
for (var i = 0; i < replacerStack.length; i++) {
|
||||
var part = replacerStack[i]
|
||||
if (part[1] === key && part[0] === val) {
|
||||
val = part[2]
|
||||
replacerStack.splice(i, 1)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return replacer.call(this, key, val)
|
||||
}
|
||||
}
|
||||
46
node_modules/fast-safe-stringify/package.json
generated
vendored
46
node_modules/fast-safe-stringify/package.json
generated
vendored
@ -1,46 +0,0 @@
|
||||
{
|
||||
"name": "fast-safe-stringify",
|
||||
"version": "2.1.1",
|
||||
"description": "Safely and quickly serialize JavaScript objects",
|
||||
"keywords": [
|
||||
"stable",
|
||||
"stringify",
|
||||
"JSON",
|
||||
"JSON.stringify",
|
||||
"safe",
|
||||
"serialize"
|
||||
],
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "standard && tap --no-esm test.js test-stable.js",
|
||||
"benchmark": "node benchmark.js"
|
||||
},
|
||||
"author": "David Mark Clements",
|
||||
"contributors": [
|
||||
"Ruben Bridgewater",
|
||||
"Matteo Collina",
|
||||
"Ben Gourley",
|
||||
"Gabriel Lesperance",
|
||||
"Alex Liu",
|
||||
"Christoph Walcher",
|
||||
"Nicholas Young"
|
||||
],
|
||||
"license": "MIT",
|
||||
"typings": "index",
|
||||
"devDependencies": {
|
||||
"benchmark": "^2.1.4",
|
||||
"clone": "^2.1.0",
|
||||
"json-stringify-safe": "^5.0.1",
|
||||
"standard": "^11.0.0",
|
||||
"tap": "^12.0.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/davidmarkclements/fast-safe-stringify.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/davidmarkclements/fast-safe-stringify/issues"
|
||||
},
|
||||
"homepage": "https://github.com/davidmarkclements/fast-safe-stringify#readme",
|
||||
"dependencies": {}
|
||||
}
|
||||
170
node_modules/fast-safe-stringify/readme.md
generated
vendored
170
node_modules/fast-safe-stringify/readme.md
generated
vendored
@ -1,170 +0,0 @@
|
||||
# fast-safe-stringify
|
||||
|
||||
Safe and fast serialization alternative to [JSON.stringify][].
|
||||
|
||||
Gracefully handles circular structures instead of throwing in most cases.
It may return an error string if the circular object is too complex to analyze,
e.g. when proxies are involved.
|
||||
|
||||
Provides a deterministic ("stable") version as well that will also gracefully
|
||||
handle circular structures. See the example below for further information.
|
||||
|
||||
## Usage
|
||||
|
||||
The same as [JSON.stringify][].
|
||||
|
||||
`stringify(value[, replacer[, space[, options]]])`
|
||||
|
||||
```js
|
||||
const safeStringify = require('fast-safe-stringify')
|
||||
const o = { a: 1 }
|
||||
o.o = o
|
||||
|
||||
console.log(safeStringify(o))
|
||||
// '{"a":1,"o":"[Circular]"}'
|
||||
console.log(JSON.stringify(o))
|
||||
// TypeError: Converting circular structure to JSON
|
||||
|
||||
function replacer(key, value) {
|
||||
console.log('Key:', JSON.stringify(key), 'Value:', JSON.stringify(value))
|
||||
// Remove the circular structure
|
||||
if (value === '[Circular]') {
|
||||
return
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
// these are also the default limits when no options object is passed into safeStringify
// configure them to lower the limits.
|
||||
const options = {
|
||||
depthLimit: Number.MAX_SAFE_INTEGER,
|
||||
edgesLimit: Number.MAX_SAFE_INTEGER
|
||||
};
|
||||
|
||||
const serialized = safeStringify(o, replacer, 2, options)
|
||||
// Key: "" Value: {"a":1,"o":"[Circular]"}
|
||||
// Key: "a" Value: 1
|
||||
// Key: "o" Value: "[Circular]"
|
||||
console.log(serialized)
|
||||
// {
|
||||
// "a": 1
|
||||
// }
|
||||
```
|
||||
|
||||
|
||||
Using the deterministic version also works the same:
|
||||
|
||||
```js
|
||||
const safeStringify = require('fast-safe-stringify')
|
||||
const o = { b: 1, a: 0 }
|
||||
o.o = o
|
||||
|
||||
console.log(safeStringify(o))
|
||||
// '{"b":1,"a":0,"o":"[Circular]"}'
|
||||
console.log(safeStringify.stableStringify(o))
|
||||
// '{"a":0,"b":1,"o":"[Circular]"}'
|
||||
console.log(JSON.stringify(o))
|
||||
// TypeError: Converting circular structure to JSON
|
||||
```
|
||||
|
||||
A faster and side-effect-free implementation is available in the
[safe-stable-stringify][] module. However, it is still considered experimental
due to its newer and more complex implementation.
|
||||
|
||||
### Replacement string constants
|
||||
|
||||
- `[Circular]` - when the same reference is found again
- `[...]` - when a limit from the options object is reached
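As a quick illustration (a minimal sketch with made-up data and deliberately low limits), both markers can be triggered at once:

```js
const safeStringify = require('fast-safe-stringify')

const o = { a: 1, nested: { deep: { deeper: true } } }
o.self = o

// values beyond depthLimit/edgesLimit become '[...]', the self reference becomes '[Circular]'
console.log(safeStringify(o, undefined, undefined, { depthLimit: 2, edgesLimit: 2 }))
// '{"a":1,"nested":{"deep":"[...]"},"self":"[Circular]"}'
```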
|
||||
|
||||
## Differences to JSON.stringify
|
||||
|
||||
In general the behavior is identical to [JSON.stringify][]. The [`replacer`][]
|
||||
and [`space`][] options are also available.
|
||||
|
||||
A few exceptions to [JSON.stringify][] exist when using [`toJSON`][] or
a [`replacer`][]:
|
||||
|
||||
### Regular safe stringify
|
||||
|
||||
- Manipulating a circular part of the passed-in value inside a `toJSON` method or the
`replacer` is not possible! Any other value and property may be manipulated.

- If a circular structure is detected and a [`replacer`][] is used, it
receives the string `[Circular]` as its argument instead of the circular
object itself.
|
||||
|
||||
### Deterministic ("stable") safe stringify
|
||||
|
||||
- Manipulating the input object in either a [`toJSON`][] or the [`replacer`][]
function has no effect on the output. The output relies entirely on
the shape the input value had at the moment it was passed to the stringify function!

- If a circular structure is detected and a [`replacer`][] is used, it
receives the string `[Circular]` as its argument instead of the circular
object itself.
|
||||
|
||||
A side-effect-free variation without these limitations is available as well
([`safe-stable-stringify`][]). It is also faster than the current
implementation, but it is still considered experimental due to its newer and more
complex implementation.
|
||||
|
||||
## Benchmarks
|
||||
|
||||
Although not JSON, the Node.js `util.inspect` method can be used for similar
|
||||
purposes (e.g. logging) and also handles circular references.
|
||||
|
||||
Here we compare `fast-safe-stringify` with some alternatives:
(Lenovo T450s with an i7-5600U CPU, using Node.js 8.9.4)
|
||||
|
||||
```md
|
||||
fast-safe-stringify: simple object x 1,121,497 ops/sec ±0.75% (97 runs sampled)
|
||||
fast-safe-stringify: circular x 560,126 ops/sec ±0.64% (96 runs sampled)
|
||||
fast-safe-stringify: deep x 32,472 ops/sec ±0.57% (95 runs sampled)
|
||||
fast-safe-stringify: deep circular x 32,513 ops/sec ±0.80% (92 runs sampled)
|
||||
|
||||
util.inspect: simple object x 272,837 ops/sec ±1.48% (90 runs sampled)
|
||||
util.inspect: circular x 116,896 ops/sec ±1.19% (95 runs sampled)
|
||||
util.inspect: deep x 19,382 ops/sec ±0.66% (92 runs sampled)
|
||||
util.inspect: deep circular x 18,717 ops/sec ±0.63% (96 runs sampled)
|
||||
|
||||
json-stringify-safe: simple object x 233,621 ops/sec ±0.97% (94 runs sampled)
|
||||
json-stringify-safe: circular x 110,409 ops/sec ±1.85% (95 runs sampled)
|
||||
json-stringify-safe: deep x 8,705 ops/sec ±0.87% (96 runs sampled)
|
||||
json-stringify-safe: deep circular x 8,336 ops/sec ±2.20% (93 runs sampled)
|
||||
```
|
||||
|
||||
For stable stringify comparisons, see the performance benchmarks in the
|
||||
[`safe-stable-stringify`][] readme.
|
||||
|
||||
## Protip
|
||||
|
||||
Whether `fast-safe-stringify` or an alternative is used: if the use case
consists of deeply nested objects without circular references, the following
pattern gives the best results.
Shallow or one-level-nested objects, on the other hand, will be slower with it.
It is entirely dependent on the use case.
|
||||
|
||||
```js
|
||||
const stringify = require('fast-safe-stringify')
|
||||
|
||||
function tryJSONStringify (obj) {
|
||||
try { return JSON.stringify(obj) } catch (_) {}
|
||||
}
|
||||
|
||||
const serializedString = tryJSONStringify(deep) || stringify(deep)
|
||||
```
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
Sponsored by [nearForm](http://nearform.com)
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
[`replacer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20replacer%20parameter
|
||||
[`safe-stable-stringify`]: https://github.com/BridgeAR/safe-stable-stringify
|
||||
[`space`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20space%20argument
|
||||
[`toJSON`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON()_behavior
|
||||
[benchmark]: https://github.com/epoberezkin/fast-json-stable-stringify/blob/67f688f7441010cfef91a6147280cc501701e83b/benchmark
|
||||
[JSON.stringify]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify
|
||||
404
node_modules/fast-safe-stringify/test-stable.js
generated
vendored
404
node_modules/fast-safe-stringify/test-stable.js
generated
vendored
@ -1,404 +0,0 @@
|
||||
const test = require('tap').test
|
||||
const fss = require('./').stable
|
||||
const clone = require('clone')
|
||||
const s = JSON.stringify
|
||||
const stream = require('stream')
|
||||
|
||||
test('circular reference to root', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
fixture.circle = fixture
|
||||
const expected = s({ circle: '[Circular]', name: 'Tywin Lannister' })
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular getter reference to root', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
get circle () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
|
||||
const expected = s({ circle: '[Circular]', name: 'Tywin Lannister' })
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested circular reference to root', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
fixture.id = { circle: fixture }
|
||||
const expected = s({ id: { circle: '[Circular]' }, name: 'Tywin Lannister' })
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('child circular reference', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: { name: 'Tyrion Lannister' }
|
||||
}
|
||||
fixture.child.dinklage = fixture.child
|
||||
const expected = s({
|
||||
child: {
|
||||
dinklage: '[Circular]',
|
||||
name: 'Tyrion Lannister'
|
||||
},
|
||||
name: 'Tywin Lannister'
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested child circular reference', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: { name: 'Tyrion Lannister' }
|
||||
}
|
||||
fixture.child.actor = { dinklage: fixture.child }
|
||||
const expected = s({
|
||||
child: {
|
||||
actor: { dinklage: '[Circular]' },
|
||||
name: 'Tyrion Lannister'
|
||||
},
|
||||
name: 'Tywin Lannister'
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular objects in an array', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
fixture.hand = [fixture, fixture]
|
||||
const expected = s({
|
||||
hand: ['[Circular]', '[Circular]'],
|
||||
name: 'Tywin Lannister'
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested circular references in an array', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
offspring: [{ name: 'Tyrion Lannister' }, { name: 'Cersei Lannister' }]
|
||||
}
|
||||
fixture.offspring[0].dinklage = fixture.offspring[0]
|
||||
fixture.offspring[1].headey = fixture.offspring[1]
|
||||
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
offspring: [
|
||||
{ dinklage: '[Circular]', name: 'Tyrion Lannister' },
|
||||
{ headey: '[Circular]', name: 'Cersei Lannister' }
|
||||
]
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular arrays', function (assert) {
|
||||
const fixture = []
|
||||
fixture.push(fixture, fixture)
|
||||
const expected = s(['[Circular]', '[Circular]'])
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested circular arrays', function (assert) {
|
||||
const fixture = []
|
||||
fixture.push(
|
||||
{ name: 'Jon Snow', bastards: fixture },
|
||||
{ name: 'Ramsay Bolton', bastards: fixture }
|
||||
)
|
||||
const expected = s([
|
||||
{ bastards: '[Circular]', name: 'Jon Snow' },
|
||||
{ bastards: '[Circular]', name: 'Ramsay Bolton' }
|
||||
])
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('repeated non-circular references in objects', function (assert) {
|
||||
const daenerys = { name: 'Daenerys Targaryen' }
|
||||
const fixture = {
|
||||
motherOfDragons: daenerys,
|
||||
queenOfMeereen: daenerys
|
||||
}
|
||||
const expected = s(fixture)
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('repeated non-circular references in arrays', function (assert) {
|
||||
const daenerys = { name: 'Daenerys Targaryen' }
|
||||
const fixture = [daenerys, daenerys]
|
||||
const expected = s(fixture)
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('double child circular reference', function (assert) {
|
||||
// create circular reference
|
||||
const child = { name: 'Tyrion Lannister' }
|
||||
child.dinklage = child
|
||||
|
||||
// include it twice in the fixture
|
||||
const fixture = { name: 'Tywin Lannister', childA: child, childB: child }
|
||||
const cloned = clone(fixture)
|
||||
const expected = s({
|
||||
childA: {
|
||||
dinklage: '[Circular]',
|
||||
name: 'Tyrion Lannister'
|
||||
},
|
||||
childB: {
|
||||
dinklage: '[Circular]',
|
||||
name: 'Tyrion Lannister'
|
||||
},
|
||||
name: 'Tywin Lannister'
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
|
||||
// check if the fixture has not been modified
|
||||
assert.same(fixture, cloned)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('child circular reference with toJSON', function (assert) {
|
||||
// Create a test object that has an overridden `toJSON` property
|
||||
TestObject.prototype.toJSON = function () {
|
||||
return { special: 'case' }
|
||||
}
|
||||
function TestObject (content) {}
|
||||
|
||||
// Creating a simple circular object structure
|
||||
const parentObject = {}
|
||||
parentObject.childObject = new TestObject()
|
||||
parentObject.childObject.parentObject = parentObject
|
||||
|
||||
// Creating a simple circular object structure
|
||||
const otherParentObject = new TestObject()
|
||||
otherParentObject.otherChildObject = {}
|
||||
otherParentObject.otherChildObject.otherParentObject = otherParentObject
|
||||
|
||||
// Making sure our original tests work
|
||||
assert.same(parentObject.childObject.parentObject, parentObject)
|
||||
assert.same(
|
||||
otherParentObject.otherChildObject.otherParentObject,
|
||||
otherParentObject
|
||||
)
|
||||
|
||||
// Should both be idempotent
|
||||
assert.equal(fss(parentObject), '{"childObject":{"special":"case"}}')
|
||||
assert.equal(fss(otherParentObject), '{"special":"case"}')
|
||||
|
||||
// Therefore the following assertion should be `true`
|
||||
assert.same(parentObject.childObject.parentObject, parentObject)
|
||||
assert.same(
|
||||
otherParentObject.otherChildObject.otherParentObject,
|
||||
otherParentObject
|
||||
)
|
||||
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('null object', function (assert) {
|
||||
const expected = s(null)
|
||||
const actual = fss(null)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('null property', function (assert) {
|
||||
const expected = s({ f: null })
|
||||
const actual = fss({ f: null })
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested child circular reference in toJSON', function (assert) {
|
||||
var circle = { some: 'data' }
|
||||
circle.circle = circle
|
||||
var a = {
|
||||
b: {
|
||||
toJSON: function () {
|
||||
a.b = 2
|
||||
return '[Redacted]'
|
||||
}
|
||||
},
|
||||
baz: {
|
||||
circle,
|
||||
toJSON: function () {
|
||||
a.baz = circle
|
||||
return '[Redacted]'
|
||||
}
|
||||
}
|
||||
}
|
||||
var o = {
|
||||
a,
|
||||
bar: a
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
a: {
|
||||
b: '[Redacted]',
|
||||
baz: '[Redacted]'
|
||||
},
|
||||
bar: {
|
||||
// TODO: This is a known limitation of the current implementation.
|
||||
// The ideal result would be:
|
||||
//
|
||||
// b: 2,
|
||||
// baz: {
|
||||
// circle: '[Circular]',
|
||||
// some: 'data'
|
||||
// }
|
||||
//
|
||||
b: '[Redacted]',
|
||||
baz: '[Redacted]'
|
||||
}
|
||||
})
|
||||
const actual = fss(o)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular getters are restored when stringified', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
get circle () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
fss(fixture)
|
||||
|
||||
assert.equal(fixture.circle, fixture)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('non-configurable circular getters use a replacer instead of markers', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
Object.defineProperty(fixture, 'circle', {
|
||||
configurable: false,
|
||||
get: function () {
|
||||
return fixture
|
||||
},
|
||||
enumerable: true
|
||||
})
|
||||
|
||||
fss(fixture)
|
||||
|
||||
assert.equal(fixture.circle, fixture)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('getter child circular reference', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: {
|
||||
name: 'Tyrion Lannister',
|
||||
get dinklage () {
|
||||
return fixture.child
|
||||
}
|
||||
},
|
||||
get self () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
child: {
|
||||
dinklage: '[Circular]',
|
||||
name: 'Tyrion Lannister'
|
||||
},
|
||||
name: 'Tywin Lannister',
|
||||
self: '[Circular]'
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('Proxy throwing', function (assert) {
|
||||
assert.plan(1)
|
||||
const s = new stream.PassThrough()
|
||||
s.resume()
|
||||
s.write('', () => {
|
||||
assert.end()
|
||||
})
|
||||
const actual = fss({ s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) })
|
||||
assert.equal(actual, '"[unable to serialize, circular reference is too complex to analyze]"')
|
||||
})
|
||||
|
||||
test('depthLimit option - will replace deep objects', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: {
|
||||
name: 'Tyrion Lannister'
|
||||
},
|
||||
get self () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
child: '[...]',
|
||||
name: 'Tywin Lannister',
|
||||
self: '[Circular]'
|
||||
})
|
||||
const actual = fss(fixture, undefined, undefined, {
|
||||
depthLimit: 1,
|
||||
edgesLimit: 1
|
||||
})
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('edgesLimit option - will replace deep objects', function (assert) {
|
||||
const fixture = {
|
||||
object: {
|
||||
1: { test: 'test' },
|
||||
2: { test: 'test' },
|
||||
3: { test: 'test' },
|
||||
4: { test: 'test' }
|
||||
},
|
||||
array: [
|
||||
{ test: 'test' },
|
||||
{ test: 'test' },
|
||||
{ test: 'test' },
|
||||
{ test: 'test' }
|
||||
],
|
||||
get self () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
array: [{ test: 'test' }, { test: 'test' }, { test: 'test' }, '[...]'],
|
||||
object: {
|
||||
1: { test: 'test' },
|
||||
2: { test: 'test' },
|
||||
3: { test: 'test' },
|
||||
4: '[...]'
|
||||
},
|
||||
self: '[Circular]'
|
||||
})
|
||||
const actual = fss(fixture, undefined, undefined, {
|
||||
depthLimit: 3,
|
||||
edgesLimit: 3
|
||||
})
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
397
node_modules/fast-safe-stringify/test.js
generated
vendored
397
node_modules/fast-safe-stringify/test.js
generated
vendored
@ -1,397 +0,0 @@
|
||||
const test = require('tap').test
|
||||
const fss = require('./')
|
||||
const clone = require('clone')
|
||||
const s = JSON.stringify
|
||||
const stream = require('stream')
|
||||
|
||||
test('circular reference to root', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
fixture.circle = fixture
|
||||
const expected = s({ name: 'Tywin Lannister', circle: '[Circular]' })
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular getter reference to root', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
get circle () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
const expected = s({ name: 'Tywin Lannister', circle: '[Circular]' })
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested circular reference to root', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
fixture.id = { circle: fixture }
|
||||
const expected = s({ name: 'Tywin Lannister', id: { circle: '[Circular]' } })
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('child circular reference', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: { name: 'Tyrion Lannister' }
|
||||
}
|
||||
fixture.child.dinklage = fixture.child
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
child: {
|
||||
name: 'Tyrion Lannister',
|
||||
dinklage: '[Circular]'
|
||||
}
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested child circular reference', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: { name: 'Tyrion Lannister' }
|
||||
}
|
||||
fixture.child.actor = { dinklage: fixture.child }
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
child: {
|
||||
name: 'Tyrion Lannister',
|
||||
actor: { dinklage: '[Circular]' }
|
||||
}
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular objects in an array', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
fixture.hand = [fixture, fixture]
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
hand: ['[Circular]', '[Circular]']
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested circular references in an array', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
offspring: [{ name: 'Tyrion Lannister' }, { name: 'Cersei Lannister' }]
|
||||
}
|
||||
fixture.offspring[0].dinklage = fixture.offspring[0]
|
||||
fixture.offspring[1].headey = fixture.offspring[1]
|
||||
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
offspring: [
|
||||
{ name: 'Tyrion Lannister', dinklage: '[Circular]' },
|
||||
{ name: 'Cersei Lannister', headey: '[Circular]' }
|
||||
]
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular arrays', function (assert) {
|
||||
const fixture = []
|
||||
fixture.push(fixture, fixture)
|
||||
const expected = s(['[Circular]', '[Circular]'])
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested circular arrays', function (assert) {
|
||||
const fixture = []
|
||||
fixture.push(
|
||||
{ name: 'Jon Snow', bastards: fixture },
|
||||
{ name: 'Ramsay Bolton', bastards: fixture }
|
||||
)
|
||||
const expected = s([
|
||||
{ name: 'Jon Snow', bastards: '[Circular]' },
|
||||
{ name: 'Ramsay Bolton', bastards: '[Circular]' }
|
||||
])
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('repeated non-circular references in objects', function (assert) {
|
||||
const daenerys = { name: 'Daenerys Targaryen' }
|
||||
const fixture = {
|
||||
motherOfDragons: daenerys,
|
||||
queenOfMeereen: daenerys
|
||||
}
|
||||
const expected = s(fixture)
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('repeated non-circular references in arrays', function (assert) {
|
||||
const daenerys = { name: 'Daenerys Targaryen' }
|
||||
const fixture = [daenerys, daenerys]
|
||||
const expected = s(fixture)
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('double child circular reference', function (assert) {
|
||||
// create circular reference
|
||||
const child = { name: 'Tyrion Lannister' }
|
||||
child.dinklage = child
|
||||
|
||||
// include it twice in the fixture
|
||||
const fixture = { name: 'Tywin Lannister', childA: child, childB: child }
|
||||
const cloned = clone(fixture)
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
childA: {
|
||||
name: 'Tyrion Lannister',
|
||||
dinklage: '[Circular]'
|
||||
},
|
||||
childB: {
|
||||
name: 'Tyrion Lannister',
|
||||
dinklage: '[Circular]'
|
||||
}
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
|
||||
// check if the fixture has not been modified
|
||||
assert.same(fixture, cloned)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('child circular reference with toJSON', function (assert) {
|
||||
// Create a test object that has an overridden `toJSON` property
|
||||
TestObject.prototype.toJSON = function () {
|
||||
return { special: 'case' }
|
||||
}
|
||||
function TestObject (content) {}
|
||||
|
||||
// Creating a simple circular object structure
|
||||
const parentObject = {}
|
||||
parentObject.childObject = new TestObject()
|
||||
parentObject.childObject.parentObject = parentObject
|
||||
|
||||
// Creating a simple circular object structure
|
||||
const otherParentObject = new TestObject()
|
||||
otherParentObject.otherChildObject = {}
|
||||
otherParentObject.otherChildObject.otherParentObject = otherParentObject
|
||||
|
||||
// Making sure our original tests work
|
||||
assert.same(parentObject.childObject.parentObject, parentObject)
|
||||
assert.same(
|
||||
otherParentObject.otherChildObject.otherParentObject,
|
||||
otherParentObject
|
||||
)
|
||||
|
||||
// Should both be idempotent
|
||||
assert.equal(fss(parentObject), '{"childObject":{"special":"case"}}')
|
||||
assert.equal(fss(otherParentObject), '{"special":"case"}')
|
||||
|
||||
// Therefore the following assertion should be `true`
|
||||
assert.same(parentObject.childObject.parentObject, parentObject)
|
||||
assert.same(
|
||||
otherParentObject.otherChildObject.otherParentObject,
|
||||
otherParentObject
|
||||
)
|
||||
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('null object', function (assert) {
|
||||
const expected = s(null)
|
||||
const actual = fss(null)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('null property', function (assert) {
|
||||
const expected = s({ f: null })
|
||||
const actual = fss({ f: null })
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('nested child circular reference in toJSON', function (assert) {
|
||||
const circle = { some: 'data' }
|
||||
circle.circle = circle
|
||||
const a = {
|
||||
b: {
|
||||
toJSON: function () {
|
||||
a.b = 2
|
||||
return '[Redacted]'
|
||||
}
|
||||
},
|
||||
baz: {
|
||||
circle,
|
||||
toJSON: function () {
|
||||
a.baz = circle
|
||||
return '[Redacted]'
|
||||
}
|
||||
}
|
||||
}
|
||||
const o = {
|
||||
a,
|
||||
bar: a
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
a: {
|
||||
b: '[Redacted]',
|
||||
baz: '[Redacted]'
|
||||
},
|
||||
bar: {
|
||||
b: 2,
|
||||
baz: {
|
||||
some: 'data',
|
||||
circle: '[Circular]'
|
||||
}
|
||||
}
|
||||
})
|
||||
const actual = fss(o)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('circular getters are restored when stringified', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
get circle () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
fss(fixture)
|
||||
|
||||
assert.equal(fixture.circle, fixture)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('non-configurable circular getters use a replacer instead of markers', function (assert) {
|
||||
const fixture = { name: 'Tywin Lannister' }
|
||||
Object.defineProperty(fixture, 'circle', {
|
||||
configurable: false,
|
||||
get: function () {
|
||||
return fixture
|
||||
},
|
||||
enumerable: true
|
||||
})
|
||||
|
||||
fss(fixture)
|
||||
|
||||
assert.equal(fixture.circle, fixture)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('getter child circular reference are replaced instead of marked', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: {
|
||||
name: 'Tyrion Lannister',
|
||||
get dinklage () {
|
||||
return fixture.child
|
||||
}
|
||||
},
|
||||
get self () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
child: {
|
||||
name: 'Tyrion Lannister',
|
||||
dinklage: '[Circular]'
|
||||
},
|
||||
self: '[Circular]'
|
||||
})
|
||||
const actual = fss(fixture)
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('Proxy throwing', function (assert) {
|
||||
assert.plan(1)
|
||||
const s = new stream.PassThrough()
|
||||
s.resume()
|
||||
s.write('', () => {
|
||||
assert.end()
|
||||
})
|
||||
const actual = fss({ s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) })
|
||||
assert.equal(actual, '"[unable to serialize, circular reference is too complex to analyze]"')
|
||||
})
|
||||
|
||||
test('depthLimit option - will replace deep objects', function (assert) {
|
||||
const fixture = {
|
||||
name: 'Tywin Lannister',
|
||||
child: {
|
||||
name: 'Tyrion Lannister'
|
||||
},
|
||||
get self () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
name: 'Tywin Lannister',
|
||||
child: '[...]',
|
||||
self: '[Circular]'
|
||||
})
|
||||
const actual = fss(fixture, undefined, undefined, {
|
||||
depthLimit: 1,
|
||||
edgesLimit: 1
|
||||
})
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('edgesLimit option - will replace deep objects', function (assert) {
|
||||
const fixture = {
|
||||
object: {
|
||||
1: { test: 'test' },
|
||||
2: { test: 'test' },
|
||||
3: { test: 'test' },
|
||||
4: { test: 'test' }
|
||||
},
|
||||
array: [
|
||||
{ test: 'test' },
|
||||
{ test: 'test' },
|
||||
{ test: 'test' },
|
||||
{ test: 'test' }
|
||||
],
|
||||
get self () {
|
||||
return fixture
|
||||
}
|
||||
}
|
||||
|
||||
const expected = s({
|
||||
object: {
|
||||
1: { test: 'test' },
|
||||
2: { test: 'test' },
|
||||
3: { test: 'test' },
|
||||
4: '[...]'
|
||||
},
|
||||
array: [{ test: 'test' }, { test: 'test' }, { test: 'test' }, '[...]'],
|
||||
self: '[Circular]'
|
||||
})
|
||||
const actual = fss(fixture, undefined, undefined, {
|
||||
depthLimit: 3,
|
||||
edgesLimit: 3
|
||||
})
|
||||
assert.equal(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
28
node_modules/help-me/.github/workflows/ci.yml
generated
vendored
28
node_modules/help-me/.github/workflows/ci.yml
generated
vendored
@ -1,28 +0,0 @@
|
||||
name: ci
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest]
|
||||
node-version: [14.x, 16.x, 18.x, 20.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
npm install
|
||||
|
||||
- name: Run tests
|
||||
run: |
|
||||
npm run test
|
||||
22
node_modules/help-me/LICENSE
generated
vendored
22
node_modules/help-me/LICENSE
generated
vendored
@ -1,22 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014-2022 Matteo Collina
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
66
node_modules/help-me/README.md
generated
vendored
66
node_modules/help-me/README.md
generated
vendored
@ -1,66 +0,0 @@
|
||||
help-me
|
||||
=======
|
||||
|
||||
Help command for node, to use with [minimist](http://npm.im/minimist) and [commist](http://npm.im/commist).
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
var helpMe = require('help-me')
|
||||
var path = require('path')
|
||||
var help = helpMe({
|
||||
dir: path.join(__dirname, 'doc'),
|
||||
// the default
|
||||
ext: '.txt'
|
||||
})
|
||||
|
||||
help
|
||||
.createStream(['hello']) // also accepts a plain string
|
||||
.pipe(process.stdout)
|
||||
|
||||
// little helper to do the same
|
||||
help.toStdout(['hello'])
|
||||
```
|
||||
|
||||
Using ESM and top-level await:
|
||||
|
||||
```js
|
||||
import { help } from 'help-me'
|
||||
import { join } from 'desm'
|
||||
|
||||
await help({
|
||||
dir: join(import.meta.url, 'doc'),
|
||||
// the default
|
||||
ext: '.txt'
|
||||
}, ['hello'])
|
||||
```
|
||||
|
||||
Usage with commist
|
||||
------------------
|
||||
|
||||
[Commist](http://npm.im/commist) provides a command system for node.
|
||||
|
||||
```js
|
||||
var commist = require('commist')()
|
||||
var path = require('path')
|
||||
var help = require('help-me')({
|
||||
dir: path.join(__dirname, 'doc')
|
||||
})
|
||||
|
||||
commist.register('help', help.toStdout)
|
||||
|
||||
commist.parse(process.argv.splice(2))
|
||||
```
|
||||
|
||||
Acknowledgements
|
||||
----------------
|
||||
|
||||
This project was kindly sponsored by [nearForm](http://nearform.com).
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
MIT
|
||||
1
node_modules/help-me/doc/hello.txt
generated
vendored
1
node_modules/help-me/doc/hello.txt
generated
vendored
@ -1 +0,0 @@
|
||||
this is hello world
|
||||
5
node_modules/help-me/doc/help.txt
generated
vendored
5
node_modules/help-me/doc/help.txt
generated
vendored
@ -1,5 +0,0 @@
|
||||
HELP-ME by Matteo
|
||||
|
||||
* start starts a script
|
||||
* help shows help
|
||||
|
||||
18
node_modules/help-me/example.js
generated
vendored
18
node_modules/help-me/example.js
generated
vendored
@ -1,18 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const commist = require('commist')()
|
||||
const help = require('./')({
|
||||
dir: path.join(path.dirname(require.main.filename), 'doc')
|
||||
})
|
||||
|
||||
commist.register('help', help.toStdout)
|
||||
commist.register('start', function () {
|
||||
console.log('Starting the script!')
|
||||
})
|
||||
|
||||
const res = commist.parse(process.argv.splice(2))
|
||||
|
||||
if (res) {
|
||||
help.toStdout()
|
||||
}
|
||||
1
node_modules/help-me/fixture/basic/hello.txt
generated
vendored
1
node_modules/help-me/fixture/basic/hello.txt
generated
vendored
@ -1 +0,0 @@
|
||||
ahdsadhdash
|
||||
1
node_modules/help-me/fixture/basic/help.txt
generated
vendored
1
node_modules/help-me/fixture/basic/help.txt
generated
vendored
@ -1 +0,0 @@
|
||||
hello world
|
||||
0
node_modules/help-me/fixture/dir/a/b.txt
generated
vendored
0
node_modules/help-me/fixture/dir/a/b.txt
generated
vendored
1
node_modules/help-me/fixture/no-ext/hello
generated
vendored
1
node_modules/help-me/fixture/no-ext/hello
generated
vendored
@ -1 +0,0 @@
|
||||
ghghghhg
|
||||
1
node_modules/help-me/fixture/sameprefix/hello world.txt
generated
vendored
1
node_modules/help-me/fixture/sameprefix/hello world.txt
generated
vendored
@ -1 +0,0 @@
|
||||
hello world
|
||||
1
node_modules/help-me/fixture/sameprefix/hello.txt
generated
vendored
1
node_modules/help-me/fixture/sameprefix/hello.txt
generated
vendored
@ -1 +0,0 @@
|
||||
hello
|
||||
1
node_modules/help-me/fixture/shortnames/abcde fghi lmno.txt
generated
vendored
1
node_modules/help-me/fixture/shortnames/abcde fghi lmno.txt
generated
vendored
@ -1 +0,0 @@
|
||||
ewweqjewqjewqj
|
||||
1
node_modules/help-me/fixture/shortnames/abcde hello.txt
generated
vendored
1
node_modules/help-me/fixture/shortnames/abcde hello.txt
generated
vendored
@ -1 +0,0 @@
|
||||
45678
|
||||
1
node_modules/help-me/fixture/shortnames/hello world.txt
generated
vendored
1
node_modules/help-me/fixture/shortnames/hello world.txt
generated
vendored
@ -1 +0,0 @@
|
||||
12345
|
||||
134
node_modules/help-me/help-me.js
generated
vendored
134
node_modules/help-me/help-me.js
generated
vendored
@ -1,134 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('fs')
|
||||
const { PassThrough, Writable, pipeline } = require('stream')
|
||||
const process = require('process')
|
||||
const { join } = require('path')
|
||||
|
||||
const defaults = {
|
||||
ext: '.txt',
|
||||
help: 'help'
|
||||
}
|
||||
|
||||
function isDirectory (path) {
|
||||
try {
|
||||
const stat = fs.lstatSync(path)
|
||||
return stat.isDirectory()
|
||||
} catch (err) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
function createDefaultStream () {
|
||||
return new Writable({
|
||||
write (chunk, encoding, callback) {
|
||||
process.stdout.write(chunk, callback)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function helpMe (opts) {
|
||||
opts = Object.assign({}, defaults, opts)
|
||||
|
||||
if (!opts.dir) {
|
||||
throw new Error('missing dir')
|
||||
}
|
||||
|
||||
if (!isDirectory(opts.dir)) {
|
||||
throw new Error(`${opts.dir} is not a directory`)
|
||||
}
|
||||
|
||||
return {
|
||||
createStream: createStream,
|
||||
toStdout: toStdout
|
||||
}
|
||||
|
||||
function createStream (args) {
|
||||
if (typeof args === 'string') {
|
||||
args = args.split(' ')
|
||||
} else if (!args || args.length === 0) {
|
||||
args = [opts.help]
|
||||
}
|
||||
|
||||
const out = new PassThrough()
|
||||
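// Each argument is treated as a word prefix in the help file name,
// e.g. args ['abc', 'fg'] match 'abcde fghi lmno.txt'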
const re = new RegExp(
|
||||
args
|
||||
.map(function (arg) {
|
||||
return arg + '[a-zA-Z0-9]*'
|
||||
})
|
||||
.join('[ /]+')
|
||||
)
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
opts.dir = opts.dir.split('\\').join('/')
|
||||
}
|
||||
|
||||
fs.readdir(opts.dir, function (err, files) {
|
||||
if (err) return out.emit('error', err)
|
||||
|
||||
const regexp = new RegExp('.*' + opts.ext + '$')
|
||||
files = files
|
||||
.filter(function (file) {
|
||||
const matched = file.match(regexp)
|
||||
return !!matched
|
||||
})
|
||||
.map(function (relative) {
|
||||
return { file: join(opts.dir, relative), relative }
|
||||
})
|
||||
.filter(function (file) {
|
||||
return file.relative.match(re)
|
||||
})
|
||||
|
||||
if (files.length === 0) {
|
||||
return out.emit('error', new Error('no such help file'))
|
||||
} else if (files.length > 1) {
|
||||
const exactMatch = files.find(
|
||||
(file) => file.relative === `${args[0]}${opts.ext}`
|
||||
)
|
||||
if (!exactMatch) {
|
||||
out.write('There are ' + files.length + ' help pages ')
|
||||
out.write('that matches the given request, please disambiguate:\n')
|
||||
files.forEach(function (file) {
|
||||
out.write(' * ')
|
||||
out.write(file.relative.replace(opts.ext, ''))
|
||||
out.write('\n')
|
||||
})
|
||||
out.end()
|
||||
return
|
||||
}
|
||||
files = [exactMatch]
|
||||
}
|
||||
|
||||
pipeline(fs.createReadStream(files[0].file), out, () => {})
|
||||
})
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
function toStdout (args = [], opts) {
|
||||
opts = opts || {}
|
||||
const stream = opts.stream || createDefaultStream()
|
||||
const _onMissingHelp = opts.onMissingHelp || onMissingHelp
|
||||
return new Promise((resolve, reject) => {
|
||||
createStream(args)
|
||||
.on('error', (err) => {
|
||||
_onMissingHelp(err, args, stream).then(resolve, reject)
|
||||
})
|
||||
.pipe(stream)
|
||||
.on('close', resolve)
|
||||
.on('end', resolve)
|
||||
})
|
||||
}
|
||||
|
||||
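// Default fallback when no help file matches: print a short notice, then show the general help page.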
function onMissingHelp (_, args, stream) {
|
||||
stream.write(`no such help file: ${args.join(' ')}.\n\n`)
|
||||
return toStdout([], { stream, async onMissingHelp () {} })
|
||||
}
|
||||
}
|
||||
|
||||
function help (opts, args) {
|
||||
return helpMe(opts).toStdout(args, opts)
|
||||
}
|
||||
|
||||
module.exports = helpMe
|
||||
module.exports.help = help
|
||||
36
node_modules/help-me/package.json
generated
vendored
36
node_modules/help-me/package.json
generated
vendored
@ -1,36 +0,0 @@
|
||||
{
|
||||
"name": "help-me",
|
||||
"version": "5.0.0",
|
||||
"description": "Help command for node, partner of minimist and commist",
|
||||
"main": "help-me.js",
|
||||
"scripts": {
|
||||
"test": "standard && node test.js | tap-spec"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/mcollina/help-me.git"
|
||||
},
|
||||
"keywords": [
|
||||
"help",
|
||||
"command",
|
||||
"minimist",
|
||||
"commist"
|
||||
],
|
||||
"author": "Matteo Collina <hello@matteocollina.com>",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/mcollina/help-me/issues"
|
||||
},
|
||||
"homepage": "https://github.com/mcollina/help-me",
|
||||
"devDependencies": {
|
||||
"commist": "^2.0.0",
|
||||
"concat-stream": "^2.0.0",
|
||||
"pre-commit": "^1.1.3",
|
||||
"proxyquire": "^2.1.3",
|
||||
"standard": "^16.0.0",
|
||||
"tap-spec": "^5.0.0",
|
||||
"tape": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
}
|
||||
}
|
||||
316
node_modules/help-me/test.js
generated
vendored
316
node_modules/help-me/test.js
generated
vendored
@ -1,316 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('tape')
|
||||
const concat = require('concat-stream')
|
||||
const fs = require('fs')
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const helpMe = require('./')
|
||||
const proxyquire = require('proxyquire')
|
||||
|
||||
test('throws if no directory is passed', function (t) {
|
||||
try {
|
||||
helpMe()
|
||||
t.fail()
|
||||
} catch (err) {
|
||||
t.equal(err.message, 'missing dir')
|
||||
}
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('throws if a normal file is passed', function (t) {
|
||||
try {
|
||||
helpMe({
|
||||
dir: __filename
|
||||
})
|
||||
t.fail()
|
||||
} catch (err) {
|
||||
t.equal(err.message, `${__filename} is not a directory`)
|
||||
}
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('throws if the directory cannot be accessed', function (t) {
|
||||
try {
|
||||
helpMe({
|
||||
dir: './foo'
|
||||
})
|
||||
t.fail()
|
||||
} catch (err) {
|
||||
t.equal(err.message, './foo is not a directory')
|
||||
}
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('show a generic help.txt from a folder to a stream with relative path in dir', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helpMe({
|
||||
dir: 'fixture/basic'
|
||||
}).createStream()
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/basic/help.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
test('show a generic help.txt from a folder to a stream with absolute path in dir', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helpMe({
|
||||
dir: path.join(__dirname, 'fixture/basic')
|
||||
}).createStream()
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/basic/help.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
test('custom help command with an array', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helpMe({
|
||||
dir: 'fixture/basic'
|
||||
}).createStream(['hello'])
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/basic/hello.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
test('custom help command without an ext', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helpMe({
|
||||
dir: 'fixture/no-ext',
|
||||
ext: ''
|
||||
}).createStream(['hello'])
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/no-ext/hello', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
test('custom help command with a string', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helpMe({
|
||||
dir: 'fixture/basic'
|
||||
}).createStream('hello')
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/basic/hello.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
test('missing help file', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
helpMe({
|
||||
dir: 'fixture/basic'
|
||||
}).createStream('abcde')
|
||||
.on('error', function (err) {
|
||||
t.equal(err.message, 'no such help file')
|
||||
})
|
||||
.resume()
|
||||
})
|
||||
|
||||
test('custom help command with an array', function (t) {
|
||||
const helper = helpMe({
|
||||
dir: 'fixture/shortnames'
|
||||
})
|
||||
|
||||
t.test('abbreviates two words in one', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helper
|
||||
.createStream(['world'])
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/shortnames/hello world.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
t.test('abbreviates three words in two', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helper
|
||||
.createStream(['abcde', 'fghi'])
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/shortnames/abcde fghi lmno.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
t.test('abbreviates a word', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helper
|
||||
.createStream(['abc', 'fg'])
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/shortnames/abcde fghi lmno.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
t.test('abbreviates a word using strings', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
helper
|
||||
.createStream('abc fg')
|
||||
.pipe(concat(function (data) {
|
||||
fs.readFile('fixture/shortnames/abcde fghi lmno.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
t.test('print a disambiguation', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const expected = '' +
|
||||
'There are 2 help pages that matches the given request, please disambiguate:\n' +
|
||||
' * abcde fghi lmno\n' +
|
||||
' * abcde hello\n'
|
||||
|
||||
helper
|
||||
.createStream(['abc'])
|
||||
.pipe(concat({ encoding: 'string' }, function (data) {
|
||||
t.equal(data, expected)
|
||||
}))
|
||||
})
|
||||
|
||||
t.test('choose exact match over partial', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
helpMe({
|
||||
dir: 'fixture/sameprefix'
|
||||
}).createStream(['hello'])
|
||||
.pipe(concat({ encoding: 'string' }, function (data) {
|
||||
t.equal(data, 'hello')
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
test('toStdout helper', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
let completed = false
|
||||
const stream = concat(function (data) {
|
||||
completed = true
|
||||
fs.readFile('fixture/basic/help.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
})
|
||||
|
||||
await helpMe({
|
||||
dir: 'fixture/basic'
|
||||
}).toStdout([], { stream })
|
||||
|
||||
t.ok(completed)
|
||||
})
|
||||
|
||||
test('handle error in toStdout', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
let completed = false
|
||||
const stream = concat(function (data) {
|
||||
completed = true
|
||||
fs.readFile('fixture/basic/help.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), 'no such help file: something.\n\n' + expected.toString())
|
||||
})
|
||||
})
|
||||
|
||||
await helpMe({
|
||||
dir: 'fixture/basic'
|
||||
}).toStdout(['something'], {
|
||||
stream
|
||||
})
|
||||
|
||||
t.ok(completed)
|
||||
})
|
||||
|
||||
test('customize missing help file message', async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const stream = concat(function (data) {
|
||||
t.equal(data.toString(), 'kaboom\n\n')
|
||||
})
|
||||
|
||||
await helpMe({
|
||||
dir: 'fixture/basic'
|
||||
}).toStdout(['something'], {
|
||||
stream,
|
||||
async onMissingHelp (err, args, stream) {
|
||||
t.equal(err.message, 'no such help file')
|
||||
t.deepEquals(args, ['something'])
|
||||
stream.end('kaboom\n\n')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('toStdout without factory', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
let completed = false
|
||||
const stream = concat(function (data) {
|
||||
completed = true
|
||||
fs.readFile('fixture/basic/help.txt', function (err, expected) {
|
||||
t.error(err)
|
||||
t.equal(data.toString(), expected.toString())
|
||||
})
|
||||
})
|
||||
|
||||
await helpMe.help({
|
||||
dir: 'fixture/basic',
|
||||
stream
|
||||
}, [])
|
||||
|
||||
t.ok(completed)
|
||||
})
|
||||
|
||||
test('should allow for awaiting the response with default stdout stream', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const _process = Object.create(process)
|
||||
const stdout = Object.create(process.stdout)
|
||||
Object.defineProperty(_process, 'stdout', {
|
||||
value: stdout
|
||||
})
|
||||
|
||||
let completed = false
|
||||
stdout.write = (data, cb) => {
|
||||
t.equal(data.toString(), 'hello world' + os.EOL)
|
||||
completed = true
|
||||
cb()
|
||||
}
|
||||
|
||||
const helpMe = proxyquire('./help-me', {
|
||||
process: _process
|
||||
})
|
||||
|
||||
await helpMe.help({
|
||||
dir: 'fixture/basic'
|
||||
})
|
||||
|
||||
t.ok(completed)
|
||||
})
|
||||
21
node_modules/joycon/LICENSE
generated
vendored
21
node_modules/joycon/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)

Copyright (c) egoist <0x142857@gmail.com> (https://github.com/egoist)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
133 node_modules/joycon/README.md generated vendored
@ -1,133 +0,0 @@
# joycon

[](https://npmjs.com/package/joycon) [](https://npmjs.com/package/joycon) [](https://packagephobia.now.sh/result?p=joycon@2.0.0) [](https://circleci.com/gh/egoist/joycon/tree/master) [](https://github.com/egoist/donate) [](https://chat.egoist.moe)

## Differences with [cosmiconfig](https://github.com/davidtheclark/cosmiconfig)?

JoyCon is zero-dependency but feature-complete.

## Install

```bash
yarn add joycon
```

## Usage

```js
const JoyCon = require('joycon')

const joycon = new JoyCon()

joycon.load(['package-lock.json', 'yarn.lock'])
  .then(result => {
    // result is {} when files do not exist
    // otherwise { path, data }
  })
```

By default, non-js files are parsed as JSON; if you want something different, you can add a loader:

```js
const joycon = new JoyCon()

joycon.addLoader({
  test: /\.toml$/,
  load(filepath) {
    return require('toml').parse(filepath)
  }
})

joycon.load(['cargo.toml'])
```

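Depending on the parser, the loader may need to read the file itself before parsing. A minimal sketch, assuming the `toml` package (whose `parse()` takes the file's text as a string) and Node's `fs.promises`:

```js
const fs = require('fs').promises

joycon.addLoader({
  test: /\.toml$/,
  async load (filepath) {
    // read the raw TOML text, then hand it to the parser;
    // joycon awaits the returned promise
    const text = await fs.readFile(filepath, 'utf8')
    return require('toml').parse(text)
  }
})
```
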
## API

### constructor([options])

#### options

##### files

- Type: `string[]`

The files to search.

##### cwd

The directory to search files.

##### stopDir

The directory to stop searching.

##### packageKey

You can load config from a certain property in a `package.json` file. For example, when you set `packageKey: 'babel'`, it will load the `babel` property in `package.json` instead of the entire data.

##### parseJSON

- Type: `(str: string) => any`
- Default: `JSON.parse`

The function used to parse a JSON string.

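A rough illustration of how these options fit together (the file names and the `myapp` key below are hypothetical):

```js
const os = require('os')
const JoyCon = require('joycon')

// look for a "myapp" config, starting in the current directory and
// stopping at the user's home directory
const joycon = new JoyCon({
  files: ['myapp.config.js', '.myapprc.json', 'package.json'],
  cwd: process.cwd(),
  stopDir: os.homedir(),
  packageKey: 'myapp',
  parseJSON: str => JSON.parse(str)
})
```
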
### resolve([files], [cwd], [stopDir])

### resolve([options])

`files` defaults to `options.files`.

`cwd` defaults to `options.cwd`.

`stopDir` defaults to `options.stopDir` then `path.parse(cwd).root`.

If using a single object `options`, it will be the same as constructor options.

Search files and resolve the path of the file we found.

There's also `.resolveSync` method.

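For example, given a `joycon` instance like the one above, both call styles resolve the same way (file names are illustrative):

```js
async function findConfig (joycon) {
  // positional form: files, cwd, stopDir
  const fromArgs = await joycon.resolve(['.myapprc.json'], process.cwd())

  // options-object form, using the same keys as the constructor
  const fromObject = await joycon.resolve({ files: ['.myapprc.json'] })

  // both resolve to the absolute path of the first match, or null
  return fromArgs || fromObject
}

// synchronous variant:
// const filepath = joycon.resolveSync(['.myapprc.json'])
```
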
### load(...args)

The signature is the same as [resolve](#resolvefiles-cwd-stopdir).

Search files and resolve `{ path, data }` of the file we found.

There's also `.loadSync` method.

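A sketch of the return value, again assuming a `joycon` instance and illustrative file names:

```js
async function loadConfig (joycon) {
  const { path, data } = await joycon.load(['.myapprc.json', 'package.json'])

  if (!path) {
    // nothing was found: load() resolves to an empty object
    return {}
  }

  return data
}

// synchronous variant:
// const { path, data } = joycon.loadSync(['.myapprc.json'])
```
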
### addLoader(Loader)

```typescript
interface Loader {
  name?: string
  test: RegExp
  load(filepath: string)?: Promise<any>
  loadSync(filepath: string)?: any
}
```

At least one of `load` and `loadSync` is required, depending on whether you're calling the synchronous methods or not.

### removeLoader(name)

Remove loaders by loader name.

### clearCache()

Each JoyCon instance uses its own cache.

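For instance, a named loader can be dropped again by its name, and `clearCache()` resets everything the instance has memoised (the `.list` loader below is purely illustrative):

```js
const fs = require('fs')

joycon.addLoader({
  name: 'lines',
  test: /\.list$/,
  // trivial loader: return the file as an array of lines
  loadSync: filepath => fs.readFileSync(filepath, 'utf8').split('\n')
})

// later: remove it by name
joycon.removeLoader('lines')

// drop cached existence checks, package.json data and loaded configs
joycon.clearCache()
```
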
## Contributing

1. Fork it!
2. Create your feature branch: `git checkout -b my-new-feature`
3. Commit your changes: `git commit -am 'Add some feature'`
4. Push to the branch: `git push origin my-new-feature`
5. Submit a pull request :D

## Author

**joycon** © [egoist](https://github.com/egoist), Released under the [MIT](./LICENSE) License.<br>
Authored and maintained by egoist with help from contributors ([list](https://github.com/egoist/joycon/contributors)).

> [github.com/egoist](https://github.com/egoist) · GitHub [@egoist](https://github.com/egoist) · Twitter [@_egoistlily](https://twitter.com/_egoistlily)
286 node_modules/joycon/lib/index.js generated vendored
@ -1,286 +0,0 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = void 0;
|
||||
|
||||
var _fs = _interopRequireDefault(require("fs"));
|
||||
|
||||
var _path = _interopRequireDefault(require("path"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
const readFileSync = fp => {
|
||||
return _fs.default.readFileSync(fp, 'utf8');
|
||||
};
|
||||
|
||||
const pathExists = fp => new Promise(resolve => {
|
||||
_fs.default.access(fp, err => {
|
||||
resolve(!err);
|
||||
});
|
||||
});
|
||||
|
||||
const pathExistsSync = _fs.default.existsSync;
|
||||
|
||||
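// JoyCon keeps three per-instance caches: existsCache (file-existence
// checks), packageJsonCache (package.json data that contained the requested
// packageKey) and loadCache (parsed config data keyed by file path).
// clearCache() empties all three.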
class JoyCon {
  constructor({
    files,
    cwd = process.cwd(),
    stopDir,
    packageKey,
    parseJSON = JSON.parse
  } = {}) {
    this.options = {
      files,
      cwd,
      stopDir,
      packageKey,
      parseJSON
    };
    this.existsCache = new Map();
    this.loaders = new Set();
    this.packageJsonCache = new Map();
    this.loadCache = new Map();
  }

  addLoader(loader) {
    this.loaders.add(loader);
    return this;
  }

  removeLoader(name) {
    for (const loader of this.loaders) {
      if (name && loader.name === name) {
        this.loaders.delete(loader);
      }
    }

    return this;
  }

  async recusivelyResolve(options) {
    if (options.cwd === options.stopDir || _path.default.basename(options.cwd) === 'node_modules') {
      return null;
    }

    for (const filename of options.files) {
      const file = _path.default.resolve(options.cwd, filename);

      const exists = process.env.NODE_ENV !== 'test' && this.existsCache.has(file) ? this.existsCache.get(file) : await pathExists(file);
      this.existsCache.set(file, exists);

      if (exists) {
        if (!options.packageKey || _path.default.basename(file) !== 'package.json') {
          return file;
        }

        const data = require(file);

        delete require.cache[file];
        const hasPackageKey = Object.prototype.hasOwnProperty.call(data, options.packageKey);

        if (hasPackageKey) {
          this.packageJsonCache.set(file, data);
          return file;
        }
      }

      continue;
    }

    return this.recusivelyResolve(Object.assign({}, options, {
      cwd: _path.default.dirname(options.cwd)
    }));
  }

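  // Synchronous mirror of recusivelyResolve above: the same search logic,
  // but file existence is checked with fs.existsSync instead of fs.access.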
  recusivelyResolveSync(options) {
    if (options.cwd === options.stopDir || _path.default.basename(options.cwd) === 'node_modules') {
      return null;
    }

    for (const filename of options.files) {
      const file = _path.default.resolve(options.cwd, filename);

      const exists = process.env.NODE_ENV !== 'test' && this.existsCache.has(file) ? this.existsCache.get(file) : pathExistsSync(file);
      this.existsCache.set(file, exists);

      if (exists) {
        if (!options.packageKey || _path.default.basename(file) !== 'package.json') {
          return file;
        }

        const data = require(file);

        delete require.cache[file];
        const hasPackageKey = Object.prototype.hasOwnProperty.call(data, options.packageKey);

        if (hasPackageKey) {
          this.packageJsonCache.set(file, data);
          return file;
        }
      }

      continue;
    }

    return this.recusivelyResolveSync(Object.assign({}, options, {
      cwd: _path.default.dirname(options.cwd)
    }));
  }

  async resolve(...args) {
    const options = this.normalizeOptions(args);
    return this.recusivelyResolve(options);
  }

  resolveSync(...args) {
    const options = this.normalizeOptions(args);
    return this.recusivelyResolveSync(options);
  }

  runLoaderSync(loader, filepath) {
    return loader.loadSync(filepath);
  }

  runLoader(loader, filepath) {
    if (!loader.load) return loader.loadSync(filepath);
    return loader.load(filepath);
  }

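  // load() resolves the first matching file, then runs the first registered
  // loader whose `test` matches the path, falling back to a default loader
  // that require()s .js/.cjs files and JSON-parses everything else. Parsed
  // data is memoised in loadCache, keyed by file path.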
  async load(...args) {
    const options = this.normalizeOptions(args);
    const filepath = await this.recusivelyResolve(options);

    if (filepath) {
      const defaultLoader = {
        test: /\.+/,
        loadSync: filepath => {
          const extname = _path.default.extname(filepath).slice(1);

          if (extname === 'js' || extname === 'cjs') {
            delete require.cache[filepath];
            return require(filepath);
          }

          if (this.packageJsonCache.has(filepath)) {
            return this.packageJsonCache.get(filepath)[options.packageKey];
          }

          const data = this.options.parseJSON(readFileSync(filepath));
          return data;
        }
      };
      const loader = this.findLoader(filepath) || defaultLoader;
      let data;

      if (this.loadCache.has(filepath)) {
        data = this.loadCache.get(filepath);
      } else {
        data = await this.runLoader(loader, filepath);
        this.loadCache.set(filepath, data);
      }

      return {
        path: filepath,
        data
      };
    }

    return {};
  }

  loadSync(...args) {
    const options = this.normalizeOptions(args);
    const filepath = this.recusivelyResolveSync(options);

    if (filepath) {
      const defaultLoader = {
        test: /\.+/,
        loadSync: filepath => {
          const extname = _path.default.extname(filepath).slice(1);

          if (extname === 'js' || extname === 'cjs') {
            delete require.cache[filepath];
            return require(filepath);
          }

          if (this.packageJsonCache.has(filepath)) {
            return this.packageJsonCache.get(filepath)[options.packageKey];
          }

          const data = this.options.parseJSON(readFileSync(filepath));
          return data;
        }
      };
      const loader = this.findLoader(filepath) || defaultLoader;
      let data;

      if (this.loadCache.has(filepath)) {
        data = this.loadCache.get(filepath);
      } else {
        data = this.runLoaderSync(loader, filepath);
        this.loadCache.set(filepath, data);
      }

      return {
        path: filepath,
        data
      };
    }

    return {};
  }

  findLoader(filepath) {
    for (const loader of this.loaders) {
      if (loader.test && loader.test.test(filepath)) {
        return loader;
      }
    }

    return null;
  }

  clearCache() {
    this.existsCache.clear();
    this.packageJsonCache.clear();
    this.loadCache.clear();
    return this;
  }

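  // normalizeOptions accepts either a single options object (merged over the
  // constructor options) or positional (files, cwd, stopDir) arguments, then
  // resolves cwd and stopDir to absolute paths and validates `files`.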
  normalizeOptions(args) {
    const options = Object.assign({}, this.options);

    if (Object.prototype.toString.call(args[0]) === '[object Object]') {
      Object.assign(options, args[0]);
    } else {
      if (args[0]) {
        options.files = args[0];
      }

      if (args[1]) {
        options.cwd = args[1];
      }

      if (args[2]) {
        options.stopDir = args[2];
      }
    }

    options.cwd = _path.default.resolve(options.cwd);
    options.stopDir = options.stopDir ? _path.default.resolve(options.stopDir) : _path.default.parse(options.cwd).root;

    if (!options.files || options.files.length === 0) {
      throw new Error('[joycon] files must be an non-empty array!');
    }

    options.__normalized__ = true;
    return options;
  }

}

exports.default = JoyCon;
module.exports = JoyCon;
module.exports.default = JoyCon;
Some files were not shown because too many files have changed in this diff.