diff --git a/node_modules/.bin/pino b/node_modules/.bin/pino
new file mode 100644
index 0000000..bc1666b
--- /dev/null
+++ b/node_modules/.bin/pino
@@ -0,0 +1,16 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*)
+        if command -v cygpath > /dev/null 2>&1; then
+            basedir=`cygpath -w "$basedir"`
+        fi
+    ;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../pino/bin.js" "$@"
+else
+  exec node "$basedir/../pino/bin.js" "$@"
+fi
diff --git a/node_modules/.bin/pino-pretty b/node_modules/.bin/pino-pretty
new file mode 100644
index 0000000..24c3df2
--- /dev/null
+++ b/node_modules/.bin/pino-pretty
@@ -0,0 +1,16 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*)
+        if command -v cygpath > /dev/null 2>&1; then
+            basedir=`cygpath -w "$basedir"`
+        fi
+    ;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../pino-pretty/bin.js" "$@"
+else
+  exec node "$basedir/../pino-pretty/bin.js" "$@"
+fi
diff --git a/node_modules/.bin/pino-pretty.cmd b/node_modules/.bin/pino-pretty.cmd
new file mode 100644
index 0000000..dae2150
--- /dev/null
+++ b/node_modules/.bin/pino-pretty.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\pino-pretty\bin.js" %*
diff --git a/node_modules/.bin/pino-pretty.ps1 b/node_modules/.bin/pino-pretty.ps1
new file mode 100644
index 0000000..905a97b
--- /dev/null
+++ b/node_modules/.bin/pino-pretty.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../pino-pretty/bin.js" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../pino-pretty/bin.js" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../pino-pretty/bin.js" $args
+  } else {
+    & "node$exe" "$basedir/../pino-pretty/bin.js" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/pino.cmd b/node_modules/.bin/pino.cmd
new file mode 100644
index 0000000..5bd8981
--- /dev/null
+++ b/node_modules/.bin/pino.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\pino\bin.js" %*
diff --git a/node_modules/.bin/pino.ps1 b/node_modules/.bin/pino.ps1
new file mode 100644
index 0000000..72fc9d9
--- /dev/null
+++ b/node_modules/.bin/pino.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
"$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../pino/bin.js" $args + } else { + & "$basedir/node$exe" "$basedir/../pino/bin.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../pino/bin.js" $args + } else { + & "node$exe" "$basedir/../pino/bin.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..1634ad7 --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,281 @@ +{ + "name": "polymech-astro", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@pinojs/redact": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@pinojs/redact/-/redact-0.4.0.tgz", + "integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==", + "license": "MIT" + }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "license": "MIT" + }, + "node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/fast-copy": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-4.0.2.tgz", + "integrity": "sha512-ybA6PDXIXOXivLJK/z9e+Otk7ve13I4ckBvGO5I2RRmBU1gMHLVDJYEuJYhGwez7YNlYji2M2DvVU+a9mSFDlw==", + "license": "MIT" + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "license": "MIT" + }, + "node_modules/help-me": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz", + "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==", + "license": "MIT" + }, + "node_modules/joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": 
"sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/pino": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-10.1.0.tgz", + "integrity": "sha512-0zZC2ygfdqvqK8zJIr1e+wT1T/L+LF6qvqvbzEQ6tiMAoTqEVK9a1K3YRu8HEUvGEvNqZyPJTtb2sNIoTkB83w==", + "license": "MIT", + "dependencies": { + "@pinojs/redact": "^0.4.0", + "atomic-sleep": "^1.0.0", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^4.0.1", + "thread-stream": "^3.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz", + "integrity": "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-pretty": { + "version": "13.1.3", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.3.tgz", + "integrity": "sha512-ttXRkkOz6WWC95KeY9+xxWL6AtImwbyMHrL1mSwqwW9u+vLp/WIElvHvCSDg0xO/Dzrggz1zv3rN5ovTRVowKg==", + "license": "MIT", + "dependencies": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^4.0.0", + "fast-safe-stringify": "^2.1.1", + "help-me": "^5.0.0", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^3.0.0", + "pump": "^3.0.0", + "secure-json-parse": "^4.0.0", + "sonic-boom": "^4.0.1", + "strip-json-comments": "^5.0.2" + }, + "bin": { + "pino-pretty": "bin.js" + } + }, + "node_modules/pino-pretty/node_modules/pino-abstract-transport": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz", + "integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-std-serializers": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz", + "integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==", + "license": "MIT" + }, + "node_modules/process-warning": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", + "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==", + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "license": "MIT" + }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/secure-json-parse": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.1.0.tgz", + "integrity": "sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/sonic-boom": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", + "integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/strip-json-comments": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/thread-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", + "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==", + "license": "MIT", + "dependencies": { + "real-require": "^0.2.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + } + } +} diff --git 
diff --git a/node_modules/@pinojs/redact/.github/dependabot.yml b/node_modules/@pinojs/redact/.github/dependabot.yml
new file mode 100644
index 0000000..35d66ca
--- /dev/null
+++ b/node_modules/@pinojs/redact/.github/dependabot.yml
@@ -0,0 +1,13 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
+
+  - package-ecosystem: "npm"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
diff --git a/node_modules/@pinojs/redact/.github/workflows/ci.yml b/node_modules/@pinojs/redact/.github/workflows/ci.yml
new file mode 100644
index 0000000..29edd64
--- /dev/null
+++ b/node_modules/@pinojs/redact/.github/workflows/ci.yml
@@ -0,0 +1,48 @@
+name: CI
+
+on:
+  push:
+    branches:
+      - main
+      - 'v*'
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+  pull_request:
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+
+# This allows a subsequently queued workflow run to interrupt previous runs
+concurrency:
+  group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
+  cancel-in-progress: true
+
+jobs:
+  test:
+    name: ${{ matrix.node-version }} ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    permissions:
+      contents: read
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [macOS-latest, windows-latest, ubuntu-latest]
+        node-version: [18, 20, 22, 24]
+
+    steps:
+      - name: Check out repo
+        uses: actions/checkout@v5.0.0
+        with:
+          persist-credentials: false
+
+      - name: Setup Node ${{ matrix.node-version }}
+        uses: actions/setup-node@v5
+        with:
+          node-version: ${{ matrix.node-version }}
+
+      - name: Install dependencies
+        run: npm i --ignore-scripts
+
+      - name: Run tests
+        run: npm run test
diff --git a/node_modules/@pinojs/redact/.github/workflows/publish-release.yml b/node_modules/@pinojs/redact/.github/workflows/publish-release.yml
new file mode 100644
index 0000000..0defa19
--- /dev/null
+++ b/node_modules/@pinojs/redact/.github/workflows/publish-release.yml
@@ -0,0 +1,43 @@
+name: Publish release
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'The version number to tag and release'
+        required: true
+        type: string
+      prerelease:
+        description: 'Release as pre-release'
+        required: false
+        type: boolean
+        default: false
+
+jobs:
+  release-npm:
+    runs-on: ubuntu-latest
+    environment: main
+    permissions:
+      contents: write
+      id-token: write
+    steps:
+      - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4
+      - uses: actions/setup-node@v5
+        with:
+          node-version: '22'
+          registry-url: 'https://registry.npmjs.org'
+      - run: npm install npm -g
+      - run: npm install
+      - name: Change version number and sync
+        run: |
+          node scripts/sync-version.mjs ${{ inputs.version }}
+      - name: GIT commit and push all changed files
+        run: |
+          git config --global user.name "mcollina"
+          git config --global user.email "hello@matteocollina.com"
+          git commit -n -a -m "Bumped v${{ inputs.version }}"
+          git push origin HEAD:${{ github.ref }}
+      - run: npm publish --access public --tag ${{ inputs.prerelease == true && 'next' || 'latest' }}
+      - name: 'Create release notes'
+        run: |
+          npx @matteo.collina/release-notes -a ${{ secrets.GITHUB_TOKEN }} -t v${{ inputs.version }} -r redact -o pinojs ${{ github.event.inputs.prerelease == 'true' && '-p' || '' }} -c ${{ github.ref }}
diff --git a/node_modules/@pinojs/redact/LICENSE b/node_modules/@pinojs/redact/LICENSE
new file mode 100644
index 0000000..135f89b
--- /dev/null
+++ b/node_modules/@pinojs/redact/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 pinojs contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/@pinojs/redact/README.md b/node_modules/@pinojs/redact/README.md
new file mode 100644
index 0000000..5067010
--- /dev/null
+++ b/node_modules/@pinojs/redact/README.md
@@ -0,0 +1,350 @@
+# @pinojs/redact
+
+> Smart object redaction for JavaScript applications - safe AND fast!
+
+Redact JS objects with the same API as [fast-redact](https://github.com/davidmarkclements/fast-redact), but uses innovative **selective cloning** instead of mutating the original. This provides immutability guarantees with **performance competitive** to fast-redact for real-world usage patterns.
+
+## Install
+
+```bash
+npm install @pinojs/redact
+```
+
+## Usage
+
+```js
+const slowRedact = require('@pinojs/redact')
+
+const redact = slowRedact({
+  paths: ['headers.cookie', 'headers.authorization', 'user.password']
+})
+
+const obj = {
+  headers: {
+    cookie: 'secret-session-token',
+    authorization: 'Bearer abc123',
+    'x-forwarded-for': '192.168.1.1'
+  },
+  user: {
+    name: 'john',
+    password: 'secret123'
+  }
+}
+
+console.log(redact(obj))
+// Output: {"headers":{"cookie":"[REDACTED]","authorization":"[REDACTED]","x-forwarded-for":"192.168.1.1"},"user":{"name":"john","password":"[REDACTED]"}}
+
+// Original object is completely unchanged:
+console.log(obj.headers.cookie) // 'secret-session-token'
+```
+
+## API
+
+### slowRedact(options) → Function
+
+Creates a redaction function with the specified options.
+
+#### Options
+
+- **paths** `string[]` (required): An array of strings describing the nested location of a key in an object
+- **censor** `any` (optional, default: `'[REDACTED]'`): The value to replace sensitive data with. Can be a static value or function.
+- **serialize** `Function|boolean` (optional, default: `JSON.stringify`): Serialization function. Set to `false` to return the redacted object.
+- **remove** `boolean` (optional, default: `false`): Remove redacted keys from serialized output
+- **strict** `boolean` (optional, default: `true`): Throw on non-object values or pass through primitives
+
+#### Path Syntax
+
+Supports the same path syntax as fast-redact:
+
+- **Dot notation**: `'user.name'`, `'headers.cookie'`
+- **Bracket notation**: `'user["password"]'`, `'headers["X-Forwarded-For"]'`
+- **Array indices**: `'users[0].password'`, `'items[1].secret'`
+- **Wildcards**:
+  - Terminal: `'users.*.password'` (redacts password for all users)
+  - Intermediate: `'*.password'` (redacts password at any level)
+  - Array wildcard: `'items.*'` (redacts all array elements)
+
+#### Examples
+
+**Custom censor value:**
+```js
+const redact = slowRedact({
+  paths: ['password'],
+  censor: '***HIDDEN***'
+})
+```
+
+**Dynamic censor function:**
+```js
+const redact = slowRedact({
+  paths: ['password'],
+  censor: (value, path) => `REDACTED:${path}`
+})
+```
+
+**Return object instead of JSON string:**
+```js
+const redact = slowRedact({
+  paths: ['secret'],
+  serialize: false
+})
+
+const result = redact({ secret: 'hidden', public: 'data' })
+console.log(result.secret) // '[REDACTED]'
+console.log(result.public) // 'data'
+
+// Restore original values
+const restored = result.restore()
+console.log(restored.secret) // 'hidden'
+```
+
+**Custom serialization:**
+```js
+const redact = slowRedact({
+  paths: ['password'],
+  serialize: obj => JSON.stringify(obj, null, 2)
+})
+```
+
+**Remove keys instead of redacting:**
+```js
+const redact = slowRedact({
+  paths: ['password', 'user.secret'],
+  remove: true
+})
+
+const obj = { username: 'john', password: 'secret123', user: { name: 'Jane', secret: 'hidden' } }
+console.log(redact(obj))
+// Output: {"username":"john","user":{"name":"Jane"}}
+// Note: 'password' and 'user.secret' are completely absent, not redacted
+```
+
+**Wildcard patterns:**
+```js
+// Redact all properties in secrets object
+const redact1 = slowRedact({ paths: ['secrets.*'] })
+
+// Redact password for any user
+const redact2 = slowRedact({ paths: ['users.*.password'] })
+
+// Redact all items in an array
+const redact3 = slowRedact({ paths: ['items.*'] })
+
+// Remove all secrets instead of redacting them
+const redact4 = slowRedact({ paths: ['secrets.*'], remove: true })
+```
+
+## Key Differences from fast-redact
+
+### Safety First
+- **No mutation**: Original objects are never modified
+- **Selective cloning**: Only clones paths that need redaction, shares references for everything else
+- **Restore capability**: Can restore original values when `serialize: false`
+
+### Feature Compatibility
+- **Remove option**: Full compatibility with fast-redact's `remove: true` option to completely omit keys from output
+- **All path patterns**: Supports same syntax including wildcards, bracket notation, and array indices
+- **Censor functions**: Dynamic censoring with path information passed as arrays
+- **Serialization**: Custom serializers and `serialize: false` mode
+
+### Smart Performance Approach
+- **Selective cloning**: Analyzes redaction paths and only clones necessary object branches
+- **Reference sharing**: Non-redacted properties maintain original object references
+- **Memory efficiency**: Dramatically reduced memory usage for large objects with minimal redaction
+- **Setup-time optimization**: Path analysis happens once during setup, not per redaction
+
+### When to Use @pinojs/redact
+- When immutability is critical
+- When you need to preserve original objects
+- When objects are shared across multiple contexts
+- In functional programming environments
+- When debugging and you need to compare before/after
+- **Large objects with selective redaction** (now performance-competitive!)
+- When memory efficiency with reference sharing is important
+
+### When to Use fast-redact
+- When absolute maximum performance is critical
+- In extremely high-throughput scenarios (>100,000 ops/sec)
+- When you control the object lifecycle and mutation is acceptable
+- Very small objects where setup overhead matters
+
+## Performance Benchmarks
+
+@pinojs/redact uses **selective cloning** that provides good performance while maintaining immutability guarantees:
+
+### Performance Results
+
+| Operation Type | @pinojs/redact | fast-redact | Performance Ratio |
+|---------------|-------------|-------------|-------------------|
+| **Small objects** | ~690ns | ~200ns | ~3.5x slower |
+| **Large objects (minimal redaction)** | **~18μs** | ~17μs | **~same performance** |
+| **Large objects (wildcards)** | **~48μs** | ~37μs | **~1.3x slower** |
+| **No redaction (large objects)** | **~18μs** | ~17μs | **~same performance** |
+
+### Performance Improvements
+
+@pinojs/redact is performance-competitive with fast-redact for large objects.
+
+1. **Selective cloning approach**: Only clones object paths that need redaction
+2. **Reference sharing**: Non-redacted properties share original object references
+3. **Setup-time optimization**: Path analysis happens once, not per redaction
+4. **Memory efficiency**: Dramatically reduced memory usage for typical use cases
+
+### Benchmark Details
+
+**Small Objects (~180 bytes)**:
+- @pinojs/redact: **690ns** per operation
+- fast-redact: **200ns** per operation
+- **Slight setup overhead for small objects**
+
+**Large Objects (~18KB, minimal redaction)**:
+- @pinojs/redact: **18μs** per operation
+- fast-redact: **17μs** per operation
+- Near-identical performance
+
+**Large Objects (~18KB, wildcard patterns)**:
+- @pinojs/redact: **48μs** per operation
+- fast-redact: **37μs** per operation
+- Competitive performance for complex patterns
+
+**Memory Considerations**:
+- @pinojs/redact: **Selective reference sharing** (much lower memory usage than before)
+- fast-redact: Mutates in-place (lowest memory usage)
+- Large objects with few redacted paths now share most references
+
+### When Performance Matters
+
+Choose **fast-redact** when:
+- Absolute maximum performance is critical (>100,000 ops/sec)
+- Working with very small objects frequently
+- Mutation is acceptable and controlled
+- Every microsecond counts
+
+Choose **@pinojs/redact** when:
+- Immutability is required (with competitive performance)
+- Objects are shared across contexts
+- Large objects with selective redaction
+- Memory efficiency through reference sharing is important
+- Safety and functionality are priorities
+- Most production applications (performance gap is minimal)
+
+Run benchmarks yourself:
+```bash
+npm run bench
+```
+
+## How Selective Cloning Works
+
+@pinojs/redact uses an innovative **selective cloning** approach that provides immutability guarantees while dramatically improving performance:
+
+### Traditional Approach (before optimization)
+```js
+// Old approach: Deep clone entire object, then redact
+const fullClone = deepClone(originalObject) // Clone everything
+redact(fullClone, paths) // Then redact specific paths
+```
+
+### Selective Cloning Approach (current)
+```js
+// New approach: Analyze paths, clone only what's needed
+const pathStructure = buildPathStructure(paths) // One-time setup
+const selectiveClone = cloneOnlyNeededPaths(obj, pathStructure) // Smart cloning
+redact(selectiveClone, paths) // Redact pre-identified paths
+```
+
+### Key Innovations
+
+1. **Path Analysis**: Pre-processes redaction paths into an efficient tree structure
+2. **Selective Cloning**: Only creates new objects for branches that contain redaction targets
+3. **Reference Sharing**: Non-redacted properties maintain exact same object references
+4. **Setup Optimization**: Path parsing happens once during redactor creation, not per redaction
+
+### Example: Reference Sharing in Action
+
+```js
+const largeConfig = {
+  database: { /* large config object */ },
+  api: { /* another large config */ },
+  secrets: { password: 'hidden', apiKey: 'secret' }
+}
+
+const redact = slowRedact({ paths: ['secrets.password'] })
+const result = redact(largeConfig)
+
+// Only secrets object is cloned, database and api share original references
+console.log(result.database === largeConfig.database) // true - shared reference!
+console.log(result.api === largeConfig.api) // true - shared reference!
+console.log(result.secrets === largeConfig.secrets) // false - cloned for redaction
+```
+
+This approach provides **immutability where it matters** while **sharing references where it's safe**.
+
+## Remove Option
+
+The `remove: true` option provides full compatibility with fast-redact's key removal functionality:
+
+```js
+const redact = slowRedact({
+  paths: ['password', 'secrets.*', 'users.*.credentials'],
+  remove: true
+})
+
+const data = {
+  username: 'john',
+  password: 'secret123',
+  secrets: { apiKey: 'abc', token: 'xyz' },
+  users: [
+    { name: 'Alice', credentials: { password: 'pass1' } },
+    { name: 'Bob', credentials: { password: 'pass2' } }
+  ]
+}
+
+console.log(redact(data))
+// Output: {"username":"john","secrets":{},"users":[{"name":"Alice"},{"name":"Bob"}]}
+```
+
+### Remove vs Redact Behavior
+
+| Option | Behavior | Output Example |
+|--------|----------|----------------|
+| Default (redact) | Replaces values with censor | `{"password":"[REDACTED]"}` |
+| `remove: true` | Completely omits keys | `{}` |
+
+### Compatibility Notes
+
+- **Same output as fast-redact**: Identical JSON output when using `remove: true`
+- **Wildcard support**: Works with all wildcard patterns (`*`, `users.*`, `items.*.secret`)
+- **Array handling**: Array items are set to `undefined` (omitted in JSON output)
+- **Nested paths**: Supports deep removal (`users.*.credentials.password`)
+- **Serialize compatibility**: Only works with `JSON.stringify` serializer (like fast-redact)
+
+## Testing
+
+```bash
+# Run unit tests
+npm test
+
+# Run integration tests comparing with fast-redact
+npm run test:integration
+
+# Run all tests (unit + integration)
+npm run test:all
+
+# Run benchmarks
+npm run bench
+```
+
+### Test Coverage
+
+- **16 unit tests**: Core functionality and edge cases
+- **16 integration tests**: Output compatibility with fast-redact
+- **All major features**: Paths, wildcards, serialization, custom censors
+- **Performance benchmarks**: Direct comparison with fast-redact
+
+## License
+
+MIT
+
+## Contributing
+
+Pull requests welcome! Please ensure all tests pass and add tests for new features.
\ No newline at end of file
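The README above documents `@pinojs/redact` used standalone; in this dependency tree it arrives as a dependency of `pino`, which exposes the same path syntax through its documented `redact` option. A minimal sketch (the logged field names are illustrative):

```js
const pino = require('pino')

// pino forwards these paths to its redaction engine; any logged value
// under a matching path is replaced by the censor before serialization.
const logger = pino({
  redact: {
    paths: ['req.headers.cookie', 'req.headers.authorization'],
    censor: '[REDACTED]'
  }
})

logger.info({ req: { headers: { cookie: 'session-token' } } }, 'incoming request')
// the emitted JSON carries "cookie":"[REDACTED]" instead of the real value
```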
diff --git a/node_modules/@pinojs/redact/benchmarks/basic.js b/node_modules/@pinojs/redact/benchmarks/basic.js
new file mode 100644
index 0000000..b6a40e0
--- /dev/null
+++ b/node_modules/@pinojs/redact/benchmarks/basic.js
@@ -0,0 +1,184 @@
+const { bench, group, run } = require('mitata')
+const slowRedact = require('../index.js')
+const fastRedact = require('fast-redact')
+
+// Test objects
+const smallObj = {
+  user: { name: 'john', password: 'secret123' },
+  headers: { cookie: 'session-token', authorization: 'Bearer abc123' }
+}
+
+const largeObj = {
+  users: [],
+  metadata: {
+    version: '1.0.0',
+    secret: 'app-secret-key',
+    database: {
+      host: 'localhost',
+      password: 'db-password'
+    }
+  }
+}
+
+// Populate users array with for loop instead of Array.from
+for (let i = 0; i < 100; i++) {
+  largeObj.users.push({
+    id: i,
+    name: `user${i}`,
+    email: `user${i}@example.com`,
+    password: `secret${i}`,
+    profile: {
+      age: 20 + (i % 50),
+      preferences: {
+        theme: 'dark',
+        notifications: true,
+        apiKey: `key-${i}-secret`
+      }
+    }
+  })
+}
+
+// Redaction configurations
+const basicSlowRedact = slowRedact({
+  paths: ['user.password', 'headers.cookie']
+})
+
+const basicFastRedact = fastRedact({
+  paths: ['user.password', 'headers.cookie']
+})
+
+const wildcardSlowRedact = slowRedact({
+  paths: ['users.*.password', 'users.*.profile.preferences.apiKey']
+})
+
+const wildcardFastRedact = fastRedact({
+  paths: ['users.*.password', 'users.*.profile.preferences.apiKey']
+})
+
+const deepSlowRedact = slowRedact({
+  paths: ['metadata.secret', 'metadata.database.password']
+})
+
+const deepFastRedact = fastRedact({
+  paths: ['metadata.secret', 'metadata.database.password']
+})
+
+group('Small Object Redaction - @pinojs/redact', () => {
+  bench('basic paths', () => {
+    basicSlowRedact(smallObj)
+  })
+
+  bench('serialize: false', () => {
+    const redact = slowRedact({
+      paths: ['user.password'],
+      serialize: false
+    })
+    redact(smallObj)
+  })
+
+  bench('custom censor function', () => {
+    const redact = slowRedact({
+      paths: ['user.password'],
+      censor: (value, path) => `HIDDEN:${path}`
+    })
+    redact(smallObj)
+  })
+})
+
+group('Small Object Redaction - fast-redact', () => {
+  bench('basic paths', () => {
+    basicFastRedact(smallObj)
+  })
+
+  bench('serialize: false', () => {
+    const redact = fastRedact({
+      paths: ['user.password'],
+      serialize: false
+    })
+    redact(smallObj)
+  })
+
+  bench('custom censor function', () => {
+    const redact = fastRedact({
+      paths: ['user.password'],
+      censor: (value, path) => `HIDDEN:${path}`
+    })
+    redact(smallObj)
+  })
+})
+
+group('Large Object Redaction - @pinojs/redact', () => {
+  bench('wildcard patterns', () => {
+    wildcardSlowRedact(largeObj)
+  })
+
+  bench('deep nested paths', () => {
+    deepSlowRedact(largeObj)
+  })
+
+  bench('multiple wildcards', () => {
+    const redact = slowRedact({
+      paths: ['users.*.password', 'users.*.profile.preferences.*']
+    })
+    redact(largeObj)
+  })
+})
+
+group('Large Object Redaction - fast-redact', () => {
+  bench('wildcard patterns', () => {
+    wildcardFastRedact(largeObj)
+  })
+
+  bench('deep nested paths', () => {
+    deepFastRedact(largeObj)
+  })
+
+  bench('multiple wildcards', () => {
+    const redact = fastRedact({
+      paths: ['users.*.password', 'users.*.profile.preferences.*']
+    })
+    redact(largeObj)
+  })
+})
+
+group('Direct Performance Comparison', () => {
+  bench('@pinojs/redact - basic paths', () => {
+    basicSlowRedact(smallObj)
+  })
+
+  bench('fast-redact - basic paths', () => {
+    basicFastRedact(smallObj)
+  })
+
+  bench('@pinojs/redact - wildcards', () => {
+    wildcardSlowRedact(largeObj)
+  })
+
+  bench('fast-redact - wildcards', () => {
+    wildcardFastRedact(largeObj)
+  })
+})
+
+group('Object Cloning Overhead', () => {
+  bench('@pinojs/redact - no redaction (clone only)', () => {
+    const redact = slowRedact({ paths: [] })
+    redact(smallObj)
+  })
+
+  bench('fast-redact - no redaction', () => {
+    const redact = fastRedact({ paths: [] })
+    redact(smallObj)
+  })
+
+  bench('@pinojs/redact - large object clone', () => {
+    const redact = slowRedact({ paths: [] })
+    redact(largeObj)
+  })
+
+  bench('fast-redact - large object', () => {
+    const redact = fastRedact({ paths: [] })
+    redact(largeObj)
+  })
+})
+
+run()
diff --git a/node_modules/@pinojs/redact/eslint.config.js b/node_modules/@pinojs/redact/eslint.config.js
new file mode 100644
index 0000000..6ccece6
--- /dev/null
+++ b/node_modules/@pinojs/redact/eslint.config.js
@@ -0,0 +1 @@
+module.exports = require('neostandard')()
diff --git a/node_modules/@pinojs/redact/index.d.ts b/node_modules/@pinojs/redact/index.d.ts
new file mode 100644
index 0000000..927d16a
--- /dev/null
+++ b/node_modules/@pinojs/redact/index.d.ts
@@ -0,0 +1,52 @@
+export = F;
+
+/**
+ * When called without any options, or with a zero length paths array, @pinojs/redact will return JSON.stringify or the serialize option, if set.
+ * @param redactOptions
+ * @param redactOptions.paths An array of strings describing the nested location of a key in an object.
+ * @param redactOptions.censor This is the value which overwrites redacted properties.
+ * @param redactOptions.remove The remove option, when set to true will cause keys to be removed from the serialized output.
+ * @param redactOptions.serialize The serialize option may either be a function or a boolean. If a function is supplied, this will be used to serialize the redacted object.
+ * @param redactOptions.strict The strict option, when set to true, will cause the redactor function to throw if instead of an object it finds a primitive.
+ * @returns Redacted value from input
+ */
+declare function F(
+  redactOptions: F.RedactOptionsNoSerialize
+): F.redactFnNoSerialize;
+declare function F(redactOptions?: F.RedactOptions): F.redactFn;
+
+declare namespace F {
+  /** Redacts input */
+  type redactFn = <T>(input: T) => string | T;
+
+  /** Redacts input without serialization */
+  type redactFnNoSerialize = redactFn & {
+    /** Method that allows the redacted keys to be restored with the original data. Supplied only when serialize option set to false. */
+    restore<T>(input: T): T;
+  };
+
+  interface RedactOptions {
+    /** An array of strings describing the nested location of a key in an object. */
+    paths?: string[] | undefined;
+
+    /** This is the value which overwrites redacted properties. */
+    censor?: string | ((v: any) => any) | undefined;
+
+    /** The remove option, when set to true will cause keys to be removed from the serialized output. */
+    remove?: boolean | undefined;
+
+    /**
+     * The serialize option may either be a function or a boolean. If a function is supplied, this will be used to serialize the redacted object.
+     * The default serialize is the function JSON.stringify
+     */
+    serialize?: boolean | ((v: any) => any) | undefined;
+
+    /** The strict option, when set to true, will cause the redactor function to throw if instead of an object it finds a primitive. */
+    strict?: boolean | undefined;
+  }
+
+  /** RedactOptions without serialization. Instead of the serialized object, the output of the redactor function will be the mutated object itself. */
+  interface RedactOptionsNoSerialize extends RedactOptions {
+    serialize: false;
+  }
+}
diff --git a/node_modules/@pinojs/redact/index.js b/node_modules/@pinojs/redact/index.js
new file mode 100644
index 0000000..c2f5f95
--- /dev/null
+++ b/node_modules/@pinojs/redact/index.js
@@ -0,0 +1,529 @@
+'use strict'
+
+function deepClone (obj) {
+  if (obj === null || typeof obj !== 'object') {
+    return obj
+  }
+
+  if (obj instanceof Date) {
+    return new Date(obj.getTime())
+  }
+
+  if (obj instanceof Array) {
+    const cloned = []
+    for (let i = 0; i < obj.length; i++) {
+      cloned[i] = deepClone(obj[i])
+    }
+    return cloned
+  }
+
+  if (typeof obj === 'object') {
+    const cloned = Object.create(Object.getPrototypeOf(obj))
+    for (const key in obj) {
+      if (Object.prototype.hasOwnProperty.call(obj, key)) {
+        cloned[key] = deepClone(obj[key])
+      }
+    }
+    return cloned
+  }
+
+  return obj
+}
+
+function parsePath (path) {
+  const parts = []
+  let current = ''
+  let inBrackets = false
+  let inQuotes = false
+  let quoteChar = ''
+
+  for (let i = 0; i < path.length; i++) {
+    const char = path[i]
+
+    if (!inBrackets && char === '.') {
+      if (current) {
+        parts.push(current)
+        current = ''
+      }
+    } else if (char === '[') {
+      if (current) {
+        parts.push(current)
+        current = ''
+      }
+      inBrackets = true
+    } else if (char === ']' && inBrackets) {
+      // Always push the current value when closing brackets, even if it's an empty string
+      parts.push(current)
+      current = ''
+      inBrackets = false
+      inQuotes = false
+    } else if ((char === '"' || char === "'") && inBrackets) {
+      if (!inQuotes) {
+        inQuotes = true
+        quoteChar = char
+      } else if (char === quoteChar) {
+        inQuotes = false
+        quoteChar = ''
+      } else {
+        current += char
+      }
+    } else {
+      current += char
+    }
+  }
+
+  if (current) {
+    parts.push(current)
+  }
+
+  return parts
+}
+
+function setValue (obj, parts, value) {
+  let current = obj
+
+  for (let i = 0; i < parts.length - 1; i++) {
+    const key = parts[i]
+    // Type safety: Check if current is an object before using 'in' operator
+    if (typeof current !== 'object' || current === null || !(key in current)) {
+      return false // Path doesn't exist, don't create it
+    }
+    if (typeof current[key] !== 'object' || current[key] === null) {
+      return false // Path doesn't exist properly
+    }
+    current = current[key]
+  }
+
+  const lastKey = parts[parts.length - 1]
+  if (lastKey === '*') {
+    if (Array.isArray(current)) {
+      for (let i = 0; i < current.length; i++) {
+        current[i] = value
+      }
+    } else if (typeof current === 'object' && current !== null) {
+      for (const key in current) {
+        if (Object.prototype.hasOwnProperty.call(current, key)) {
+          current[key] = value
+        }
+      }
+    }
+  } else {
+    // Type safety: Check if current is an object before using 'in' operator
+    if (typeof current === 'object' && current !== null && lastKey in current && Object.prototype.hasOwnProperty.call(current, lastKey)) {
+      current[lastKey] = value
+    }
+  }
+  return true
+}
+
+function removeKey (obj, parts) {
+  let current = obj
+
+  for (let i = 0; i < parts.length - 1; i++) {
+    const key = parts[i]
+    // Type safety: Check if current is an object before using 'in' operator
+    if (typeof current !== 'object' || current === null || !(key in current)) {
+      return false // Path doesn't exist, don't create it
+    }
+    if (typeof current[key] !== 'object' || current[key] === null) {
+      return false // Path doesn't exist properly
+    }
+    current = current[key]
+  }
+
+  const lastKey = parts[parts.length - 1]
+  if (lastKey === '*') {
+    if (Array.isArray(current)) {
+      // For arrays, we can't really "remove" all items as that would change indices
+      // Instead, we set them to undefined which will be omitted by JSON.stringify
+      for (let i = 0; i < current.length; i++) {
+        current[i] = undefined
+      }
+    } else if (typeof current === 'object' && current !== null) {
+      for (const key in current) {
+        if (Object.prototype.hasOwnProperty.call(current, key)) {
+          delete current[key]
+        }
+      }
+    }
+  } else {
+    // Type safety: Check if current is an object before using 'in' operator
+    if (typeof current === 'object' && current !== null && lastKey in current && Object.prototype.hasOwnProperty.call(current, lastKey)) {
+      delete current[lastKey]
+    }
+  }
+  return true
+}
+
+// Sentinel object to distinguish between undefined value and non-existent path
+const PATH_NOT_FOUND = Symbol('PATH_NOT_FOUND')
+
+function getValueIfExists (obj, parts) {
+  let current = obj
+
+  for (const part of parts) {
+    if (current === null || current === undefined) {
+      return PATH_NOT_FOUND
+    }
+    // Type safety: Check if current is an object before property access
+    if (typeof current !== 'object' || current === null) {
+      return PATH_NOT_FOUND
+    }
+    // Check if the property exists before accessing it
+    if (!(part in current)) {
+      return PATH_NOT_FOUND
+    }
+    current = current[part]
+  }
+
+  return current
+}
+
+function getValue (obj, parts) {
+  let current = obj
+
+  for (const part of parts) {
+    if (current === null || current === undefined) {
+      return undefined
+    }
+    // Type safety: Check if current is an object before property access
+    if (typeof current !== 'object' || current === null) {
+      return undefined
+    }
+    current = current[part]
+  }
+
+  return current
+}
+
+function redactPaths (obj, paths, censor, remove = false) {
+  for (const path of paths) {
+    const parts = parsePath(path)
+
+    if (parts.includes('*')) {
+      redactWildcardPath(obj, parts, censor, path, remove)
+    } else {
+      if (remove) {
+        removeKey(obj, parts)
+      } else {
+        // Get value only if path exists - single traversal
+        const value = getValueIfExists(obj, parts)
+        if (value === PATH_NOT_FOUND) {
+          continue
+        }
+
+        const actualCensor = typeof censor === 'function'
+          ? censor(value, parts)
+          : censor
+        setValue(obj, parts, actualCensor)
+      }
+    }
+  }
+}
+
+function redactWildcardPath (obj, parts, censor, originalPath, remove = false) {
+  const wildcardIndex = parts.indexOf('*')
+
+  if (wildcardIndex === parts.length - 1) {
+    const parentParts = parts.slice(0, -1)
+    let current = obj
+
+    for (const part of parentParts) {
+      if (current === null || current === undefined) return
+      // Type safety: Check if current is an object before property access
+      if (typeof current !== 'object' || current === null) return
+      current = current[part]
+    }
+
+    if (Array.isArray(current)) {
+      if (remove) {
+        // For arrays, set all items to undefined which will be omitted by JSON.stringify
+        for (let i = 0; i < current.length; i++) {
+          current[i] = undefined
+        }
+      } else {
+        for (let i = 0; i < current.length; i++) {
+          const indexPath = [...parentParts, i.toString()]
+          const actualCensor = typeof censor === 'function'
+            ? censor(current[i], indexPath)
+            : censor
+          current[i] = actualCensor
+        }
+      }
+    } else if (typeof current === 'object' && current !== null) {
+      if (remove) {
+        // Collect keys to delete to avoid issues with deleting during iteration
+        const keysToDelete = []
+        for (const key in current) {
+          if (Object.prototype.hasOwnProperty.call(current, key)) {
+            keysToDelete.push(key)
+          }
+        }
+        for (const key of keysToDelete) {
+          delete current[key]
+        }
+      } else {
+        for (const key in current) {
+          const keyPath = [...parentParts, key]
+          const actualCensor = typeof censor === 'function'
+            ? censor(current[key], keyPath)
+            : censor
+          current[key] = actualCensor
+        }
+      }
+    }
+  } else {
+    redactIntermediateWildcard(obj, parts, censor, wildcardIndex, originalPath, remove)
+  }
+}
+
+function redactIntermediateWildcard (obj, parts, censor, wildcardIndex, originalPath, remove = false) {
+  const beforeWildcard = parts.slice(0, wildcardIndex)
+  const afterWildcard = parts.slice(wildcardIndex + 1)
+  const pathArray = [] // Cached array to avoid allocations
+
+  function traverse (current, pathLength) {
+    if (pathLength === beforeWildcard.length) {
+      if (Array.isArray(current)) {
+        for (let i = 0; i < current.length; i++) {
+          pathArray[pathLength] = i.toString()
+          traverse(current[i], pathLength + 1)
+        }
+      } else if (typeof current === 'object' && current !== null) {
+        for (const key in current) {
+          pathArray[pathLength] = key
+          traverse(current[key], pathLength + 1)
+        }
+      }
+    } else if (pathLength < beforeWildcard.length) {
+      const nextKey = beforeWildcard[pathLength]
+      // Type safety: Check if current is an object before using 'in' operator
+      if (current && typeof current === 'object' && current !== null && nextKey in current) {
+        pathArray[pathLength] = nextKey
+        traverse(current[nextKey], pathLength + 1)
+      }
+    } else {
+      // Check if afterWildcard contains more wildcards
+      if (afterWildcard.includes('*')) {
+        // Recursively handle remaining wildcards
+        // Wrap censor to prepend current path context
+        const wrappedCensor = typeof censor === 'function'
+          ? (value, path) => {
+              const fullPath = [...pathArray.slice(0, pathLength), ...path]
+              return censor(value, fullPath)
+            }
+          : censor
+        redactWildcardPath(current, afterWildcard, wrappedCensor, originalPath, remove)
+      } else {
+        // No more wildcards, apply the redaction directly
+        if (remove) {
+          removeKey(current, afterWildcard)
+        } else {
+          const actualCensor = typeof censor === 'function'
+            ? censor(getValue(current, afterWildcard), [...pathArray.slice(0, pathLength), ...afterWildcard])
+            : censor
+          setValue(current, afterWildcard, actualCensor)
+        }
+      }
+    }
+  }
+
+  if (beforeWildcard.length === 0) {
+    traverse(obj, 0)
+  } else {
+    let current = obj
+    for (let i = 0; i < beforeWildcard.length; i++) {
+      const part = beforeWildcard[i]
+      if (current === null || current === undefined) return
+      // Type safety: Check if current is an object before property access
+      if (typeof current !== 'object' || current === null) return
+      current = current[part]
+      pathArray[i] = part
+    }
+    if (current !== null && current !== undefined) {
+      traverse(current, beforeWildcard.length)
+    }
+  }
+}
+
+function buildPathStructure (pathsToClone) {
+  if (pathsToClone.length === 0) {
+    return null // No paths to redact
+  }
+
+  // Parse all paths and organize by depth
+  const pathStructure = new Map()
+  for (const path of pathsToClone) {
+    const parts = parsePath(path)
+    let current = pathStructure
+    for (let i = 0; i < parts.length; i++) {
+      const part = parts[i]
+      if (!current.has(part)) {
+        current.set(part, new Map())
+      }
+      current = current.get(part)
+    }
+  }
+  return pathStructure
+}
+
+function selectiveClone (obj, pathStructure) {
+  if (!pathStructure) {
+    return obj // No paths to redact, return original
+  }
+
+  function cloneSelectively (source, pathMap, depth = 0) {
+    if (!pathMap || pathMap.size === 0) {
+      return source // No more paths to clone, return reference
+    }
+
+    if (source === null || typeof source !== 'object') {
+      return source
+    }
+
+    if (source instanceof Date) {
+      return new Date(source.getTime())
+    }
+
+    if (Array.isArray(source)) {
+      const cloned = []
+      for (let i = 0; i < source.length; i++) {
+        const indexStr = i.toString()
+        if (pathMap.has(indexStr) || pathMap.has('*')) {
+          cloned[i] = cloneSelectively(source[i], pathMap.get(indexStr) || pathMap.get('*'))
+        } else {
+          cloned[i] = source[i] // Share reference for non-redacted items
+        }
+      }
+      return cloned
+    }
+
+    // Handle objects
+    const cloned = Object.create(Object.getPrototypeOf(source))
+    for (const key in source) {
+      if (Object.prototype.hasOwnProperty.call(source, key)) {
+        if (pathMap.has(key) || pathMap.has('*')) {
+          cloned[key] = cloneSelectively(source[key], pathMap.get(key) || pathMap.get('*'))
+        } else {
+          cloned[key] = source[key] // Share reference for non-redacted properties
+        }
+      }
+    }
+    return cloned
+  }
+
+  return cloneSelectively(obj, pathStructure)
+}
+
+function validatePath (path) {
+  if (typeof path !== 'string') {
+    throw new Error('Paths must be (non-empty) strings')
+  }
+
+  if (path === '') {
+    throw new Error('Invalid redaction path ()')
+  }
+
+  // Check for double dots
+  if (path.includes('..')) {
+    throw new Error(`Invalid redaction path (${path})`)
+  }
+
+  // Check for comma-separated paths (invalid syntax)
+  if (path.includes(',')) {
+    throw new Error(`Invalid redaction path (${path})`)
+  }
+
+  // Check for unmatched brackets
+  let bracketCount = 0
+  let inQuotes = false
+  let quoteChar = ''
+
+  for (let i = 0; i < path.length; i++) {
+    const char = path[i]
+
+    if ((char === '"' || char === "'") && bracketCount > 0) {
+      if (!inQuotes) {
+        inQuotes = true
+        quoteChar = char
+      } else if (char === quoteChar) {
+        inQuotes = false
+        quoteChar = ''
+      }
+    } else if (char === '[' && !inQuotes) {
+      bracketCount++
+    } else if (char === ']' && !inQuotes) {
+      bracketCount--
+      if (bracketCount < 0) {
+        throw new Error(`Invalid redaction path (${path})`)
+      }
+    }
+  }
+
+  if (bracketCount !== 0) {
+    throw new Error(`Invalid redaction path (${path})`)
+  }
+}
+
+function validatePaths (paths) {
+  if (!Array.isArray(paths)) {
+    throw new TypeError('paths must be an array')
+  }
+
+  for (const path of paths) {
+    validatePath(path)
+  }
+}
+
+function slowRedact (options = {}) {
+  const {
+    paths = [],
+    censor = '[REDACTED]',
+    serialize = JSON.stringify,
+    strict = true,
+    remove = false
+  } = options
+
+  // Validate paths upfront to match fast-redact behavior
+  validatePaths(paths)
+
+  // Build path structure once during setup, not on every call
+  const pathStructure = buildPathStructure(paths)
+
+  return function redact (obj) {
+    if (strict && (obj === null || typeof obj !== 'object')) {
+      if (obj === null || obj === undefined) {
+        return serialize ? serialize(obj) : obj
+      }
+      if (typeof obj !== 'object') {
+        return serialize ? serialize(obj) : obj
+      }
+    }
+
+    // Only clone paths that need redaction
+    const cloned = selectiveClone(obj, pathStructure)
+    const original = obj // Keep reference to original for restore
+
+    let actualCensor = censor
+    if (typeof censor === 'function') {
+      actualCensor = censor
+    }
+
+    redactPaths(cloned, paths, actualCensor, remove)
+
+    if (serialize === false) {
+      cloned.restore = function () {
+        return deepClone(original) // Full clone only when restore is called
+      }
+      return cloned
+    }
+
+    if (typeof serialize === 'function') {
+      return serialize(cloned)
+    }
+
+    return JSON.stringify(cloned)
+  }
+}
+
+module.exports = slowRedact
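Two details of the implementation above are easy to miss when reading the README alone: `redactPaths` hands a censor function the resolved path as an array of parts, and `selectiveClone` leaves branches outside the redaction paths as shared references. A small sketch exercising both (the object shape is invented for illustration):

```js
const slowRedact = require('@pinojs/redact')

const redact = slowRedact({
  paths: ['secrets.apiKey'],
  serialize: false,
  // censor functions receive (value, parts); parts is an array such as ['secrets', 'apiKey']
  censor: (value, parts) => `<redacted:${parts.join('.')}>`
})

const obj = {
  secrets: { apiKey: 'abc123' },
  metrics: { uptime: 42 }
}

const out = redact(obj)
console.log(out.secrets.apiKey)           // '<redacted:secrets.apiKey>'
console.log(out.metrics === obj.metrics)  // true: the non-redacted branch is a shared reference
console.log(out.restore().secrets.apiKey) // 'abc123': restore() deep-clones the original
```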
--fix", + "bench": "node benchmarks/basic.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/pinojs/redact.git" + }, + "keywords": [ + "redact" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/pinojs/redact/issues" + }, + "homepage": "https://github.com/pinojs/redact#readme", + "devDependencies": { + "eslint": "^9.36.0", + "fast-redact": "^3.5.0", + "mitata": "^1.0.34", + "neostandard": "^0.12.2", + "tsd": "^0.33.0", + "typescript": "^5.9.2" + } +} diff --git a/node_modules/@pinojs/redact/scripts/sync-version.mjs b/node_modules/@pinojs/redact/scripts/sync-version.mjs new file mode 100644 index 0000000..4cafec7 --- /dev/null +++ b/node_modules/@pinojs/redact/scripts/sync-version.mjs @@ -0,0 +1,20 @@ +import fs from 'node:fs' +import path from 'node:path' + +const packageJsonPath = path.resolve(import.meta.dirname, '../package.json') +let { version } = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8')) + +let passedVersion = process.argv[2] + +if (passedVersion) { + passedVersion = passedVersion.trim().replace(/^v/, '') + if (version !== passedVersion) { + console.log(`Syncing version from ${version} to ${passedVersion}`) + version = passedVersion + const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8')) + packageJson.version = version + fs.writeFileSync(path.resolve('./package.json'), JSON.stringify(packageJson, null, 2) + '\n', { encoding: 'utf-8' }) + } +} else { + throw new Error('Version argument is required') +} diff --git a/node_modules/@pinojs/redact/test/actual-redact-comparison.test.js b/node_modules/@pinojs/redact/test/actual-redact-comparison.test.js new file mode 100644 index 0000000..cd3248d --- /dev/null +++ b/node_modules/@pinojs/redact/test/actual-redact-comparison.test.js @@ -0,0 +1,211 @@ +'use strict' + +// Node.js test comparing @pinojs/redact vs fast-redact for multiple wildcard patterns +// This test validates that @pinojs/redact correctly handles 3+ consecutive wildcards +// matching the behavior of fast-redact + +const { test } = require('node:test') +const { strict: assert } = require('node:assert') +const fastRedact = require('fast-redact') +const slowRedact = require('../index.js') + +// Helper function to test redaction and track which values were censored +function testRedactDirect (library, pattern, testData = {}) { + const matches = [] + const redactor = library === '@pinojs/redact' ? slowRedact : fastRedact + + try { + const redact = redactor({ + paths: [pattern], + censor: (value, path) => { + if ( + value !== undefined && + value !== null && + typeof value === 'string' && + value.includes('secret') + ) { + matches.push({ + value, + path: path ? 
+          })
+        }
+        return '[REDACTED]'
+      }
+    })
+
+    redact(JSON.parse(JSON.stringify(testData)))
+
+    return {
+      library,
+      pattern,
+      matches,
+      success: true,
+      testData
+    }
+  } catch (error) {
+    return {
+      library,
+      pattern,
+      matches: [],
+      success: false,
+      error: error.message,
+      testData
+    }
+  }
+}
+
+function testSlowRedactDirect (pattern, testData) {
+  return testRedactDirect('@pinojs/redact', pattern, testData)
+}
+
+function testFastRedactDirect (pattern, testData) {
+  return testRedactDirect('fast-redact', pattern, testData)
+}
+
+test('@pinojs/redact: *.password (2 levels)', () => {
+  const result = testSlowRedactDirect('*.password', {
+    simple: { password: 'secret-2-levels' }
+  })
+
+  assert.strictEqual(result.success, true)
+  assert.strictEqual(result.matches.length, 1)
+  assert.strictEqual(result.matches[0].value, 'secret-2-levels')
+})
+
+test('@pinojs/redact: *.*.password (3 levels)', () => {
+  const result = testSlowRedactDirect('*.*.password', {
+    simple: { password: 'secret-2-levels' },
+    user: { auth: { password: 'secret-3-levels' } }
+  })
+
+  assert.strictEqual(result.success, true)
+  assert.strictEqual(result.matches.length, 1)
+  assert.strictEqual(result.matches[0].value, 'secret-3-levels')
+})
+
+test('@pinojs/redact: *.*.*.password (4 levels)', () => {
+  const result = testSlowRedactDirect('*.*.*.password', {
+    simple: { password: 'secret-2-levels' },
+    user: { auth: { password: 'secret-3-levels' } },
+    nested: { deep: { auth: { password: 'secret-4-levels' } } }
+  })
+
+  assert.strictEqual(result.success, true)
+  assert.strictEqual(result.matches.length, 1)
+  assert.strictEqual(result.matches[0].value, 'secret-4-levels')
+})
+
+test('@pinojs/redact: *.*.*.*.password (5 levels)', () => {
+  const result = testSlowRedactDirect('*.*.*.*.password', {
+    simple: { password: 'secret-2-levels' },
+    user: { auth: { password: 'secret-3-levels' } },
+    nested: { deep: { auth: { password: 'secret-4-levels' } } },
+    config: {
+      user: { auth: { settings: { password: 'secret-5-levels' } } }
+    }
+  })
+
+  assert.strictEqual(result.success, true)
+  assert.strictEqual(result.matches.length, 1)
+  assert.strictEqual(result.matches[0].value, 'secret-5-levels')
+})
+
+test('@pinojs/redact: *.*.*.*.*.password (6 levels)', () => {
+  const result = testSlowRedactDirect('*.*.*.*.*.password', {
+    simple: { password: 'secret-2-levels' },
+    user: { auth: { password: 'secret-3-levels' } },
+    nested: { deep: { auth: { password: 'secret-4-levels' } } },
+    config: {
+      user: { auth: { settings: { password: 'secret-5-levels' } } }
+    },
+    data: {
+      reqConfig: {
+        data: {
+          credentials: {
+            settings: {
+              password: 'real-secret-6-levels'
+            }
+          }
+        }
+      }
+    }
+  })
+
+  assert.strictEqual(result.success, true)
+  assert.strictEqual(result.matches.length, 1)
+  assert.strictEqual(result.matches[0].value, 'real-secret-6-levels')
+})
+
+test('fast-redact: *.password (2 levels)', () => {
+  const result = testFastRedactDirect('*.password', {
+    simple: { password: 'secret-2-levels' }
+  })
+
+  assert.strictEqual(result.success, true)
+  assert.strictEqual(result.matches.length, 1)
+  assert.strictEqual(result.matches[0].value, 'secret-2-levels')
+})
+
+test('fast-redact: *.*.password (3 levels)', () => {
+  const result = testFastRedactDirect('*.*.password', {
+    simple: { password: 'secret-2-levels' },
+    user: { auth: { password: 'secret-3-levels' } }
+  })
+
+  assert.strictEqual(result.success, true)
+  assert.strictEqual(result.matches.length, 1)
+  assert.strictEqual(result.matches[0].value, 'secret-3-levels')
'secret-3-levels') +}) + +test('fast-redact: *.*.*.password (4 levels)', () => { + const result = testFastRedactDirect('*.*.*.password', { + simple: { password: 'secret-2-levels' }, + user: { auth: { password: 'secret-3-levels' } }, + nested: { deep: { auth: { password: 'secret-4-levels' } } } + }) + + assert.strictEqual(result.success, true) + assert.strictEqual(result.matches.length, 1) + assert.strictEqual(result.matches[0].value, 'secret-4-levels') +}) + +test('fast-redact: *.*.*.*.password (5 levels)', () => { + const result = testFastRedactDirect('*.*.*.*.password', { + simple: { password: 'secret-2-levels' }, + user: { auth: { password: 'secret-3-levels' } }, + nested: { deep: { auth: { password: 'secret-4-levels' } } }, + config: { + user: { auth: { settings: { password: 'secret-5-levels' } } } + } + }) + + assert.strictEqual(result.success, true) + assert.strictEqual(result.matches.length, 1) + assert.strictEqual(result.matches[0].value, 'secret-5-levels') +}) + +test('fast-redact: *.*.*.*.*.password (6 levels)', () => { + const result = testFastRedactDirect('*.*.*.*.*.password', { + simple: { password: 'secret-2-levels' }, + user: { auth: { password: 'secret-3-levels' } }, + nested: { deep: { auth: { password: 'secret-4-levels' } } }, + config: { + user: { auth: { settings: { password: 'secret-5-levels' } } } + }, + data: { + reqConfig: { + data: { + credentials: { + settings: { + password: 'real-secret-6-levels' + } + } + } + } + } + }) + + assert.strictEqual(result.success, true) + assert.strictEqual(result.matches.length, 1) + assert.strictEqual(result.matches[0].value, 'real-secret-6-levels') +}) diff --git a/node_modules/@pinojs/redact/test/index.test.js b/node_modules/@pinojs/redact/test/index.test.js new file mode 100644 index 0000000..93a8149 --- /dev/null +++ b/node_modules/@pinojs/redact/test/index.test.js @@ -0,0 +1,824 @@ +const { test } = require('node:test') +const { strict: assert } = require('node:assert') +const slowRedact = require('../index.js') + +test('basic path redaction', () => { + const obj = { + headers: { + cookie: 'secret-cookie', + authorization: 'Bearer token' + }, + body: { message: 'hello' } + } + + const redact = slowRedact({ paths: ['headers.cookie'] }) + const result = redact(obj) + + // Original object should remain unchanged + assert.strictEqual(obj.headers.cookie, 'secret-cookie') + + // Result should have redacted path + const parsed = JSON.parse(result) + assert.strictEqual(parsed.headers.cookie, '[REDACTED]') + assert.strictEqual(parsed.headers.authorization, 'Bearer token') + assert.strictEqual(parsed.body.message, 'hello') +}) + +test('multiple paths redaction', () => { + const obj = { + user: { name: 'john', password: 'secret' }, + session: { token: 'abc123' } + } + + const redact = slowRedact({ + paths: ['user.password', 'session.token'] + }) + const result = redact(obj) + + // Original unchanged + assert.strictEqual(obj.user.password, 'secret') + assert.strictEqual(obj.session.token, 'abc123') + + // Result redacted + const parsed = JSON.parse(result) + assert.strictEqual(parsed.user.password, '[REDACTED]') + assert.strictEqual(parsed.session.token, '[REDACTED]') + assert.strictEqual(parsed.user.name, 'john') +}) + +test('custom censor value', () => { + const obj = { secret: 'hidden' } + const redact = slowRedact({ + paths: ['secret'], + censor: '***' + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.secret, '***') +}) + +test('serialize: false returns object with restore method', () 
=> { + const obj = { secret: 'hidden' } + const redact = slowRedact({ + paths: ['secret'], + serialize: false + }) + const result = redact(obj) + + // Should be object, not string + assert.strictEqual(typeof result, 'object') + assert.strictEqual(result.secret, '[REDACTED]') + + // Should have restore method + assert.strictEqual(typeof result.restore, 'function') + + const restored = result.restore() + assert.strictEqual(restored.secret, 'hidden') +}) + +test('bracket notation paths', () => { + const obj = { + 'weird-key': { 'another-weird': 'secret' }, + normal: 'public' + } + + const redact = slowRedact({ + paths: ['["weird-key"]["another-weird"]'] + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed['weird-key']['another-weird'], '[REDACTED]') + assert.strictEqual(parsed.normal, 'public') +}) + +test('array paths', () => { + const obj = { + users: [ + { name: 'john', password: 'secret1' }, + { name: 'jane', password: 'secret2' } + ] + } + + const redact = slowRedact({ + paths: ['users[0].password', 'users[1].password'] + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.users[0].password, '[REDACTED]') + assert.strictEqual(parsed.users[1].password, '[REDACTED]') + assert.strictEqual(parsed.users[0].name, 'john') + assert.strictEqual(parsed.users[1].name, 'jane') +}) + +test('wildcard at end of path', () => { + const obj = { + secrets: { + key1: 'secret1', + key2: 'secret2' + }, + public: 'data' + } + + const redact = slowRedact({ + paths: ['secrets.*'] + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.secrets.key1, '[REDACTED]') + assert.strictEqual(parsed.secrets.key2, '[REDACTED]') + assert.strictEqual(parsed.public, 'data') +}) + +test('wildcard with arrays', () => { + const obj = { + items: ['secret1', 'secret2', 'secret3'] + } + + const redact = slowRedact({ + paths: ['items.*'] + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.items[0], '[REDACTED]') + assert.strictEqual(parsed.items[1], '[REDACTED]') + assert.strictEqual(parsed.items[2], '[REDACTED]') +}) + +test('intermediate wildcard', () => { + const obj = { + users: { + user1: { password: 'secret1' }, + user2: { password: 'secret2' } + } + } + + const redact = slowRedact({ + paths: ['users.*.password'] + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.users.user1.password, '[REDACTED]') + assert.strictEqual(parsed.users.user2.password, '[REDACTED]') +}) + +test('censor function', () => { + const obj = { secret: 'hidden' } + const redact = slowRedact({ + paths: ['secret'], + censor: (value, path) => `REDACTED:${path.join('.')}` + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.secret, 'REDACTED:secret') +}) + +test('custom serialize function', () => { + const obj = { secret: 'hidden', public: 'data' } + const redact = slowRedact({ + paths: ['secret'], + serialize: (obj) => `custom:${JSON.stringify(obj)}` + }) + const result = redact(obj) + + assert(result.startsWith('custom:')) + const parsed = JSON.parse(result.slice(7)) + assert.strictEqual(parsed.secret, '[REDACTED]') + assert.strictEqual(parsed.public, 'data') +}) + +test('nested paths', () => { + const obj = { + level1: { + level2: { + level3: { + secret: 'hidden' + } + } + } + } + + const redact = slowRedact({ + paths: ['level1.level2.level3.secret'] + }) + const result = 
redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.level1.level2.level3.secret, '[REDACTED]') +}) + +test('non-existent paths are ignored', () => { + const obj = { existing: 'value' } + const redact = slowRedact({ + paths: ['nonexistent.path'] + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.existing, 'value') + assert.strictEqual(parsed.nonexistent, undefined) +}) + +test('null and undefined handling', () => { + const obj = { + nullValue: null, + undefinedValue: undefined, + nested: { + nullValue: null + } + } + + const redact = slowRedact({ + paths: ['nullValue', 'nested.nullValue'] + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.nullValue, '[REDACTED]') + assert.strictEqual(parsed.nested.nullValue, '[REDACTED]') +}) + +test('original object remains unchanged', () => { + const original = { + secret: 'hidden', + nested: { secret: 'hidden2' } + } + const copy = JSON.parse(JSON.stringify(original)) + + const redact = slowRedact({ + paths: ['secret', 'nested.secret'] + }) + redact(original) + + // Original should be completely unchanged + assert.deepStrictEqual(original, copy) +}) + +test('strict mode with primitives', () => { + const redact = slowRedact({ + paths: ['test'], + strict: true + }) + + const stringResult = redact('primitive') + assert.strictEqual(stringResult, '"primitive"') + + const numberResult = redact(42) + assert.strictEqual(numberResult, '42') +}) + +// Path validation tests to match fast-redact behavior +test('path validation - non-string paths should throw', () => { + assert.throws(() => { + slowRedact({ paths: [123] }) + }, { + message: 'Paths must be (non-empty) strings' + }) + + assert.throws(() => { + slowRedact({ paths: [null] }) + }, { + message: 'Paths must be (non-empty) strings' + }) + + assert.throws(() => { + slowRedact({ paths: [undefined] }) + }, { + message: 'Paths must be (non-empty) strings' + }) +}) + +test('path validation - empty string should throw', () => { + assert.throws(() => { + slowRedact({ paths: [''] }) + }, { + message: 'Invalid redaction path ()' + }) +}) + +test('path validation - double dots should throw', () => { + assert.throws(() => { + slowRedact({ paths: ['invalid..path'] }) + }, { + message: 'Invalid redaction path (invalid..path)' + }) + + assert.throws(() => { + slowRedact({ paths: ['a..b..c'] }) + }, { + message: 'Invalid redaction path (a..b..c)' + }) +}) + +test('path validation - unmatched brackets should throw', () => { + assert.throws(() => { + slowRedact({ paths: ['invalid[unclosed'] }) + }, { + message: 'Invalid redaction path (invalid[unclosed)' + }) + + assert.throws(() => { + slowRedact({ paths: ['invalid]unopened'] }) + }, { + message: 'Invalid redaction path (invalid]unopened)' + }) + + assert.throws(() => { + slowRedact({ paths: ['nested[a[b]'] }) + }, { + message: 'Invalid redaction path (nested[a[b])' + }) +}) + +test('path validation - comma-separated paths should throw', () => { + assert.throws(() => { + slowRedact({ paths: ['req,headers.cookie'] }) + }, { + message: 'Invalid redaction path (req,headers.cookie)' + }) + + assert.throws(() => { + slowRedact({ paths: ['user,profile,name'] }) + }, { + message: 'Invalid redaction path (user,profile,name)' + }) + + assert.throws(() => { + slowRedact({ paths: ['a,b'] }) + }, { + message: 'Invalid redaction path (a,b)' + }) +}) + +test('path validation - mixed valid and invalid should throw', () => { + assert.throws(() => { + slowRedact({ 
paths: ['valid.path', 123, 'another.valid'] }) + }, { + message: 'Paths must be (non-empty) strings' + }) + + assert.throws(() => { + slowRedact({ paths: ['valid.path', 'invalid..path'] }) + }, { + message: 'Invalid redaction path (invalid..path)' + }) + + assert.throws(() => { + slowRedact({ paths: ['valid.path', 'req,headers.cookie'] }) + }, { + message: 'Invalid redaction path (req,headers.cookie)' + }) +}) + +test('path validation - valid paths should work', () => { + // These should not throw + assert.doesNotThrow(() => { + slowRedact({ paths: [] }) + }) + + assert.doesNotThrow(() => { + slowRedact({ paths: ['valid.path'] }) + }) + + assert.doesNotThrow(() => { + slowRedact({ paths: ['user.password', 'data[0].secret'] }) + }) + + assert.doesNotThrow(() => { + slowRedact({ paths: ['["quoted-key"].value'] }) + }) + + assert.doesNotThrow(() => { + slowRedact({ paths: ["['single-quoted'].value"] }) + }) + + assert.doesNotThrow(() => { + slowRedact({ paths: ['array[0]', 'object.property', 'wildcard.*'] }) + }) +}) + +// fast-redact compatibility tests +test('censor function receives path as array (fast-redact compatibility)', () => { + const obj = { + headers: { + authorization: 'Bearer token', + 'x-api-key': 'secret-key' + } + } + + const pathsReceived = [] + const redact = slowRedact({ + paths: ['headers.authorization', 'headers["x-api-key"]'], + censor: (value, path) => { + pathsReceived.push(path) + assert(Array.isArray(path), 'Path should be an array') + return '[REDACTED]' + } + }) + + redact(obj) + + // Verify paths are arrays + assert.strictEqual(pathsReceived.length, 2) + assert.deepStrictEqual(pathsReceived[0], ['headers', 'authorization']) + assert.deepStrictEqual(pathsReceived[1], ['headers', 'x-api-key']) +}) + +test('censor function with nested paths receives correct array', () => { + const obj = { + user: { + profile: { + credentials: { + password: 'secret123' + } + } + } + } + + let receivedPath + const redact = slowRedact({ + paths: ['user.profile.credentials.password'], + censor: (value, path) => { + receivedPath = path + assert.strictEqual(value, 'secret123') + assert(Array.isArray(path)) + return '[REDACTED]' + } + }) + + redact(obj) + + assert.deepStrictEqual(receivedPath, ['user', 'profile', 'credentials', 'password']) +}) + +test('censor function with wildcards receives correct array paths', () => { + const obj = { + users: { + user1: { password: 'secret1' }, + user2: { password: 'secret2' } + } + } + + const pathsReceived = [] + const redact = slowRedact({ + paths: ['users.*.password'], + censor: (value, path) => { + pathsReceived.push([...path]) // copy the array + assert(Array.isArray(path)) + return '[REDACTED]' + } + }) + + redact(obj) + + assert.strictEqual(pathsReceived.length, 2) + assert.deepStrictEqual(pathsReceived[0], ['users', 'user1', 'password']) + assert.deepStrictEqual(pathsReceived[1], ['users', 'user2', 'password']) +}) + +test('censor function with array wildcard receives correct array paths', () => { + const obj = { + items: [ + { secret: 'value1' }, + { secret: 'value2' } + ] + } + + const pathsReceived = [] + const redact = slowRedact({ + paths: ['items.*.secret'], + censor: (value, path) => { + pathsReceived.push([...path]) + assert(Array.isArray(path)) + return '[REDACTED]' + } + }) + + redact(obj) + + assert.strictEqual(pathsReceived.length, 2) + assert.deepStrictEqual(pathsReceived[0], ['items', '0', 'secret']) + assert.deepStrictEqual(pathsReceived[1], ['items', '1', 'secret']) +}) + +test('censor function with end wildcard receives 
correct array paths', () => { + const obj = { + secrets: { + key1: 'secret1', + key2: 'secret2' + } + } + + const pathsReceived = [] + const redact = slowRedact({ + paths: ['secrets.*'], + censor: (value, path) => { + pathsReceived.push([...path]) + assert(Array.isArray(path)) + return '[REDACTED]' + } + }) + + redact(obj) + + assert.strictEqual(pathsReceived.length, 2) + // Sort paths for consistent testing since object iteration order isn't guaranteed + pathsReceived.sort((a, b) => a[1].localeCompare(b[1])) + assert.deepStrictEqual(pathsReceived[0], ['secrets', 'key1']) + assert.deepStrictEqual(pathsReceived[1], ['secrets', 'key2']) +}) + +test('type safety: accessing properties on primitive values should not throw', () => { + // Test case from GitHub issue #5 + const redactor = slowRedact({ paths: ['headers.authorization'] }) + const data = { + headers: 123 // primitive value + } + + assert.doesNotThrow(() => { + const result = redactor(data) + const parsed = JSON.parse(result) + assert.strictEqual(parsed.headers, 123) // Should remain unchanged + }) + + // Test wildcards with primitives + const redactor2 = slowRedact({ paths: ['data.*.nested'] }) + const data2 = { + data: { + item1: 123, // primitive, trying to access .nested on it + item2: { nested: 'secret' } + } + } + + assert.doesNotThrow(() => { + const result2 = redactor2(data2) + const parsed2 = JSON.parse(result2) + assert.strictEqual(parsed2.data.item1, 123) // Primitive unchanged + assert.strictEqual(parsed2.data.item2.nested, '[REDACTED]') // Object property redacted + }) + + // Test deep nested access on primitives + const redactor3 = slowRedact({ paths: ['user.name.first.charAt'] }) + const data3 = { + user: { + name: 'John' // string primitive + } + } + + assert.doesNotThrow(() => { + const result3 = redactor3(data3) + const parsed3 = JSON.parse(result3) + assert.strictEqual(parsed3.user.name, 'John') // Should remain unchanged + }) +}) + +// Remove option tests +test('remove option: basic key removal', () => { + const obj = { username: 'john', password: 'secret123' } + const redact = slowRedact({ paths: ['password'], remove: true }) + const result = redact(obj) + + // Original object should remain unchanged + assert.strictEqual(obj.password, 'secret123') + + // Result should have password completely removed + const parsed = JSON.parse(result) + assert.strictEqual(parsed.username, 'john') + assert.strictEqual('password' in parsed, false) + assert.strictEqual(parsed.password, undefined) +}) + +test('remove option: multiple paths removal', () => { + const obj = { + user: { name: 'john', password: 'secret' }, + session: { token: 'abc123', id: 'session1' } + } + + const redact = slowRedact({ + paths: ['user.password', 'session.token'], + remove: true + }) + const result = redact(obj) + + // Original unchanged + assert.strictEqual(obj.user.password, 'secret') + assert.strictEqual(obj.session.token, 'abc123') + + // Result has keys completely removed + const parsed = JSON.parse(result) + assert.strictEqual(parsed.user.name, 'john') + assert.strictEqual(parsed.session.id, 'session1') + assert.strictEqual('password' in parsed.user, false) + assert.strictEqual('token' in parsed.session, false) +}) + +test('remove option: wildcard removal', () => { + const obj = { + secrets: { + key1: 'secret1', + key2: 'secret2' + }, + public: 'data' + } + + const redact = slowRedact({ + paths: ['secrets.*'], + remove: true + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.public, 'data') + 
assert.deepStrictEqual(parsed.secrets, {}) // All keys removed +}) + +test('remove option: array wildcard removal', () => { + const obj = { + items: ['secret1', 'secret2', 'secret3'], + meta: 'data' + } + + const redact = slowRedact({ + paths: ['items.*'], + remove: true + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.meta, 'data') + // Array items set to undefined are serialized as null by JSON.stringify + assert.deepStrictEqual(parsed.items, [null, null, null]) +}) + +test('remove option: intermediate wildcard removal', () => { + const obj = { + users: { + user1: { password: 'secret1', name: 'john' }, + user2: { password: 'secret2', name: 'jane' } + } + } + + const redact = slowRedact({ + paths: ['users.*.password'], + remove: true + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.users.user1.name, 'john') + assert.strictEqual(parsed.users.user2.name, 'jane') + assert.strictEqual('password' in parsed.users.user1, false) + assert.strictEqual('password' in parsed.users.user2, false) +}) + +test('remove option: serialize false returns object with removed keys', () => { + const obj = { secret: 'hidden', public: 'data' } + const redact = slowRedact({ + paths: ['secret'], + remove: true, + serialize: false + }) + const result = redact(obj) + + // Should be object, not string + assert.strictEqual(typeof result, 'object') + assert.strictEqual(result.public, 'data') + assert.strictEqual('secret' in result, false) + + // Should have restore method + assert.strictEqual(typeof result.restore, 'function') + + const restored = result.restore() + assert.strictEqual(restored.secret, 'hidden') +}) + +test('remove option: non-existent paths are ignored', () => { + const obj = { existing: 'value' } + const redact = slowRedact({ + paths: ['nonexistent.path'], + remove: true + }) + const result = redact(obj) + + const parsed = JSON.parse(result) + assert.strictEqual(parsed.existing, 'value') + assert.strictEqual(parsed.nonexistent, undefined) +}) + +// Test for Issue #13: Empty string bracket notation paths not being redacted correctly +test('empty string bracket notation path', () => { + const obj = { '': { c: 'sensitive-data' } } + const redact = slowRedact({ paths: ["[''].c"] }) + const result = redact(obj) + + // Original object should remain unchanged + assert.strictEqual(obj[''].c, 'sensitive-data') + + // Result should have redacted path + const parsed = JSON.parse(result) + assert.strictEqual(parsed[''].c, '[REDACTED]') +}) + +test('empty string bracket notation with double quotes', () => { + const obj = { '': { c: 'sensitive-data' } } + const redact = slowRedact({ paths: ['[""].c'] }) + const result = redact(obj) + + // Original object should remain unchanged + assert.strictEqual(obj[''].c, 'sensitive-data') + + // Result should have redacted path + const parsed = JSON.parse(result) + assert.strictEqual(parsed[''].c, '[REDACTED]') +}) + +test('empty string key with nested bracket notation', () => { + const obj = { '': { '': { secret: 'value' } } } + const redact = slowRedact({ paths: ["[''][''].secret"] }) + const result = redact(obj) + + // Original object should remain unchanged + assert.strictEqual(obj[''][''].secret, 'value') + + // Result should have redacted path + const parsed = JSON.parse(result) + assert.strictEqual(parsed[''][''].secret, '[REDACTED]') +}) + +// Test for Pino issue #2313: censor should only be called when path exists +test('censor function not called for non-existent paths', () => { +
let censorCallCount = 0 + const censorCalls = [] + + const redact = slowRedact({ + paths: ['a.b.c', 'req.authorization', 'url'], + serialize: false, + censor (value, path) { + censorCallCount++ + censorCalls.push({ value, path: path.slice() }) + return '***' + } + }) + + // Test case 1: { req: { id: 'test' } } + // req.authorization doesn't exist, censor should not be called for it + censorCallCount = 0 + censorCalls.length = 0 + redact({ req: { id: 'test' } }) + + // Should not have been called for any path since none exist + assert.strictEqual(censorCallCount, 0, 'censor should not be called when paths do not exist') + + // Test case 2: { a: { d: 'test' } } + // a.b.c doesn't exist (a.d exists, but not a.b.c) + censorCallCount = 0 + redact({ a: { d: 'test' } }) + assert.strictEqual(censorCallCount, 0) + + // Test case 3: paths that do exist should still call censor + censorCallCount = 0 + censorCalls.length = 0 + const result = redact({ req: { authorization: 'bearer token' } }) + assert.strictEqual(censorCallCount, 1, 'censor should be called when path exists') + assert.deepStrictEqual(censorCalls[0].path, ['req', 'authorization']) + assert.strictEqual(censorCalls[0].value, 'bearer token') + assert.strictEqual(result.req.authorization, '***') +}) + +test('censor function not called for non-existent nested paths', () => { + let censorCallCount = 0 + + const redact = slowRedact({ + paths: ['headers.authorization'], + serialize: false, + censor (value, path) { + censorCallCount++ + return '[REDACTED]' + } + }) + + // headers exists but authorization doesn't + censorCallCount = 0 + const result1 = redact({ headers: { 'content-type': 'application/json' } }) + assert.strictEqual(censorCallCount, 0) + assert.deepStrictEqual(result1.headers, { 'content-type': 'application/json' }) + + // headers doesn't exist at all + censorCallCount = 0 + const result2 = redact({ body: 'data' }) + assert.strictEqual(censorCallCount, 0) + assert.strictEqual(result2.body, 'data') + assert.strictEqual(typeof result2.restore, 'function') + + // headers.authorization exists - should call censor + censorCallCount = 0 + const result3 = redact({ headers: { authorization: 'Bearer token' } }) + assert.strictEqual(censorCallCount, 1) + assert.strictEqual(result3.headers.authorization, '[REDACTED]') +}) diff --git a/node_modules/@pinojs/redact/test/integration.test.js b/node_modules/@pinojs/redact/test/integration.test.js new file mode 100644 index 0000000..1e1a01a --- /dev/null +++ b/node_modules/@pinojs/redact/test/integration.test.js @@ -0,0 +1,390 @@ +const { test } = require('node:test') +const { strict: assert } = require('node:assert') +const slowRedact = require('../index.js') +const fastRedact = require('fast-redact') + +test('integration: basic path redaction matches fast-redact', () => { + const obj = { + headers: { + cookie: 'secret-cookie', + authorization: 'Bearer token' + }, + body: { message: 'hello' } + } + + const slowResult = slowRedact({ paths: ['headers.cookie'] })(obj) + const fastResult = fastRedact({ paths: ['headers.cookie'] })(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: multiple paths match fast-redact', () => { + const obj = { + user: { name: 'john', password: 'secret' }, + session: { token: 'abc123' } + } + + const paths = ['user.password', 'session.token'] + const slowResult = slowRedact({ paths })(obj) + const fastResult = fastRedact({ paths })(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: custom censor value matches 
fast-redact', () => { + const obj = { secret: 'hidden' } + const options = { paths: ['secret'], censor: '***' } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: bracket notation matches fast-redact', () => { + const obj = { + 'weird-key': { 'another-weird': 'secret' }, + normal: 'public' + } + + const options = { paths: ['["weird-key"]["another-weird"]'] } + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: array paths match fast-redact', () => { + const obj = { + users: [ + { name: 'john', password: 'secret1' }, + { name: 'jane', password: 'secret2' } + ] + } + + const options = { paths: ['users[0].password', 'users[1].password'] } + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: wildcard at end matches fast-redact', () => { + const obj = { + secrets: { + key1: 'secret1', + key2: 'secret2' + }, + public: 'data' + } + + const options = { paths: ['secrets.*'] } + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: wildcard with arrays matches fast-redact', () => { + const obj = { + items: ['secret1', 'secret2', 'secret3'] + } + + const options = { paths: ['items.*'] } + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: intermediate wildcard matches fast-redact', () => { + const obj = { + users: { + user1: { password: 'secret1' }, + user2: { password: 'secret2' } + } + } + + const options = { paths: ['users.*.password'] } + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: custom serialize function matches fast-redact', () => { + const obj = { secret: 'hidden', public: 'data' } + const options = { + paths: ['secret'], + serialize: (obj) => `custom:${JSON.stringify(obj)}` + } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: nested paths match fast-redact', () => { + const obj = { + level1: { + level2: { + level3: { + secret: 'hidden' + } + } + } + } + + const options = { paths: ['level1.level2.level3.secret'] } + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: non-existent paths match fast-redact', () => { + const obj = { existing: 'value' } + const options = { paths: ['nonexistent.path'] } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: null and undefined handling - legitimate difference', () => { + const obj = { + nullValue: null, + undefinedValue: undefined, + nested: { + nullValue: null + } + } + + const options = { paths: ['nullValue', 'nested.nullValue'] } + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + // This is a legitimate behavioral difference: + // @pinojs/redact redacts null values, fast-redact doesn't + const 
slowParsed = JSON.parse(slowResult) + const fastParsed = JSON.parse(fastResult) + + // @pinojs/redact redacts nulls + assert.strictEqual(slowParsed.nullValue, '[REDACTED]') + assert.strictEqual(slowParsed.nested.nullValue, '[REDACTED]') + + // fast-redact preserves nulls + assert.strictEqual(fastParsed.nullValue, null) + assert.strictEqual(fastParsed.nested.nullValue, null) +}) + +test('integration: strict mode with primitives - different error handling', () => { + const options = { paths: ['test'], strict: true } + + const slowRedactFn = slowRedact(options) + const fastRedactFn = fastRedact(options) + + // @pinojs/redact handles primitives gracefully + const stringSlowResult = slowRedactFn('primitive') + assert.strictEqual(stringSlowResult, '"primitive"') + + const numberSlowResult = slowRedactFn(42) + assert.strictEqual(numberSlowResult, '42') + + // fast-redact throws an error for primitives in strict mode + assert.throws(() => { + fastRedactFn('primitive') + }, /primitives cannot be redacted/) + + assert.throws(() => { + fastRedactFn(42) + }, /primitives cannot be redacted/) +}) + +test('integration: serialize false behavior difference', () => { + const slowObj = { secret: 'hidden' } + const fastObj = { secret: 'hidden' } + const options = { paths: ['secret'], serialize: false } + + const slowResult = slowRedact(options)(slowObj) + const fastResult = fastRedact(options)(fastObj) + + // Both should redact the secret + assert.strictEqual(slowResult.secret, '[REDACTED]') + assert.strictEqual(fastResult.secret, '[REDACTED]') + + // @pinojs/redact always has restore method + assert.strictEqual(typeof slowResult.restore, 'function') + + // @pinojs/redact should restore to original value + assert.strictEqual(slowResult.restore().secret, 'hidden') + + // Key difference: original object state + // fast-redact mutates the original, @pinojs/redact doesn't + assert.strictEqual(slowObj.secret, 'hidden') // @pinojs/redact preserves original + assert.strictEqual(fastObj.secret, '[REDACTED]') // fast-redact mutates original +}) + +test('integration: censor function behavior', () => { + const obj = { secret: 'hidden' } + const options = { + paths: ['secret'], + censor: (value, path) => `REDACTED:${path}` + } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: complex object with mixed patterns', () => { + const obj = { + users: [ + { + id: 1, + name: 'john', + credentials: { password: 'secret1', apiKey: 'key1' } + }, + { + id: 2, + name: 'jane', + credentials: { password: 'secret2', apiKey: 'key2' } + } + ], + config: { + database: { password: 'db-secret' }, + api: { keys: ['key1', 'key2', 'key3'] } + } + } + + const options = { + paths: [ + 'users.*.credentials.password', + 'users.*.credentials.apiKey', + 'config.database.password', + 'config.api.keys.*' + ] + } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +// Remove option integration tests - comparing with fast-redact +test('integration: remove option basic comparison with fast-redact', () => { + const obj = { username: 'john', password: 'secret123' } + const options = { paths: ['password'], remove: true } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) + + // Verify the key is actually removed + const parsed = JSON.parse(slowResult) + 
assert.strictEqual(parsed.username, 'john') + assert.strictEqual('password' in parsed, false) +}) + +test('integration: remove option multiple paths comparison with fast-redact', () => { + const obj = { + user: { name: 'john', password: 'secret' }, + session: { token: 'abc123', id: 'session1' } + } + + const options = { + paths: ['user.password', 'session.token'], + remove: true + } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: remove option wildcard comparison with fast-redact', () => { + const obj = { + secrets: { + key1: 'secret1', + key2: 'secret2' + }, + public: 'data' + } + + const options = { + paths: ['secrets.*'], + remove: true + } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: remove option intermediate wildcard comparison with fast-redact', () => { + const obj = { + users: { + user1: { password: 'secret1', name: 'john' }, + user2: { password: 'secret2', name: 'jane' } + } + } + + const options = { + paths: ['users.*.password'], + remove: true + } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) +}) + +test('integration: remove option with custom censor comparison with fast-redact', () => { + const obj = { secret: 'hidden', public: 'data' } + const options = { + paths: ['secret'], + censor: '***', + remove: true + } + + const slowResult = slowRedact(options)(obj) + const fastResult = fastRedact(options)(obj) + + assert.strictEqual(slowResult, fastResult) + + // With remove: true, censor value should be ignored + const parsed = JSON.parse(slowResult) + assert.strictEqual('secret' in parsed, false) + assert.strictEqual(parsed.public, 'data') +}) + +test('integration: remove option serialize false behavior - @pinojs/redact only', () => { + // fast-redact doesn't support remove option with serialize: false + // so we test @pinojs/redact's behavior only + const obj = { secret: 'hidden', public: 'data' } + const options = { paths: ['secret'], remove: true, serialize: false } + + const result = slowRedact(options)(obj) + + // Should have the key removed + assert.strictEqual('secret' in result, false) + assert.strictEqual(result.public, 'data') + + // Should have restore method + assert.strictEqual(typeof result.restore, 'function') + + // Original object should be preserved + assert.strictEqual(obj.secret, 'hidden') + + // Restore should bring back the removed key + const restored = result.restore() + assert.strictEqual(restored.secret, 'hidden') +}) diff --git a/node_modules/@pinojs/redact/test/multiple-wildcards.test.js b/node_modules/@pinojs/redact/test/multiple-wildcards.test.js new file mode 100644 index 0000000..9526497 --- /dev/null +++ b/node_modules/@pinojs/redact/test/multiple-wildcards.test.js @@ -0,0 +1,227 @@ +'use strict' + +const { test } = require('node:test') +const { strict: assert } = require('node:assert') +const slowRedact = require('../index.js') + +// Tests for Issue #2319: @pinojs/redact fails to redact patterns with 3+ consecutive wildcards +test('three consecutive wildcards: *.*.*.password (4 levels deep)', () => { + const obj = { + simple: { password: 'secret-2-levels' }, + user: { auth: { password: 'secret-3-levels' } }, + nested: { deep: { auth: { password: 'secret-4-levels' } } } + } + + const redact = slowRedact({ + paths: 
['*.*.*.password'] + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // Only the 4-level deep password should be redacted + assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted') + assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted') + assert.strictEqual(parsed.nested.deep.auth.password, '[REDACTED]', '4-level password SHOULD be redacted') +}) + +test('four consecutive wildcards: *.*.*.*.password (5 levels deep)', () => { + const obj = { + simple: { password: 'secret-2-levels' }, + user: { auth: { password: 'secret-3-levels' } }, + nested: { deep: { auth: { password: 'secret-4-levels' } } }, + config: { user: { auth: { settings: { password: 'secret-5-levels' } } } } + } + + const redact = slowRedact({ + paths: ['*.*.*.*.password'] + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // Only the 5-level deep password should be redacted + assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted') + assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted') + assert.strictEqual(parsed.nested.deep.auth.password, 'secret-4-levels', '4-level password should NOT be redacted') + assert.strictEqual(parsed.config.user.auth.settings.password, '[REDACTED]', '5-level password SHOULD be redacted') +}) + +test('five consecutive wildcards: *.*.*.*.*.password (6 levels deep)', () => { + const obj = { + simple: { password: 'secret-2-levels' }, + user: { auth: { password: 'secret-3-levels' } }, + nested: { deep: { auth: { password: 'secret-4-levels' } } }, + config: { user: { auth: { settings: { password: 'secret-5-levels' } } } }, + data: { + reqConfig: { + data: { + credentials: { + settings: { + password: 'secret-6-levels' + } + } + } + } + } + } + + const redact = slowRedact({ + paths: ['*.*.*.*.*.password'] + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // Only the 6-level deep password should be redacted + assert.strictEqual(parsed.simple.password, 'secret-2-levels', '2-level password should NOT be redacted') + assert.strictEqual(parsed.user.auth.password, 'secret-3-levels', '3-level password should NOT be redacted') + assert.strictEqual(parsed.nested.deep.auth.password, 'secret-4-levels', '4-level password should NOT be redacted') + assert.strictEqual(parsed.config.user.auth.settings.password, 'secret-5-levels', '5-level password should NOT be redacted') + assert.strictEqual(parsed.data.reqConfig.data.credentials.settings.password, '[REDACTED]', '6-level password SHOULD be redacted') +}) + +test('three wildcards with censor function receives correct values', () => { + const obj = { + nested: { deep: { auth: { password: 'secret-value' } } } + } + + const censorCalls = [] + const redact = slowRedact({ + paths: ['*.*.*.password'], + censor: (value, path) => { + censorCalls.push({ value, path: [...path] }) + return '[REDACTED]' + } + }) + + const result = redact(obj) + const parsed = JSON.parse(result) + + // Should have been called exactly once with the correct value + assert.strictEqual(censorCalls.length, 1, 'censor should be called once') + assert.strictEqual(censorCalls[0].value, 'secret-value', 'censor should receive the actual value') + assert.deepStrictEqual(censorCalls[0].path, ['nested', 'deep', 'auth', 'password'], 'censor should receive correct path') + assert.strictEqual(parsed.nested.deep.auth.password, '[REDACTED]') +}) + 
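+// Note: each '*' segment matches exactly one key level, so a three-wildcard pattern reaches only values nested exactly four keys deep; the assertions above and below pin that shallower matches stay untouched.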
+test('three wildcards with multiple matches', () => { + const obj = { + api1: { v1: { auth: { token: 'token1' } } }, + api2: { v2: { auth: { token: 'token2' } } }, + api3: { v1: { auth: { token: 'token3' } } } + } + + const redact = slowRedact({ + paths: ['*.*.*.token'] + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // All three tokens should be redacted + assert.strictEqual(parsed.api1.v1.auth.token, '[REDACTED]') + assert.strictEqual(parsed.api2.v2.auth.token, '[REDACTED]') + assert.strictEqual(parsed.api3.v1.auth.token, '[REDACTED]') +}) + +test('three wildcards with remove option', () => { + const obj = { + nested: { deep: { auth: { password: 'secret', username: 'admin' } } } + } + + const redact = slowRedact({ + paths: ['*.*.*.password'], + remove: true + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // Password should be removed entirely + assert.strictEqual('password' in parsed.nested.deep.auth, false, 'password key should be removed') + assert.strictEqual(parsed.nested.deep.auth.username, 'admin', 'username should remain') +}) + +test('mixed: two and three wildcards in same redactor', () => { + const obj = { + user: { auth: { password: 'secret-3-levels' } }, + config: { deep: { auth: { password: 'secret-4-levels' } } } + } + + const redact = slowRedact({ + paths: ['*.*.password', '*.*.*.password'] + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // Both should be redacted + assert.strictEqual(parsed.user.auth.password, '[REDACTED]', '3-level should be redacted by *.*.password') + assert.strictEqual(parsed.config.deep.auth.password, '[REDACTED]', '4-level should be redacted by *.*.*.password') +}) + +test('three wildcards should not call censor for non-existent paths', () => { + const obj = { + shallow: { data: 'value' }, + nested: { deep: { auth: { password: 'secret' } } } + } + + let censorCallCount = 0 + const redact = slowRedact({ + paths: ['*.*.*.password'], + censor: (value, path) => { + censorCallCount++ + return '[REDACTED]' + } + }) + + redact(obj) + + // Should only be called once for the path that exists + assert.strictEqual(censorCallCount, 1, 'censor should only be called for existing paths') +}) + +test('three wildcards with arrays', () => { + const obj = { + users: [ + { auth: { password: 'secret1' } }, + { auth: { password: 'secret2' } } + ] + } + + const redact = slowRedact({ + paths: ['*.*.*.password'] + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // Both passwords should be redacted (users[0].auth.password is 4 levels) + assert.strictEqual(parsed.users[0].auth.password, '[REDACTED]') + assert.strictEqual(parsed.users[1].auth.password, '[REDACTED]') +}) + +test('four wildcards with authorization header (real-world case)', () => { + const obj = { + requests: { + api1: { + config: { + headers: { + authorization: 'Bearer secret-token' + } + } + }, + api2: { + config: { + headers: { + authorization: 'Bearer another-token' + } + } + } + } + } + + const redact = slowRedact({ + paths: ['*.*.*.*.authorization'] + }) + const result = redact(obj) + const parsed = JSON.parse(result) + + // Both authorization headers should be redacted + assert.strictEqual(parsed.requests.api1.config.headers.authorization, '[REDACTED]') + assert.strictEqual(parsed.requests.api2.config.headers.authorization, '[REDACTED]') +}) diff --git a/node_modules/@pinojs/redact/test/prototype-pollution.test.js b/node_modules/@pinojs/redact/test/prototype-pollution.test.js new file mode 100644 index 
0000000..4ed0a75 --- /dev/null +++ b/node_modules/@pinojs/redact/test/prototype-pollution.test.js @@ -0,0 +1,223 @@ +const { test } = require('node:test') +const { strict: assert } = require('node:assert') +const slowRedact = require('../index.js') + +/* eslint-disable no-proto */ + +test('prototype pollution: __proto__ path should not pollute Object prototype', () => { + const obj = { + user: { name: 'john' }, + __proto__: { isAdmin: true } + } + + const redact = slowRedact({ + paths: ['__proto__.isAdmin'], + serialize: false + }) + + const result = redact(obj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual({}.isAdmin, undefined) + + // Should redact the __proto__ property if it exists as a regular property + assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]') +}) + +test('prototype pollution: constructor.prototype path should not pollute', () => { + const obj = { + user: { name: 'john' }, + constructor: { + prototype: { isAdmin: true } + } + } + + const redact = slowRedact({ + paths: ['constructor.prototype.isAdmin'], + serialize: false + }) + + const result = redact(obj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual({}.isAdmin, undefined) + + // Should redact the constructor.prototype property if it exists as a regular property + assert.strictEqual(result.constructor.prototype.isAdmin, '[REDACTED]') +}) + +test('prototype pollution: nested __proto__ should not pollute', () => { + const obj = { + user: { + settings: { + __proto__: { isAdmin: true } + } + } + } + + const redact = slowRedact({ + paths: ['user.settings.__proto__.isAdmin'], + serialize: false + }) + + const result = redact(obj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual({}.isAdmin, undefined) + + // Should redact the nested __proto__ property + assert.strictEqual(result.user.settings.__proto__.isAdmin, '[REDACTED]') +}) + +test('prototype pollution: bracket notation __proto__ should not pollute', () => { + const obj = { + user: { name: 'john' }, + __proto__: { isAdmin: true } + } + + const redact = slowRedact({ + paths: ['["__proto__"]["isAdmin"]'], + serialize: false + }) + + const result = redact(obj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual({}.isAdmin, undefined) + + // Should redact the __proto__ property when accessed via bracket notation + assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]') +}) + +test('prototype pollution: wildcard with __proto__ should not pollute', () => { + const obj = { + users: { + __proto__: { isAdmin: true }, + user1: { name: 'john' }, + user2: { name: 'jane' } + } + } + + const redact = slowRedact({ + paths: ['users.*'], + serialize: false + }) + + const result = redact(obj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual({}.isAdmin, undefined) + + // Should redact only own properties + assert.strictEqual(result.users.user1, '[REDACTED]') + assert.strictEqual(result.users.user2, '[REDACTED]') + + // __proto__ should only be redacted if it's an own property, not inherited + if (Object.prototype.hasOwnProperty.call(obj.users, '__proto__')) { + assert.strictEqual(result.users.__proto__, '[REDACTED]') + } +}) + +test('prototype pollution: malicious JSON payload should not pollute', () => { + // Simulate a 
malicious payload that might come from JSON.parse + const maliciousObj = JSON.parse('{"user": {"name": "john"}, "__proto__": {"isAdmin": true}}') + + const redact = slowRedact({ + paths: ['__proto__.isAdmin'], + serialize: false + }) + + const result = redact(maliciousObj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual({}.isAdmin, undefined) + + // The malicious payload should have been redacted + assert.strictEqual(result.__proto__.isAdmin, '[REDACTED]') +}) + +test('prototype pollution: verify prototype chain is preserved', () => { + function CustomClass () { + this.data = 'test' + } + CustomClass.prototype.method = function () { return 'original' } + + const obj = new CustomClass() + + const redact = slowRedact({ + paths: ['data'], + serialize: false + }) + + const result = redact(obj) + + // Should redact the data property + assert.strictEqual(result.data, '[REDACTED]') + + // Should preserve the original prototype chain + assert.strictEqual(result.method(), 'original') + assert.strictEqual(Object.getPrototypeOf(result), CustomClass.prototype) +}) + +test('prototype pollution: setValue should not create prototype pollution', () => { + const obj = { user: { name: 'john' } } + + // Try to pollute via non-existent path that could create __proto__ + const redact = slowRedact({ + paths: ['__proto__.isAdmin'], + serialize: false + }) + + const result = redact(obj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual({}.isAdmin, undefined) + + // Should not create the path if it doesn't exist + // The __proto__ property may exist due to Object.create, but should not contain our redacted value + if (result.__proto__) { + assert.strictEqual(result.__proto__.isAdmin, undefined) + } +}) + +test('prototype pollution: deep nested prototype properties should not pollute', () => { + const obj = { + level1: { + level2: { + level3: { + __proto__: { isAdmin: true }, + constructor: { + prototype: { isEvil: true } + } + } + } + } + } + + const redact = slowRedact({ + paths: [ + 'level1.level2.level3.__proto__.isAdmin', + 'level1.level2.level3.constructor.prototype.isEvil' + ], + serialize: false + }) + + const result = redact(obj) + + // Should not pollute Object.prototype + assert.strictEqual(Object.prototype.isAdmin, undefined) + assert.strictEqual(Object.prototype.isEvil, undefined) + assert.strictEqual({}.isAdmin, undefined) + assert.strictEqual({}.isEvil, undefined) + + // Should redact the deep nested properties + assert.strictEqual(result.level1.level2.level3.__proto__.isAdmin, '[REDACTED]') + assert.strictEqual(result.level1.level2.level3.constructor.prototype.isEvil, '[REDACTED]') +}) diff --git a/node_modules/@pinojs/redact/test/selective-clone.test.js b/node_modules/@pinojs/redact/test/selective-clone.test.js new file mode 100644 index 0000000..1ea90c8 --- /dev/null +++ b/node_modules/@pinojs/redact/test/selective-clone.test.js @@ -0,0 +1,115 @@ +const { test } = require('node:test') +const { strict: assert } = require('node:assert') +const slowRedact = require('../index.js') + +test('selective cloning shares references for non-redacted paths', () => { + const sharedObject = { unchanged: 'data' } + const obj = { + toRedact: 'secret', + shared: sharedObject, + nested: { + toRedact: 'secret2', + shared: sharedObject + } + } + + const redact = slowRedact({ + paths: ['toRedact', 'nested.toRedact'], + serialize: false + }) + + const result = redact(obj) + + 
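+  // Selective cloning copies only the objects that sit on a redacted path; every other subtree keeps its original reference, which the strictEqual checks below verify.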
// Redacted values should be different + assert.strictEqual(result.toRedact, '[REDACTED]') + assert.strictEqual(result.nested.toRedact, '[REDACTED]') + + // Non-redacted references should be shared (same object reference) + assert.strictEqual(result.shared, obj.shared) + assert.strictEqual(result.nested.shared, obj.nested.shared) + + // The shared object should be the exact same reference + assert.strictEqual(result.shared, sharedObject) + assert.strictEqual(result.nested.shared, sharedObject) +}) + +test('selective cloning works with arrays', () => { + const sharedItem = { unchanged: 'data' } + const obj = { + items: [ + { secret: 'hidden1', shared: sharedItem }, + { secret: 'hidden2', shared: sharedItem }, + sharedItem + ] + } + + const redact = slowRedact({ + paths: ['items.*.secret'], + serialize: false + }) + + const result = redact(obj) + + // Secrets should be redacted + assert.strictEqual(result.items[0].secret, '[REDACTED]') + assert.strictEqual(result.items[1].secret, '[REDACTED]') + + // Shared references should be preserved where possible + // Note: array items with secrets will be cloned, but their shared properties should still reference the original + assert.strictEqual(result.items[0].shared, sharedItem) + assert.strictEqual(result.items[1].shared, sharedItem) + + // The third item gets cloned due to wildcard, but should have the same content + assert.deepStrictEqual(result.items[2], sharedItem) + // Note: Due to wildcard '*', all array items are cloned, even if they don't need redaction + // This is still a significant optimization for object properties that aren't in wildcard paths +}) + +test('selective cloning with no paths returns original object', () => { + const obj = { data: 'unchanged' } + const redact = slowRedact({ + paths: [], + serialize: false + }) + + const result = redact(obj) + + // Should return the exact same object reference + assert.strictEqual(result, obj) +}) + +test('selective cloning performance - large objects with minimal redaction', () => { + // Create a large object with mostly shared data + const sharedData = { large: 'data'.repeat(1000) } + const obj = { + secret: 'hidden', + shared1: sharedData, + shared2: sharedData, + nested: { + secret: 'hidden2', + shared3: sharedData, + deep: { + shared4: sharedData, + moreShared: sharedData + } + } + } + + const redact = slowRedact({ + paths: ['secret', 'nested.secret'], + serialize: false + }) + + const result = redact(obj) + + // Verify redaction worked + assert.strictEqual(result.secret, '[REDACTED]') + assert.strictEqual(result.nested.secret, '[REDACTED]') + + // Verify shared references are preserved + assert.strictEqual(result.shared1, sharedData) + assert.strictEqual(result.shared2, sharedData) + assert.strictEqual(result.nested.shared3, sharedData) + assert.strictEqual(result.nested.deep.shared4, sharedData) + assert.strictEqual(result.nested.deep.moreShared, sharedData) +}) diff --git a/node_modules/@pinojs/redact/tsconfig.json b/node_modules/@pinojs/redact/tsconfig.json new file mode 100644 index 0000000..0319609 --- /dev/null +++ b/node_modules/@pinojs/redact/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "module": "commonjs", + "lib": [ + "es6" + ], + "noImplicitAny": true, + "noImplicitThis": true, + "strictFunctionTypes": true, + "strictNullChecks": true, + "types": [], + "noEmit": true, + "forceConsistentCasingInFileNames": true + }, + "files": [ + "index.d.ts", + "index.test-d.ts" + ] +} diff --git a/node_modules/atomic-sleep/.travis.yml 
b/node_modules/atomic-sleep/.travis.yml new file mode 100644 index 0000000..3bb09da --- /dev/null +++ b/node_modules/atomic-sleep/.travis.yml @@ -0,0 +1,11 @@ +language: node_js +sudo: false +node_js: + - 6 + - 8 + - 10 + - 11 + - 12 + - 13 +script: + - npm run ci \ No newline at end of file diff --git a/node_modules/atomic-sleep/LICENSE b/node_modules/atomic-sleep/LICENSE new file mode 100644 index 0000000..d1d8849 --- /dev/null +++ b/node_modules/atomic-sleep/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) +Copyright (c) 2020 David Mark Clements + + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/atomic-sleep/index.js b/node_modules/atomic-sleep/index.js new file mode 100644 index 0000000..fbfc8b2 --- /dev/null +++ b/node_modules/atomic-sleep/index.js @@ -0,0 +1,38 @@ +'use strict' + +/* global SharedArrayBuffer, Atomics */ + +if (typeof SharedArrayBuffer !== 'undefined' && typeof Atomics !== 'undefined') { + const nil = new Int32Array(new SharedArrayBuffer(4)) + + function sleep (ms) { + // also filters out NaN, non-number types, including empty strings, but allows bigints + const valid = ms > 0 && ms < Infinity + if (valid === false) { + if (typeof ms !== 'number' && typeof ms !== 'bigint') { + throw TypeError('sleep: ms must be a number') + } + throw RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity') + } + + Atomics.wait(nil, 0, 0, Number(ms)) // nil[0] is always 0, so this blocks without spinning until the timeout elapses + } + module.exports = sleep +} else { + + function sleep (ms) { + // also filters out NaN, non-number types, including empty strings, but allows bigints + const valid = ms > 0 && ms < Infinity + if (valid === false) { + if (typeof ms !== 'number' && typeof ms !== 'bigint') { + throw TypeError('sleep: ms must be a number') + } + throw RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity') + } + const target = Date.now() + Number(ms) + while (target > Date.now()){} // busy-wait fallback: burns CPU until the deadline passes + } + + module.exports = sleep + +} diff --git a/node_modules/atomic-sleep/package.json b/node_modules/atomic-sleep/package.json new file mode 100644 index 0000000..cfdf200 --- /dev/null +++ b/node_modules/atomic-sleep/package.json @@ -0,0 +1,37 @@ +{ + "name": "atomic-sleep", + "version": "1.0.0", + "description": "Zero CPU overhead, zero dependency, true event-loop blocking sleep", + "main": "index.js", + "scripts": { + "test": "tap -R classic -j1 test", + "lint": "standard", + "ci": "npm run lint && npm test" + }, + "keywords": [ + "sleep", + "pause", + "wait", + "performance", + "atomics" + ], + "engines": {
"node": ">=8.0.0" + }, + "author": "David Mark Clements (@davidmarkclem)", + "license": "MIT", + "devDependencies": { + "standard": "^14.3.1", + "tap": "^14.10.6", + "tape": "^4.13.2" + }, + "dependencies": {}, + "repository": { + "type": "git", + "url": "git+https://github.com/davidmarkclements/atomic-sleep.git" + }, + "bugs": { + "url": "https://github.com/davidmarkclements/atomic-sleep/issues" + }, + "homepage": "https://github.com/davidmarkclements/atomic-sleep#readme" +} diff --git a/node_modules/atomic-sleep/readme.md b/node_modules/atomic-sleep/readme.md new file mode 100644 index 0000000..3cdd91b --- /dev/null +++ b/node_modules/atomic-sleep/readme.md @@ -0,0 +1,58 @@ +

+# Welcome to atomic-sleep ⏱️
+ +> Zero CPU overhead, zero dependency, true event-loop blocking sleep + +## Usage + +```js +const sleep = require('atomic-sleep') + +console.time('sleep') +setTimeout(() => { console.timeEnd('sleep') }, 100) +sleep(1000) +``` + +The `console.time` will report a time of just over 1000ms despite the `setTimeout` +being 100ms. This is because the event loop is paused for 1000ms and the setTimeout +fires immediately after the event loop is no longer blocked (as more than 100ms have passed). + +## Install + +```sh +npm install atomic-sleep +``` + +## Run tests + +```sh +npm test +``` + +## Support + +Node and Browser versions that support both `SharedArrayBuffer` and `Atomics` will have (virtually) zero CPU overhead sleep. + +For Node, Atomic Sleep can provide zero CPU overhead sleep from Node 8 and up. + +For browser support see https://caniuse.com/#feat=sharedarraybuffer and https://caniuse.com/#feat=mdn-javascript_builtins_atomics. + +For older Node versions and older browsers we fall back to blocking the event loop in a way that will cause a CPU spike. + +## Author + +👤 **David Mark Clements (@davidmarkclem)** + +* Twitter: [@davidmarkclem](https://twitter.com/davidmarkclem) +* Github: [@davidmarkclements](https://github.com/davidmarkclements) diff --git a/node_modules/atomic-sleep/test.js b/node_modules/atomic-sleep/test.js new file mode 100644 index 0000000..0020daa --- /dev/null +++ b/node_modules/atomic-sleep/test.js @@ -0,0 +1,47 @@ +'use strict' +const test = require('tape') +const sleep = require('.') + +test('blocks event loop for given amount of milliseconds', ({ is, end }) => { + const now = Date.now() + setTimeout(() => { + const delta = Date.now() - now + const fuzzyDelta = Math.floor(delta / 10) * 10 // allow up to 10ms of execution lag + is(fuzzyDelta, 1000) + end() + }, 100) + sleep(1000) +}) + +if (typeof BigInt !== 'undefined') { + + test('allows ms to be supplied as a BigInt number', ({ is, end }) => { + const now = Date.now() + setTimeout(() => { + const delta = Date.now() - now + const fuzzyDelta = Math.floor(delta / 10) * 10 // allow up to 10ms of execution lag + is(fuzzyDelta, 1000) + end() + }, 100) + sleep(BigInt(1000)) // avoiding n notation as this will error on legacy node/browsers + }) + +} + +test('throws range error if ms less than 0', ({ throws, end }) => { + throws(() => sleep(-1), RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity')) + end() +}) + +test('throws range error if ms is Infinity', ({ throws, end }) => { + throws(() => sleep(Infinity), RangeError('sleep: ms must be a number that is greater than 0 but less than Infinity')) + end() +}) + +test('throws type error if ms is not a number or bigint', ({ throws, end }) => { + throws(() => sleep('Infinity'), TypeError('sleep: ms must be a number')) + throws(() => sleep('foo'), TypeError('sleep: ms must be a number')) + throws(() => sleep({a: 1}), TypeError('sleep: ms must be a number')) + throws(() => sleep([1,2,3]), TypeError('sleep: ms must be a number')) + end() +}) \ No newline at end of file diff --git a/node_modules/colorette/LICENSE.md b/node_modules/colorette/LICENSE.md new file mode 100644 index 0000000..6ba7a0f --- /dev/null +++ b/node_modules/colorette/LICENSE.md @@ -0,0 +1,7 @@ +Copyright © Jorge Bucaran <> + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/colorette/README.md b/node_modules/colorette/README.md new file mode 100644 index 0000000..693ae44 --- /dev/null +++ b/node_modules/colorette/README.md @@ -0,0 +1,134 @@ +# 🌈Colorette + +> Easily set your terminal text color & styles. + +- No dependencies +- Automatic color support detection +- Up to [2x faster](#benchmarks) than alternatives +- TypeScript support +- [`NO_COLOR`](https://no-color.org) friendly +- Node >= `10` + +> [**Upgrading from Colorette `1.x`?**](https://github.com/jorgebucaran/colorette/issues/70) + +## Quickstart + +```js +import { blue, bold, underline } from "colorette" + +console.log( + blue("I'm blue"), + bold(blue("da ba dee")), + underline(bold(blue("da ba daa"))) +) +``` + +Here's an example using [template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals). + +```js +console.log(` + There's a ${underline(blue("house"))}, + With a ${bold(blue("window"))}, + And a ${blue("corvette")} + And everything is blue +`) +``` + +You can also nest styles without breaking existing color sequences. + +```js +console.log(bold(`I'm ${blue(`da ba ${underline("dee")} da ba`)} daa`)) +``` + +Need to override terminal color detection? You can do that too. + +```js +import { createColors } from "colorette" + +const { blue } = createColors({ useColor: false }) + +console.log(blue("Blue? Nope, nah")) +``` + +## Installation + +```console +npm install colorette +``` + +## API + +### \<color\>() + +> See all [supported colors](#supported-colors). + +```js +import { blue } from "colorette" + +blue("I'm blue") //=> \x1b[34mI'm blue\x1b[39m +``` + +### createColors() + +Override terminal color detection via `createColors({ useColor })`. + +```js +import { createColors } from "colorette" + +const { blue } = createColors({ useColor: false }) +``` + +### isColorSupported + +`true` if your terminal supports color, `false` otherwise. Used internally, but exposed for convenience. + +## Environment + +You can override color detection from the CLI by setting the `--no-color` or `--color` flags. + +```console +$ ./example.js --no-color | ./consumer.js +``` + +Or if you can't use CLI flags, by setting the `NO_COLOR=` or `FORCE_COLOR=` environment variables.
+ +```console +$ NO_COLOR= ./example.js | ./consumer.js +``` + +## Supported colors + +| Colors | Background Colors | Bright Colors | Bright Background Colors | Modifiers | +| ------- | ----------------- | ------------- | ------------------------ | ----------------- | +| black | bgBlack | blackBright | bgBlackBright | dim | +| red | bgRed | redBright | bgRedBright | **bold** | +| green | bgGreen | greenBright | bgGreenBright | hidden | +| yellow | bgYellow | yellowBright | bgYellowBright | _italic_ | +| blue | bgBlue | blueBright | bgBlueBright | underline | +| magenta | bgMagenta | magentaBright | bgMagentaBright | ~~strikethrough~~ | +| cyan | bgCyan | cyanBright | bgCyanBright | reset | +| white | bgWhite | whiteBright | bgWhiteBright | | +| gray | | | | | + +## [Benchmarks](https://github.com/jorgebucaran/colorette/actions/workflows/bench.yml) + +```console +npm --prefix bench start +``` + +```diff + chalk 1,786,703 ops/sec + kleur 1,618,960 ops/sec + colors 646,823 ops/sec + ansi-colors 786,149 ops/sec + picocolors 2,871,758 ops/sec ++ colorette 3,002,751 ops/sec +``` + +## Acknowledgments + +Colorette started out in 2015 by [@jorgebucaran](https://github.com/jorgebucaran) as a lightweight alternative to [Chalk](https://github.com/chalk/chalk) and was introduced originally as [Clor](https://github.com/jorgebucaran/colorette/commit/b01b5b9961ceb7df878583a3002e836fae9e37ce). Our terminal color detection logic borrows heavily from [@sindresorhus](https://github.com/sindresorhus) and [@Qix-](https://github.com/Qix-) work on Chalk. The idea of slicing strings to clear bleeding sequences was adapted from a similar technique used by [@alexeyraspopov](https://github.com/alexeyraspopov) in [picocolors](https://github.com/alexeyraspopov/picocolors). Thank you to all our contributors! <3 + +## License + +[MIT](LICENSE.md) diff --git a/node_modules/colorette/index.cjs b/node_modules/colorette/index.cjs new file mode 100644 index 0000000..baf0e64 --- /dev/null +++ b/node_modules/colorette/index.cjs @@ -0,0 +1,218 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var tty = require('tty'); + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var tty__namespace = /*#__PURE__*/_interopNamespace(tty); + +const { + env = {}, + argv = [], + platform = "", +} = typeof process === "undefined" ? {} : process; + +const isDisabled = "NO_COLOR" in env || argv.includes("--no-color"); +const isForced = "FORCE_COLOR" in env || argv.includes("--color"); +const isWindows = platform === "win32"; +const isDumbTerminal = env.TERM === "dumb"; + +const isCompatibleTerminal = + tty__namespace && tty__namespace.isatty && tty__namespace.isatty(1) && env.TERM && !isDumbTerminal; + +const isCI = + "CI" in env && + ("GITHUB_ACTIONS" in env || "GITLAB_CI" in env || "CIRCLECI" in env); + +const isColorSupported = + !isDisabled && + (isForced || (isWindows && !isDumbTerminal) || isCompatibleTerminal || isCI); + +const replaceClose = ( + index, + string, + close, + replace, + head = string.substring(0, index) + replace, + tail = string.substring(index + close.length), + next = tail.indexOf(close) +) => head + (next < 0 ? 
tail : replaceClose(next, tail, close, replace)); + +const clearBleed = (index, string, open, close, replace) => + index < 0 + ? open + string + close + : open + replaceClose(index, string, close, replace) + close; + +const filterEmpty = + (open, close, replace = open, at = open.length + 1) => + (string) => + string || !(string === "" || string === undefined) + ? clearBleed( + ("" + string).indexOf(close, at), + string, + open, + close, + replace + ) + : ""; + +const init = (open, close, replace) => + filterEmpty(`\x1b[${open}m`, `\x1b[${close}m`, replace); + +const colors = { + reset: init(0, 0), + bold: init(1, 22, "\x1b[22m\x1b[1m"), + dim: init(2, 22, "\x1b[22m\x1b[2m"), + italic: init(3, 23), + underline: init(4, 24), + inverse: init(7, 27), + hidden: init(8, 28), + strikethrough: init(9, 29), + black: init(30, 39), + red: init(31, 39), + green: init(32, 39), + yellow: init(33, 39), + blue: init(34, 39), + magenta: init(35, 39), + cyan: init(36, 39), + white: init(37, 39), + gray: init(90, 39), + bgBlack: init(40, 49), + bgRed: init(41, 49), + bgGreen: init(42, 49), + bgYellow: init(43, 49), + bgBlue: init(44, 49), + bgMagenta: init(45, 49), + bgCyan: init(46, 49), + bgWhite: init(47, 49), + blackBright: init(90, 39), + redBright: init(91, 39), + greenBright: init(92, 39), + yellowBright: init(93, 39), + blueBright: init(94, 39), + magentaBright: init(95, 39), + cyanBright: init(96, 39), + whiteBright: init(97, 39), + bgBlackBright: init(100, 49), + bgRedBright: init(101, 49), + bgGreenBright: init(102, 49), + bgYellowBright: init(103, 49), + bgBlueBright: init(104, 49), + bgMagentaBright: init(105, 49), + bgCyanBright: init(106, 49), + bgWhiteBright: init(107, 49), +}; + +const createColors = ({ useColor = isColorSupported } = {}) => + useColor + ? 
colors + : Object.keys(colors).reduce( + (colors, key) => ({ ...colors, [key]: String }), + {} + ); + +const { + reset, + bold, + dim, + italic, + underline, + inverse, + hidden, + strikethrough, + black, + red, + green, + yellow, + blue, + magenta, + cyan, + white, + gray, + bgBlack, + bgRed, + bgGreen, + bgYellow, + bgBlue, + bgMagenta, + bgCyan, + bgWhite, + blackBright, + redBright, + greenBright, + yellowBright, + blueBright, + magentaBright, + cyanBright, + whiteBright, + bgBlackBright, + bgRedBright, + bgGreenBright, + bgYellowBright, + bgBlueBright, + bgMagentaBright, + bgCyanBright, + bgWhiteBright, +} = createColors(); + +exports.bgBlack = bgBlack; +exports.bgBlackBright = bgBlackBright; +exports.bgBlue = bgBlue; +exports.bgBlueBright = bgBlueBright; +exports.bgCyan = bgCyan; +exports.bgCyanBright = bgCyanBright; +exports.bgGreen = bgGreen; +exports.bgGreenBright = bgGreenBright; +exports.bgMagenta = bgMagenta; +exports.bgMagentaBright = bgMagentaBright; +exports.bgRed = bgRed; +exports.bgRedBright = bgRedBright; +exports.bgWhite = bgWhite; +exports.bgWhiteBright = bgWhiteBright; +exports.bgYellow = bgYellow; +exports.bgYellowBright = bgYellowBright; +exports.black = black; +exports.blackBright = blackBright; +exports.blue = blue; +exports.blueBright = blueBright; +exports.bold = bold; +exports.createColors = createColors; +exports.cyan = cyan; +exports.cyanBright = cyanBright; +exports.dim = dim; +exports.gray = gray; +exports.green = green; +exports.greenBright = greenBright; +exports.hidden = hidden; +exports.inverse = inverse; +exports.isColorSupported = isColorSupported; +exports.italic = italic; +exports.magenta = magenta; +exports.magentaBright = magentaBright; +exports.red = red; +exports.redBright = redBright; +exports.reset = reset; +exports.strikethrough = strikethrough; +exports.underline = underline; +exports.white = white; +exports.whiteBright = whiteBright; +exports.yellow = yellow; +exports.yellowBright = yellowBright; diff --git a/node_modules/colorette/index.d.ts b/node_modules/colorette/index.d.ts new file mode 100644 index 0000000..ba32c9f --- /dev/null +++ b/node_modules/colorette/index.d.ts @@ -0,0 +1,93 @@ +declare module "colorette" { + type Color = (text: string | number) => string + + interface Colorette { + reset: Color + bold: Color + dim: Color + italic: Color + underline: Color + inverse: Color + hidden: Color + strikethrough: Color + black: Color + red: Color + green: Color + yellow: Color + blue: Color + magenta: Color + cyan: Color + white: Color + gray: Color + bgBlack: Color + bgRed: Color + bgGreen: Color + bgYellow: Color + bgBlue: Color + bgMagenta: Color + bgCyan: Color + bgWhite: Color + blackBright: Color + redBright: Color + greenBright: Color + yellowBright: Color + blueBright: Color + magentaBright: Color + cyanBright: Color + whiteBright: Color + bgBlackBright: Color + bgRedBright: Color + bgGreenBright: Color + bgYellowBright: Color + bgBlueBright: Color + bgMagentaBright: Color + bgCyanBright: Color + bgWhiteBright: Color + } + + const reset: Color + const bold: Color + const dim: Color + const italic: Color + const underline: Color + const inverse: Color + const hidden: Color + const strikethrough: Color + const black: Color + const red: Color + const green: Color + const yellow: Color + const blue: Color + const magenta: Color + const cyan: Color + const white: Color + const gray: Color + const bgBlack: Color + const bgRed: Color + const bgGreen: Color + const bgYellow: Color + const bgBlue: Color + const bgMagenta: Color + const 
bgCyan: Color + const bgWhite: Color + const blackBright: Color + const redBright: Color + const greenBright: Color + const yellowBright: Color + const blueBright: Color + const magentaBright: Color + const cyanBright: Color + const whiteBright: Color + const bgBlackBright: Color + const bgRedBright: Color + const bgGreenBright: Color + const bgYellowBright: Color + const bgBlueBright: Color + const bgMagentaBright: Color + const bgCyanBright: Color + const bgWhiteBright: Color + + const isColorSupported: boolean + + function createColors(options?: { useColor: boolean }): Colorette +} diff --git a/node_modules/colorette/index.js b/node_modules/colorette/index.js new file mode 100644 index 0000000..0d64e6b --- /dev/null +++ b/node_modules/colorette/index.js @@ -0,0 +1,150 @@ +import * as tty from "tty" + +const { + env = {}, + argv = [], + platform = "", +} = typeof process === "undefined" ? {} : process + +const isDisabled = "NO_COLOR" in env || argv.includes("--no-color") +const isForced = "FORCE_COLOR" in env || argv.includes("--color") +const isWindows = platform === "win32" +const isDumbTerminal = env.TERM === "dumb" + +const isCompatibleTerminal = + tty && tty.isatty && tty.isatty(1) && env.TERM && !isDumbTerminal + +const isCI = + "CI" in env && + ("GITHUB_ACTIONS" in env || "GITLAB_CI" in env || "CIRCLECI" in env) + +export const isColorSupported = + !isDisabled && + (isForced || (isWindows && !isDumbTerminal) || isCompatibleTerminal || isCI) + +const replaceClose = ( + index, + string, + close, + replace, + head = string.substring(0, index) + replace, + tail = string.substring(index + close.length), + next = tail.indexOf(close) +) => head + (next < 0 ? tail : replaceClose(next, tail, close, replace)) + +const clearBleed = (index, string, open, close, replace) => + index < 0 + ? open + string + close + : open + replaceClose(index, string, close, replace) + close + +const filterEmpty = + (open, close, replace = open, at = open.length + 1) => + (string) => + string || !(string === "" || string === undefined) + ? clearBleed( + ("" + string).indexOf(close, at), + string, + open, + close, + replace + ) + : "" + +const init = (open, close, replace) => + filterEmpty(`\x1b[${open}m`, `\x1b[${close}m`, replace) + +const colors = { + reset: init(0, 0), + bold: init(1, 22, "\x1b[22m\x1b[1m"), + dim: init(2, 22, "\x1b[22m\x1b[2m"), + italic: init(3, 23), + underline: init(4, 24), + inverse: init(7, 27), + hidden: init(8, 28), + strikethrough: init(9, 29), + black: init(30, 39), + red: init(31, 39), + green: init(32, 39), + yellow: init(33, 39), + blue: init(34, 39), + magenta: init(35, 39), + cyan: init(36, 39), + white: init(37, 39), + gray: init(90, 39), + bgBlack: init(40, 49), + bgRed: init(41, 49), + bgGreen: init(42, 49), + bgYellow: init(43, 49), + bgBlue: init(44, 49), + bgMagenta: init(45, 49), + bgCyan: init(46, 49), + bgWhite: init(47, 49), + blackBright: init(90, 39), + redBright: init(91, 39), + greenBright: init(92, 39), + yellowBright: init(93, 39), + blueBright: init(94, 39), + magentaBright: init(95, 39), + cyanBright: init(96, 39), + whiteBright: init(97, 39), + bgBlackBright: init(100, 49), + bgRedBright: init(101, 49), + bgGreenBright: init(102, 49), + bgYellowBright: init(103, 49), + bgBlueBright: init(104, 49), + bgMagentaBright: init(105, 49), + bgCyanBright: init(106, 49), + bgWhiteBright: init(107, 49), +} + +export const createColors = ({ useColor = isColorSupported } = {}) => + useColor + ? 
colors + : Object.keys(colors).reduce( + (colors, key) => ({ ...colors, [key]: String }), + {} + ) + +export const { + reset, + bold, + dim, + italic, + underline, + inverse, + hidden, + strikethrough, + black, + red, + green, + yellow, + blue, + magenta, + cyan, + white, + gray, + bgBlack, + bgRed, + bgGreen, + bgYellow, + bgBlue, + bgMagenta, + bgCyan, + bgWhite, + blackBright, + redBright, + greenBright, + yellowBright, + blueBright, + magentaBright, + cyanBright, + whiteBright, + bgBlackBright, + bgRedBright, + bgGreenBright, + bgYellowBright, + bgBlueBright, + bgMagentaBright, + bgCyanBright, + bgWhiteBright, +} = createColors() diff --git a/node_modules/colorette/package.json b/node_modules/colorette/package.json new file mode 100644 index 0000000..d827eee --- /dev/null +++ b/node_modules/colorette/package.json @@ -0,0 +1,40 @@ +{ + "name": "colorette", + "version": "2.0.20", + "type": "module", + "main": "index.cjs", + "module": "index.js", + "types": "index.d.ts", + "description": "🌈Easily set your terminal text color & styles.", + "repository": "jorgebucaran/colorette", + "license": "MIT", + "exports": { + "./package.json": "./package.json", + ".": { + "require": "./index.cjs", + "import": "./index.js", + "types": "./index.d.ts" + } + }, + "files": [ + "*.*(c)[tj]s*" + ], + "author": "Jorge Bucaran", + "keywords": [ + "terminal", + "styles", + "color", + "ansi" + ], + "scripts": { + "test": "c8 twist tests/*.js", + "build": "npx rollup --format cjs --input index.js --file index.cjs", + "deploy": "npm test && git commit --all --message $tag && git tag --sign $tag --message $tag && git push && git push --tags", + "release": "tag=$npm_package_version npm run deploy && npm publish --access public", + "prepare": "npm run build" + }, + "devDependencies": { + "c8": "*", + "twist": "*" + } +} diff --git a/node_modules/dateformat/LICENSE b/node_modules/dateformat/LICENSE new file mode 100644 index 0000000..57d44e2 --- /dev/null +++ b/node_modules/dateformat/LICENSE @@ -0,0 +1,20 @@ +(c) 2007-2009 Steven Levithan + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/dateformat/Readme.md b/node_modules/dateformat/Readme.md new file mode 100644 index 0000000..9431b90 --- /dev/null +++ b/node_modules/dateformat/Readme.md @@ -0,0 +1,204 @@ +# dateformat + +A node.js package for Steven Levithan's excellent [dateFormat()][dateformat] function. 
+ +[![Build Status](https://travis-ci.org/felixge/node-dateformat.svg)](https://travis-ci.org/felixge/node-dateformat) + +## Modifications + +- Removed the `Date.prototype.format` method. Sorry folks, but extending native prototypes is for suckers. +- Added a `module.exports = dateFormat;` statement at the bottom +- Added the placeholder `N` to get the ISO 8601 numeric representation of the day of the week + +## Installation + +```bash +$ npm install dateformat +$ dateformat --help +``` + +## Usage + +As taken from Steven's post, modified to match the Modifications listed above: + +```js +var dateFormat = require("dateformat"); +var now = new Date(); + +// Basic usage +dateFormat(now, "dddd, mmmm dS, yyyy, h:MM:ss TT"); +// Saturday, June 9th, 2007, 5:46:21 PM + +// You can use one of several named masks +dateFormat(now, "isoDateTime"); +// 2007-06-09T17:46:21 + +// ...Or add your own +dateFormat.masks.hammerTime = 'HH:MM! "Can\'t touch this!"'; +dateFormat(now, "hammerTime"); +// 17:46! Can't touch this! + +// You can also provide the date as a string +dateFormat("Jun 9 2007", "fullDate"); +// Saturday, June 9, 2007 + +// Note that if you don't include the mask argument, +// dateFormat.masks.default is used +dateFormat(now); +// Sat Jun 09 2007 17:46:21 + +// And if you don't include the date argument, +// the current date and time is used +dateFormat(); +// Sat Jun 09 2007 17:46:22 + +// You can also skip the date argument (as long as your mask doesn't +// contain any numbers), in which case the current date/time is used +dateFormat("longTime"); +// 5:46:22 PM EST + +// And finally, you can convert local time to UTC time. Simply pass in +// true as an additional argument (no argument skipping allowed in this case): +dateFormat(now, "longTime", true); +// 10:46:21 PM UTC + +// ...Or add the prefix "UTC:" or "GMT:" to your mask. +dateFormat(now, "UTC:h:MM:ss TT Z"); +// 10:46:21 PM UTC + +// You can also get the ISO 8601 week of the year: +dateFormat(now, "W"); +// 42 + +// and also get the ISO 8601 numeric representation of the day of the week: +dateFormat(now, "N"); +// 6 +``` + +### Mask options + +| Mask | Description | +| ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `d` | Day of the month as digits; no leading zero for single-digit days. | +| `dd` | Day of the month as digits; leading zero for single-digit days. | +| `ddd` | Day of the week as a three-letter abbreviation. | +| `DDD` | "Ysd", "Tdy" or "Tmw" if date lies within these three days. Else fall back to ddd. | +| `dddd` | Day of the week as its full name. | +| `DDDD` | "Yesterday", "Today" or "Tomorrow" if date lies within these three days. Else fall back to dddd. | +| `m` | Month as digits; no leading zero for single-digit months. | +| `mm` | Month as digits; leading zero for single-digit months. | +| `mmm` | Month as a three-letter abbreviation. | +| `mmmm` | Month as its full name. | +| `yy` | Year as last two digits; leading zero for years less than 10. | +| `yyyy` | Year represented by four digits. | +| `h` | Hours; no leading zero for single-digit hours (12-hour clock). | +| `hh` | Hours; leading zero for single-digit hours (12-hour clock). | +| `H` | Hours; no leading zero for single-digit hours (24-hour clock). | +| `HH` | Hours; leading zero for single-digit hours (24-hour clock). | +| `M` | Minutes; no leading zero for single-digit minutes. 
| `MM` | Minutes; leading zero for single-digit minutes. | +| `N` | ISO 8601 numeric representation of the day of the week. | +| `o` | GMT/UTC timezone offset, e.g. -0500 or +0230. | +| `p` | GMT/UTC timezone offset, e.g. -05:00 or +02:30. | +| `s` | Seconds; no leading zero for single-digit seconds. | +| `ss` | Seconds; leading zero for single-digit seconds. | +| `S` | The date's ordinal suffix (st, nd, rd, or th). Works well with `d`. | +| `l` | Milliseconds; gives 3 digits. | +| `L` | Milliseconds; gives 2 digits. | +| `t` | Lowercase, single-character time marker string: a or p. | +| `tt` | Lowercase, two-character time marker string: am or pm. | +| `T` | Uppercase, single-character time marker string: A or P. | +| `TT` | Uppercase, two-character time marker string: AM or PM. | +| `W` | ISO 8601 week number of the year, e.g. 4, 42 | +| `WW` | ISO 8601 week number of the year, leading zero for single-digit, e.g. 04, 42 | +| `Z` | US timezone abbreviation, e.g. EST or MDT. For non-US timezones, the GMT/UTC offset is returned, e.g. GMT-0500 | +| `'...'`, `"..."` | Literal character sequence. Surrounding quotes are removed. | +| `UTC:` | Must be the first four characters of the mask. Converts the date from local time to UTC/GMT/Zulu time before applying the mask. The "UTC:" prefix is removed. | + +### Named Formats + +| Name | Mask | Example | +| ----------------- | ------------------------------ | ------------------------ | +| `default` | `ddd mmm dd yyyy HH:MM:ss` | Sat Jun 09 2007 17:46:21 | +| `shortDate` | `m/d/yy` | 6/9/07 | +| `paddedShortDate` | `mm/dd/yyyy` | 06/09/2007 | +| `mediumDate` | `mmm d, yyyy` | Jun 9, 2007 | +| `longDate` | `mmmm d, yyyy` | June 9, 2007 | +| `fullDate` | `dddd, mmmm d, yyyy` | Saturday, June 9, 2007 | +| `shortTime` | `h:MM TT` | 5:46 PM | +| `mediumTime` | `h:MM:ss TT` | 5:46:21 PM | +| `longTime` | `h:MM:ss TT Z` | 5:46:21 PM EST | +| `isoDate` | `yyyy-mm-dd` | 2007-06-09 | +| `isoTime` | `HH:MM:ss` | 17:46:21 | +| `isoDateTime` | `yyyy-mm-dd'T'HH:MM:sso` | 2007-06-09T17:46:21+0700 | +| `isoUtcDateTime` | `UTC:yyyy-mm-dd'T'HH:MM:ss'Z'` | 2007-06-09T22:46:21Z | + +### Localization + +Day names, month names and the AM/PM indicators can be localized by +passing an object with the necessary strings. For example: + +```js +var dateFormat = require("dateformat"); +dateFormat.i18n = { + dayNames: [ + "Sun", + "Mon", + "Tue", + "Wed", + "Thu", + "Fri", + "Sat", + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday", + ], + monthNames: [ + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ], + timeNames: ["a", "p", "am", "pm", "A", "P", "AM", "PM"], +}; +``` + +> Notice that only one language is supported at a time and all strings +> _must_ be present in the new value. + +### Breaking change in 2.1.0 + +- 2.1.0 was published with a breaking change affecting those using localized strings. +- 2.2.0 has been published without the change, so that packages referring to ^2.0.0 continue working. This is now branch v2_2. +- 3.0.\* contains the localized AM/PM change. + +## License + +(c) 2007-2009 Steven Levithan [stevenlevithan.com][stevenlevithan], MIT license.
+ +[dateformat]: http://blog.stevenlevithan.com/archives/date-time-format +[stevenlevithan]: http://stevenlevithan.com/ diff --git a/node_modules/dateformat/lib/dateformat.js b/node_modules/dateformat/lib/dateformat.js new file mode 100644 index 0000000..d2145ab --- /dev/null +++ b/node_modules/dateformat/lib/dateformat.js @@ -0,0 +1 @@ +"use strict";function _typeof(obj){"@babel/helpers - typeof";if(typeof Symbol==="function"&&typeof Symbol.iterator==="symbol"){_typeof=function _typeof(obj){return typeof obj}}else{_typeof=function _typeof(obj){return obj&&typeof Symbol==="function"&&obj.constructor===Symbol&&obj!==Symbol.prototype?"symbol":typeof obj}}return _typeof(obj)}(function(global){var _arguments=arguments;var dateFormat=function(){var token=/d{1,4}|D{3,4}|m{1,4}|yy(?:yy)?|([HhMsTt])\1?|W{1,2}|[LlopSZN]|"[^"]*"|'[^']*'/g;var timezone=/\b(?:[PMCEA][SDP]T|(?:Pacific|Mountain|Central|Eastern|Atlantic) (?:Standard|Daylight|Prevailing) Time|(?:GMT|UTC)(?:[-+]\d{4})?)\b/g;var timezoneClip=/[^-+\dA-Z]/g;return function(date,mask,utc,gmt){if(_arguments.length===1&&kindOf(date)==="string"&&!/\d/.test(date)){mask=date;date=undefined}date=date||date===0?date:new Date;if(!(date instanceof Date)){date=new Date(date)}if(isNaN(date)){throw TypeError("Invalid date")}mask=String(dateFormat.masks[mask]||mask||dateFormat.masks["default"]);var maskSlice=mask.slice(0,4);if(maskSlice==="UTC:"||maskSlice==="GMT:"){mask=mask.slice(4);utc=true;if(maskSlice==="GMT:"){gmt=true}}var _=function _(){return utc?"getUTC":"get"};var _d=function d(){return date[_()+"Date"]()};var D=function D(){return date[_()+"Day"]()};var _m=function m(){return date[_()+"Month"]()};var y=function y(){return date[_()+"FullYear"]()};var _H=function H(){return date[_()+"Hours"]()};var _M=function M(){return date[_()+"Minutes"]()};var _s=function s(){return date[_()+"Seconds"]()};var _L=function L(){return date[_()+"Milliseconds"]()};var _o=function o(){return utc?0:date.getTimezoneOffset()};var _W=function W(){return getWeek(date)};var _N=function N(){return getDayOfWeek(date)};var flags={d:function d(){return _d()},dd:function dd(){return pad(_d())},ddd:function ddd(){return dateFormat.i18n.dayNames[D()]},DDD:function DDD(){return getDayName({y:y(),m:_m(),d:_d(),_:_(),dayName:dateFormat.i18n.dayNames[D()],short:true})},dddd:function dddd(){return dateFormat.i18n.dayNames[D()+7]},DDDD:function DDDD(){return getDayName({y:y(),m:_m(),d:_d(),_:_(),dayName:dateFormat.i18n.dayNames[D()+7]})},m:function m(){return _m()+1},mm:function mm(){return pad(_m()+1)},mmm:function mmm(){return dateFormat.i18n.monthNames[_m()]},mmmm:function mmmm(){return dateFormat.i18n.monthNames[_m()+12]},yy:function yy(){return String(y()).slice(2)},yyyy:function yyyy(){return pad(y(),4)},h:function h(){return _H()%12||12},hh:function hh(){return pad(_H()%12||12)},H:function H(){return _H()},HH:function HH(){return pad(_H())},M:function M(){return _M()},MM:function MM(){return pad(_M())},s:function s(){return _s()},ss:function ss(){return pad(_s())},l:function l(){return pad(_L(),3)},L:function L(){return pad(Math.floor(_L()/10))},t:function t(){return _H()<12?dateFormat.i18n.timeNames[0]:dateFormat.i18n.timeNames[1]},tt:function tt(){return _H()<12?dateFormat.i18n.timeNames[2]:dateFormat.i18n.timeNames[3]},T:function T(){return _H()<12?dateFormat.i18n.timeNames[4]:dateFormat.i18n.timeNames[5]},TT:function TT(){return _H()<12?dateFormat.i18n.timeNames[6]:dateFormat.i18n.timeNames[7]},Z:function Z(){return 
gmt?"GMT":utc?"UTC":(String(date).match(timezone)||[""]).pop().replace(timezoneClip,"").replace(/GMT\+0000/g,"UTC")},o:function o(){return(_o()>0?"-":"+")+pad(Math.floor(Math.abs(_o())/60)*100+Math.abs(_o())%60,4)},p:function p(){return(_o()>0?"-":"+")+pad(Math.floor(Math.abs(_o())/60),2)+":"+pad(Math.floor(Math.abs(_o())%60),2)},S:function S(){return["th","st","nd","rd"][_d()%10>3?0:(_d()%100-_d()%10!=10)*_d()%10]},W:function W(){return _W()},WW:function WW(){return pad(_W())},N:function N(){return _N()}};return mask.replace(token,function(match){if(match in flags){return flags[match]()}return match.slice(1,match.length-1)})}}();dateFormat.masks={default:"ddd mmm dd yyyy HH:MM:ss",shortDate:"m/d/yy",paddedShortDate:"mm/dd/yyyy",mediumDate:"mmm d, yyyy",longDate:"mmmm d, yyyy",fullDate:"dddd, mmmm d, yyyy",shortTime:"h:MM TT",mediumTime:"h:MM:ss TT",longTime:"h:MM:ss TT Z",isoDate:"yyyy-mm-dd",isoTime:"HH:MM:ss",isoDateTime:"yyyy-mm-dd'T'HH:MM:sso",isoUtcDateTime:"UTC:yyyy-mm-dd'T'HH:MM:ss'Z'",expiresHeaderFormat:"ddd, dd mmm yyyy HH:MM:ss Z"};dateFormat.i18n={dayNames:["Sun","Mon","Tue","Wed","Thu","Fri","Sat","Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],monthNames:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec","January","February","March","April","May","June","July","August","September","October","November","December"],timeNames:["a","p","am","pm","A","P","AM","PM"]};var pad=function pad(val,len){val=String(val);len=len||2;while(val.length" + ], + "homepage": "https://github.com/felixge/node-dateformat", + "author": "Steven Levithan", + "contributors": [ + "Steven Levithan", + "Felix Geisendörfer ", + "Christoph Tavan ", + "Jon Schlinkert (https://github.com/jonschlinkert)" + ], + "version": "4.6.3", + "license": "MIT", + "main": "lib/dateformat", + "devDependencies": { + "@babel/cli": "^7.12.10", + "@babel/core": "^7.12.10", + "@babel/preset-env": "^7.12.11", + "mocha": "^8.2.1", + "uglify-js": "^3.12.5" + }, + "engines": { + "node": "*" + }, + "scripts": { + "build": "./node_modules/.bin/babel src --out-dir lib && uglifyjs lib/dateformat.js -o lib/dateformat.js", + "test": "npm run build && mocha", + "benchmark": "npm run build && node ./benchmark/benchmark.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/felixge/node-dateformat.git" + }, + "dependencies": {} +} diff --git a/node_modules/end-of-stream/LICENSE b/node_modules/end-of-stream/LICENSE new file mode 100644 index 0000000..757562e --- /dev/null +++ b/node_modules/end-of-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/end-of-stream/README.md b/node_modules/end-of-stream/README.md new file mode 100644 index 0000000..857b14b --- /dev/null +++ b/node_modules/end-of-stream/README.md @@ -0,0 +1,54 @@ +# end-of-stream + +A node module that calls a callback when a readable/writable/duplex stream has completed or failed. + + npm install end-of-stream + +[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream) + +## Usage + +Simply pass a stream and a callback to `eos`. +Legacy streams, streams2, and streams3 are all supported. + +``` js +var eos = require('end-of-stream'); + +eos(readableStream, function(err) { + // this will be set to the stream instance + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended', this === readableStream); +}); + +eos(writableStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished', this === writableStream); +}); + +eos(duplexStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended and finished', this === duplexStream); +}); + +eos(duplexStream, {readable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished but might still be readable'); +}); + +eos(duplexStream, {writable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended but might still be writable'); +}); + +eos(readableStream, {error:false}, function(err) { + // do not treat emit('error', err) as an end-of-stream +}); +``` + +## License + +MIT + +## Related + +`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. diff --git a/node_modules/end-of-stream/index.js b/node_modules/end-of-stream/index.js new file mode 100644 index 0000000..7ce47e9 --- /dev/null +++ b/node_modules/end-of-stream/index.js @@ -0,0 +1,96 @@ +var once = require('once'); + +var noop = function() {}; + +var qnt = global.Bare ?
queueMicrotask : process.nextTick.bind(process); + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + qnt(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; diff --git a/node_modules/end-of-stream/package.json b/node_modules/end-of-stream/package.json new file mode 100644 index 0000000..0b530cd --- /dev/null +++ b/node_modules/end-of-stream/package.json @@ -0,0 +1,37 @@ +{ + "name": "end-of-stream", + "version": "1.4.5", + "description": "Call a callback when a readable/writable/duplex stream has completed or failed.", + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/end-of-stream.git" + }, + "dependencies": { + "once": "^1.4.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "stream", + "streams", + "callback", + "finish", + "close", + "end", + "wait" + ], + "bugs": { + "url": "https://github.com/mafintosh/end-of-stream/issues" + }, + "homepage": "https://github.com/mafintosh/end-of-stream", + "main": 
"index.js", + "author": "Mathias Buus ", + "license": "MIT", + "devDependencies": { + "tape": "^4.11.0" + } +} diff --git a/node_modules/fast-copy/CHANGELOG.md b/node_modules/fast-copy/CHANGELOG.md new file mode 100644 index 0000000..6c092d7 --- /dev/null +++ b/node_modules/fast-copy/CHANGELOG.md @@ -0,0 +1,171 @@ +# fast-copy CHANGELOG + +## 4.0.2 + +- [#112](https://github.com/planttheidea/fast-copy/pull/112) - Prevent generators from attempting to be copied (fixes + [#111](https://github.com/planttheidea/fast-copy/issues/111)) + +## 4.0.1 + +- [#110](https://github.com/planttheidea/fast-copy/pull/110) - Fix legacy types not aligning with types from build + package + +## 4.0.0 + +### BREAKING CHANGES + +- The default `copy` method is now a named export, and the default export has been removed. +- Legacy environment support has been removed; `Symbol`, `WeakMap`, and `RegExp.prototype.flags` are now expected to be + present. +- `createCopier` now receives an object of options. The methods passed previously are namespaced under the `methods` key + in that options object. +- `createStrictCopier` has been removed; please use the `strict` option passed to `createCopier` + +## 3.0.2 + +- [#95](https://github.com/planttheidea/fast-copy/pull/95) - Add support for objects that have a prototype with no + constructor + +## 3.0.1 + +- [#78](https://github.com/planttheidea/fast-copy/pull/78) - Work when running Node process with `--disable-proto=throw` + (thanks [@castarco](https://github.com/castarco)) + +## 3.0.0 + +**Breaking changes** + +- Exports are now always named, so the `.default` suffix is required when accessing + - CommonJS in Node => `const copy = require('fast-copy').default;` + - UMD global via CDN => `const copy = globalThis['fast-copy'].default;` +- `copy.strict` is no longer available; it is now available as the explicit `copyStrict` named import +- Options have been removed + - `isStrict` option has been replaced with importing the separate `copyStrict` method + - `realm` has been removed entirely, as `instanceof` is no longer used internally +- The `FastCopy` namespace in typings has been removed in favor of explicit import of available types + +**Enhancements** + +- Support `exports` option, to have bettern handling for different environments (ESM vs CJS vs UMD) and improve + tree-shaking when supported +- Can now create a custom copier (either standard or strict), allowing maximum performance for specific use-cases +- Small speed improvements when handling certain object types + +**Bug fixes** + +- Correctly handle primitive wrappers, e.g. 
`new String('foo')` + +## 2.1.7 + +- Republish of [`2.1.6`](#216), as the release process failed mid-publish + +## 2.1.6 + +- Revert [#69](https://github.com/planttheidea/fast-copy/pull/69) and + [#71](https://github.com/planttheidea/fast-copy/pull/71), as they broke the package for NodeJS consumption (will be + reintroduced in v3, as breaking changes are required) + +## 2.1.5 - DO NOT USE + +- Ensure `"type": "module"` is set to allow ESM in NodeJS to work + [#71](https://github.com/planttheidea/fast-copy/pull/71) + +## 2.1.4 - DO NOT USE + +- Provide `"exports"` definition in `package.json` [#69](https://github.com/planttheidea/fast-copy/pull/69) (thanks + [@ilteoood](https://github.com/ilteoood)) + +## 2.1.3 + +- Fix source maps not referencing source code [#65](https://github.com/planttheidea/fast-copy/pull/65) + +## 2.1.2 + +- Support `constructor` property override on object [#60](https://github.com/planttheidea/fast-copy/pull/60) +- Provide better support for `constructor` override on non-plain object types + [#61](https://github.com/planttheidea/fast-copy/pull/61) +- Remove `tslint` in favor of `@typescript-eslint` [#62](https://github.com/planttheidea/fast-copy/pull/62) + +## 2.1.1 + +- Fix ESM-to-CommonJS issue when using TSC to consume [#37](https://github.com/planttheidea/fast-copy/issues/37) +- Modify `Blob` cloning to use `blob.slice()` instead of `new Blob()` for speed + +## 2.1.0 + +- Support cloning `Blob` [#31](https://github.com/planttheidea/fast-copy/pull/31) (thanks + [@fratzinger](https://github.com/fratzinger)) +- Fix cloning descriptors that are only getters / setters in strict mode +- Handle errors when defining properties in strict mode + +## 2.0.5 + +- Fix issue copying objects referenced multiple times in source [#28](https://github.com/planttheidea/fast-copy/pull/28) + (thanks [@darkowic](https://github.com/darkowic)) + +## 2.0.4 + +- Cache length of arrays for faster iteration [#22](https://github.com/planttheidea/fast-copy/pull/22) + +- Update dev dependencies and types + +## 2.0.3 + +- Add safety to constructing native objects (fixes #19) + +## 2.0.2 + +- Manually coalesce options instead of using destructuring (performance) + +## 2.0.1 + +- Fix typings declarations - [#17](https://github.com/planttheidea/fast-copy/pull/17) + +## 2.0.0 + +- Rewrite in TypeScript +- Add strict mode (for more accurate and thorough copying, at the expense of less performance) + +#### BREAKING CHANGES + +- Second parameter is now an object of [options](README.md#options) + +## 1.2.4 + +- Ensure `Date` copy uses realm-specific constructor + +## 1.2.3 + +- Support custom prototype applied to plain object via `Object.create()` + +## 1.2.2 + +- Support copy of extensions of native `Array` with alternative `push()` method + +## 1.2.1 + +- Under-the-hood optimizations per recommendations from #7 + +## 1.2.0 + +- Add support for multiple realms + +## 1.1.2 + +- Optimize order of operations for common use cases + +## 1.1.1 + +- Fix cache using `WeakSet` when there was support for `WeakMap`s instead of `WeakSet`s (in case one was polyfilled but + not the other) + +## 1.1.0 + +- Add TypeScript and FlowType bindings + +## 1.0.1 + +- Activate tree-shaking + +## 1.0.0 + +- Initial release diff --git a/node_modules/fast-copy/LICENSE b/node_modules/fast-copy/LICENSE new file mode 100644 index 0000000..a90404f --- /dev/null +++ b/node_modules/fast-copy/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Tony Quetano + +Permission is hereby granted, free of charge, to any person obtaining a
copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/fast-copy/README.md b/node_modules/fast-copy/README.md new file mode 100644 index 0000000..5094381 --- /dev/null +++ b/node_modules/fast-copy/README.md @@ -0,0 +1,437 @@ +# fast-copy + + + + + +A [blazing fast](#benchmarks) deep object copier + +## Table of contents + +- [fast-copy](#fast-copy) + - [Table of contents](#table-of-contents) + - [Usage](#usage) + - [API](#api) + - [`copy`](#copy) + - [`copyStrict`](#copystrict) + - [`createCopier`](#createcopier) + - [`createCache`](#createcache) + - [`methods`](#methods) + - [Copier state](#copier-state) + - [`cache`](#cache) + - [`copier`](#copier) + - [`Constructor` / `prototype`](#constructor--prototype) + - [`strict`](#strict) + - [Types supported](#types-supported) + - [Aspects of default copiers](#aspects-of-default-copiers) + - [Error references are copied directly, instead of creating a new `*Error` object](#error-references-are-copied-directly-instead-of-creating-a-new-error-object) + - [The constructor of the original object is used, instead of using known globals](#the-constructor-of-the-original-object-is-used-instead-of-using-known-globals) + - [Generator objects are copied, but still reference the original generator's state](#generator-objects-are-copied-but-still-reference-the-original-generators-state) + - [Benchmarks](#benchmarks) + - [Simple objects](#simple-objects) + - [Complex objects](#complex-objects) + - [Big data](#big-data) + - [Circular objects](#circular-objects) + - [Special objects](#special-objects) + +## Usage + +```js +import { copy } from 'fast-copy'; +import { deepEqual } from 'fast-equals'; + +const object = { + array: [123, { deep: 'value' }], + map: new Map([ + ['foo', {}], + [{ bar: 'baz' }, 'quz'], + ]), +}; + +const copiedObject = copy(object); + +console.log(copiedObject === object); // false +console.log(deepEqual(copiedObject, object)); // true +``` + +## API + +### `copy` + +Deeply copy the object passed. 
+ +```js +import { copy } from 'fast-copy'; + +const copied = copy({ foo: 'bar' }); +``` + +### `copyStrict` + +Deeply copy the object passed, but with additional strictness when replicating the original object: + +- Properties retain their original property descriptor +- Non-enumerable keys are copied +- Non-standard properties (e.g., keys on arrays / maps / sets) are copied + +```js +import { copyStrict } from 'fast-copy'; + +const object = { foo: 'bar' }; +Object.defineProperty(object, 'bar', { + enumerable: false, + value: 'baz', +}); + +const copied = copyStrict(object); +``` + +**NOTE**: This method is significantly slower than [`copy`](#copy), so it is recommended to only use this when you have +specific use-cases that require it. + +### `createCopier` + +Create a custom copier based on the type-specific method overrides passed, as well as configuration options for how +copies should be performed. This is useful if you want to squeeze out maximum performance, or perform something other +than a standard deep copy. + +```js +import { createCopier } from 'fast-copy'; +import { LRUCache } from 'lru-cache'; + +const copyShallowStrict = createCopier({ + createCache: () => new LRUCache(), + methods: { + array: (array) => [...array], + map: (map) => new Map(map.entries()), + object: (object) => ({ ...object }), + set: (set) => new Set(set.values()), + }, + strict: true, +}); +``` + +#### `createCache` + +Method that creates the internal [`cache`](#cache) in the [Copier state](#copier-state). Defaults to creating a new +`WeakMap` instance. + +#### `methods` + +Methods used for copying specific object types. A list of the methods and which object types they handle: + +- `array` => `Array` +- `arrayBuffer` => `ArrayBuffer`, `Float32Array`, `Float64Array`, `Int8Array`, `Int16Array`, `Int32Array`, `Uint8Array`, + `Uint8ClampedArray`, `Uint16Array`, `Uint32Array`, `BigUint64Array` +- `blob` => `Blob` +- `dataView` => `DataView` +- `date` => `Date` +- `error` => `Error`, `AggregateError`, `EvalError`, `RangeError`, `ReferenceError`, `SyntaxError`, `TypeError`, + `URIError` +- `map` => `Map` +- `object` => `Object`, or any custom constructor +- `regExp` => `RegExp` +- `set` => `Set` + +Each method has the following contract: + +```js +type InternalCopier<Value = any> = (value: Value, state: State) => Value; + +interface State { + Constructor: any; + cache: WeakMap<any, any>; + copier: InternalCopier; + prototype: any; +} +``` + +##### Copier state + +###### `cache` + +If you want to maintain circular reference handling, then you'll need the methods to handle cache population for future +lookups: + +```js +function shallowlyCloneArray<Value extends any[]>( + value: Value, + state: State +): Value { + const clone = [...value]; + + state.cache.set(value, clone); + + return clone; +} +``` + +###### `copier` + +`copier` is provided for recursive calls with deeply-nested objects. + +```js +function deeplyCloneArray<Value extends any[]>( + value: Value, + state: State +): Value { + const clone = []; + + state.cache.set(value, clone); + + value.forEach((item) => clone.push(state.copier(item, state))); + + return clone; +} +``` + +Note above I am using `forEach` instead of a simple `map`. This is because it is highly recommended to store the clone +in [`cache`](#cache) eagerly when deeply copying, so that nested circular references are handled correctly. + +###### `Constructor` / `prototype` + +Both `Constructor` and `prototype` properties are only populated with complex objects that are not standard objects or +arrays.
This is mainly useful for custom subclasses of these globals, or maintaining custom prototypes of objects. + +```js +function deeplyCloneSubclassArray<Value extends any[]>( + value: Value, + state: State +): Value { + const clone = new state.Constructor(); + + state.cache.set(value, clone); + + value.forEach((item) => clone.push(state.copier(item, state))); + + return clone; +} + +function deeplyCloneCustomObject<Value extends Record<string, any>>( + value: Value, + state: State +): Value { + const clone = Object.create(state.prototype); + + state.cache.set(value, clone); + + Object.entries(value).forEach(([k, v]) => (clone[k] = state.copier(v, state))); + + return clone; +} +``` + +#### `strict` + +Enforces strict copying of properties, which includes properties that are not standard for that object. An example would +be a named key on an array. + +**NOTE**: This creates a copier that is significantly slower than "loose" mode, so it is recommended to only use this +when you have specific use-cases that require it. + +## Types supported + +The following object types are deeply cloned when they are either properties on the object passed, or the object itself: + +- `Array` +- `ArrayBuffer` +- `Boolean` primitive wrappers (e.g., `new Boolean(true)`) +- `Blob` +- `Buffer` +- `DataView` +- `Date` +- `Float32Array` +- `Float64Array` +- `Int8Array` +- `Int16Array` +- `Int32Array` +- `Map` +- `Number` primitive wrappers (e.g., `new Number(123)`) +- `Object` +- `RegExp` +- `Set` +- `String` primitive wrappers (e.g., `new String('foo')`) +- `Uint8Array` +- `Uint8ClampedArray` +- `Uint16Array` +- `Uint32Array` +- `React` components +- Custom constructors + +The following object types are copied directly, as they are either primitives, cannot be cloned, or the common use-case +implementation does not expect cloning: + +- `AsyncFunction` +- `AsyncGenerator` +- `Boolean` primitives +- `Error` +- `Function` +- `Generator` +- `GeneratorFunction` +- `Number` primitives +- `Null` +- `Promise` +- `String` primitives +- `Symbol` +- `Undefined` +- `WeakMap` +- `WeakSet` + +Circular objects are supported out of the box. By default, a cache based on `WeakMap` is used, but if `WeakMap` is not +available then a fallback is used. The benchmarks quoted below are based on use of `WeakMap`. + +## Aspects of default copiers + +Inherently, what is considered a valid copy is subjective because of different requirements and use-cases. For this +library, some decisions were explicitly made for the default copiers of specific object types, and those decisions are +detailed below. If your use-cases require different handling, you can always create your own custom copier with +[`createCopier`](#createcopier). + +### Error references are copied directly, instead of creating a new `*Error` object + +While it would be relatively trivial to copy over the message and stack to a new object of the same `Error` subclass, it +is a common practice to "override" the message or stack, and copies would not retain this mutation. As such, the +original reference is copied. + +### The constructor of the original object is used, instead of using known globals + +Starting in ES2015, native globals can be subclassed like any custom class. When copying, we explicitly reuse the +constructor of the original object. However, the expectation is that these subclasses would have the same constructor +signature as their native base class. This is a common community practice, but there is the possibility of inaccuracy if +the contract differs.
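+For illustration, here is a minimal sketch of the subclass behavior described above (`TaggedMap` is a hypothetical subclass used only for this example, not part of the library):
+
+```js
+import { copy } from 'fast-copy';
+
+// A subclass that keeps the same constructor signature as its native base class.
+class TaggedMap extends Map {}
+
+const original = new TaggedMap([['foo', 'bar']]);
+const cloned = copy(original);
+
+console.log(cloned instanceof TaggedMap); // true, the original constructor is reused
+console.log(cloned.get('foo')); // 'bar'
+console.log(cloned === original); // false
+```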
+
+### Generator objects are copied, but still reference the original generator's state
+
+[Generator objects](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Generator) are
+specific types of iterators, but appear like standard objects that just have a few methods (`next`, `throw`, `return`).
+These methods are bound to the internal state of the generator, which cannot be copied effectively. Normally this would
+be treated like other "uncopiable" objects, with the reference simply passed through; however, the "validation" of
+whether something is a generator object rather than a standard object is not guaranteed (it relies on duck-typing) and
+carries a runtime cost. Therefore, the simplest path of treating it like a standard object (copying its methods to a
+new object) was taken.
+
+## Benchmarks
+
+#### Simple objects
+
+_Small number of properties, all values are primitives_
+
+```bash
+┌────────────────────┬────────────────┐
+│ Name               │ Ops / sec      │
+├────────────────────┼────────────────┤
+│ fast-copy          │ 4606103.720559 │
+├────────────────────┼────────────────┤
+│ lodash.cloneDeep   │ 2575175.39241  │
+├────────────────────┼────────────────┤
+│ clone              │ 2172921.6353   │
+├────────────────────┼────────────────┤
+│ ramda              │ 1919715.448951 │
+├────────────────────┼────────────────┤
+│ fast-clone         │ 1576610.693318 │
+├────────────────────┼────────────────┤
+│ deepclone          │ 1173500.05884  │
+├────────────────────┼────────────────┤
+│ fast-copy (strict) │ 1049310.47701  │
+└────────────────────┴────────────────┘
+Fastest was "fast-copy".
+```
+
+#### Complex objects
+
+_Large number of properties, values are a combination of primitives and complex objects_
+
+```bash
+┌────────────────────┬───────────────┐
+│ Name               │ Ops / sec     │
+├────────────────────┼───────────────┤
+│ fast-copy          │ 235511.4532   │
+├────────────────────┼───────────────┤
+│ deepclone          │ 142976.849406 │
+├────────────────────┼───────────────┤
+│ clone              │ 125026.837887 │
+├────────────────────┼───────────────┤
+│ ramda              │ 114216.98158  │
+├────────────────────┼───────────────┤
+│ fast-clone         │ 111388.215547 │
+├────────────────────┼───────────────┤
+│ fast-copy (strict) │ 77683.900047  │
+├────────────────────┼───────────────┤
+│ lodash.cloneDeep   │ 71343.431983  │
+└────────────────────┴───────────────┘
+Fastest was "fast-copy".
+```
+
+#### Big data
+
+_Very large number of properties with high amount of nesting, mainly objects and arrays_
+
+```bash
+Testing big data object...
+┌────────────────────┬────────────┐
+│ Name               │ Ops / sec  │
+├────────────────────┼────────────┤
+│ fast-copy          │ 325.548627 │
+├────────────────────┼────────────┤
+│ fast-clone         │ 257.913886 │
+├────────────────────┼────────────┤
+│ deepclone          │ 158.228042 │
+├────────────────────┼────────────┤
+│ lodash.cloneDeep   │ 153.520966 │
+├────────────────────┼────────────┤
+│ fast-copy (strict) │ 126.027381 │
+├────────────────────┼────────────┤
+│ clone              │ 123.383641 │
+├────────────────────┼────────────┤
+│ ramda              │ 35.507959  │
+└────────────────────┴────────────┘
+Fastest was "fast-copy".
+```
+
+#### Circular objects
+
+```bash
+Testing circular object...
+┌────────────────────┬────────────────┐
+│ Name               │ Ops / sec      │
+├────────────────────┼────────────────┤
+│ fast-copy          │ 1344790.296938 │
+├────────────────────┼────────────────┤
+│ deepclone          │ 1127781.641192 │
+├────────────────────┼────────────────┤
+│ lodash.cloneDeep   │ 894679.711048  │
+├────────────────────┼────────────────┤
+│ clone              │ 892911.50594   │
+├────────────────────┼────────────────┤
+│ fast-copy (strict) │ 821339.44828   │
+├────────────────────┼────────────────┤
+│ ramda              │ 615222.946985  │
+├────────────────────┼────────────────┤
+│ fast-clone         │ 0              │
+└────────────────────┴────────────────┘
+Fastest was "fast-copy".
+```
+
+#### Special objects
+
+_Custom constructors, React components, etc_
+
+```bash
+┌────────────────────┬──────────────┐
+│ Name               │ Ops / sec    │
+├────────────────────┼──────────────┤
+│ fast-copy          │ 86875.694416 │
+├────────────────────┼──────────────┤
+│ clone              │ 73525.671381 │
+├────────────────────┼──────────────┤
+│ lodash.cloneDeep   │ 63280.563976 │
+├────────────────────┼──────────────┤
+│ fast-clone         │ 52991.064016 │
+├────────────────────┼──────────────┤
+│ ramda              │ 31770.652317 │
+├────────────────────┼──────────────┤
+│ deepclone          │ 24253.795114 │
+├────────────────────┼──────────────┤
+│ fast-copy (strict) │ 19112.538416 │
+└────────────────────┴──────────────┘
+Fastest was "fast-copy".
+```
diff --git a/node_modules/fast-copy/dist/cjs/copier.d.cts b/node_modules/fast-copy/dist/cjs/copier.d.cts new file mode 100644 index 0000000..a11bd29 --- /dev/null +++ b/node_modules/fast-copy/dist/cjs/copier.d.cts @@ -0,0 +1,75 @@ +import type { Cache } from './utils.d.cts'; +export type InternalCopier = (value: Value, state: State) => Value; +export interface State { + Constructor: any; + cache: Cache; + copier: InternalCopier; + prototype: any; +} +/** + * Deeply copy the indexed values in the array. + */ +export declare function copyArrayLoose(array: any[], state: State): any; +/** + * Deeply copy the indexed values in the array, as well as any custom properties. + */ +export declare function copyArrayStrict(array: Value, state: State): Value; +/** + * Copy the contents of the ArrayBuffer. + */ +export declare function copyArrayBuffer(arrayBuffer: Value, _state: State): Value; +/** + * Create a new Blob with the contents of the original. + */ +export declare function copyBlob(blob: Value, _state: State): Value; +/** + * Create a new DataView with the contents of the original. + */ +export declare function copyDataView(dataView: Value, state: State): Value; +/** + * Create a new Date based on the time of the original. + */ +export declare function copyDate(date: Value, state: State): Value; +/** + * Deeply copy the keys and values of the original. + */ +export declare function copyMapLoose>(map: Value, state: State): Value; +/** + * Deeply copy the keys and values of the original, as well as any custom properties. + */ +export declare function copyMapStrict>(map: Value, state: State): Value; +/** + * Deeply copy the properties (keys and symbols) and values of the original. + */ +export declare function copyObjectLoose>(object: Value, state: State): Value; +/** + * Deeply copy the properties (keys and symbols) and values of the original, as well + * as any hidden or non-enumerable properties. + */ +export declare function copyObjectStrict>(object: Value, state: State): Value; +/** + * Create a new primitive wrapper from the value of the original.
+ */ +export declare function copyPrimitiveWrapper( + primitiveObject: Value, + state: State, +): Value; +/** + * Create a new RegExp based on the value and flags of the original. + */ +export declare function copyRegExp(regExp: Value, state: State): Value; +/** + * Return the original value (an identity function). + * + * @note + * THis is used for objects that cannot be copied, such as WeakMap. + */ +export declare function copySelf(value: Value, _state: State): Value; +/** + * Deeply copy the values of the original. + */ +export declare function copySetLoose>(set: Value, state: State): Value; +/** + * Deeply copy the values of the original, as well as any custom properties. + */ +export declare function copySetStrict>(set: Value, state: State): Value; diff --git a/node_modules/fast-copy/dist/cjs/index.cjs b/node_modules/fast-copy/dist/cjs/index.cjs new file mode 100644 index 0000000..04272c5 --- /dev/null +++ b/node_modules/fast-copy/dist/cjs/index.cjs @@ -0,0 +1,340 @@ +'use strict'; + +// eslint-disable-next-line @typescript-eslint/unbound-method +const toStringFunction = Function.prototype.toString; +// eslint-disable-next-line @typescript-eslint/unbound-method +const toStringObject = Object.prototype.toString; +/** + * Get an empty version of the object with the same prototype it has. + */ +function getCleanClone(prototype) { + if (!prototype) { + return Object.create(null); + } + const Constructor = prototype.constructor; + if (Constructor === Object) { + return prototype === Object.prototype ? {} : Object.create(prototype); + } + if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) { + try { + return new Constructor(); + } + catch (_a) { + // Ignore + } + } + return Object.create(prototype); +} +/** + * Get the tag of the value passed, so that the correct copier can be used. + */ +function getTag(value) { + const stringTag = value[Symbol.toStringTag]; + if (stringTag) { + return stringTag; + } + const type = toStringObject.call(value); + return type.substring(8, type.length - 1); +} + +// eslint-disable-next-line @typescript-eslint/unbound-method +const { hasOwnProperty, propertyIsEnumerable } = Object.prototype; +function copyOwnDescriptor(original, clone, property, state) { + const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || { + configurable: true, + enumerable: true, + value: original[property], + writable: true, + }; + const descriptor = ownDescriptor.get || ownDescriptor.set + ? ownDescriptor + : { + configurable: ownDescriptor.configurable, + enumerable: ownDescriptor.enumerable, + value: state.copier(ownDescriptor.value, state), + writable: ownDescriptor.writable, + }; + try { + Object.defineProperty(clone, property, descriptor); + } + catch (_a) { + // The above can fail on node in extreme edge cases, so fall back to the loose assignment. + clone[property] = descriptor.get ? descriptor.get() : descriptor.value; + } +} +/** + * Striclty copy all properties contained on the object. + */ +function copyOwnPropertiesStrict(value, clone, state) { + const names = Object.getOwnPropertyNames(value); + for (let index = 0; index < names.length; ++index) { + copyOwnDescriptor(value, clone, names[index], state); + } + const symbols = Object.getOwnPropertySymbols(value); + for (let index = 0; index < symbols.length; ++index) { + copyOwnDescriptor(value, clone, symbols[index], state); + } + return clone; +} +/** + * Deeply copy the indexed values in the array. 
+ */ +function copyArrayLoose(array, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(array, clone); + for (let index = 0; index < array.length; ++index) { + clone[index] = state.copier(array[index], state); + } + return clone; +} +/** + * Deeply copy the indexed values in the array, as well as any custom properties. + */ +function copyArrayStrict(array, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(array, clone); + return copyOwnPropertiesStrict(array, clone, state); +} +/** + * Copy the contents of the ArrayBuffer. + */ +function copyArrayBuffer(arrayBuffer, _state) { + return arrayBuffer.slice(0); +} +/** + * Create a new Blob with the contents of the original. + */ +function copyBlob(blob, _state) { + return blob.slice(0, blob.size, blob.type); +} +/** + * Create a new DataView with the contents of the original. + */ +function copyDataView(dataView, state) { + return new state.Constructor(copyArrayBuffer(dataView.buffer)); +} +/** + * Create a new Date based on the time of the original. + */ +function copyDate(date, state) { + return new state.Constructor(date.getTime()); +} +/** + * Deeply copy the keys and values of the original. + */ +function copyMapLoose(map, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(map, clone); + map.forEach((value, key) => { + clone.set(key, state.copier(value, state)); + }); + return clone; +} +/** + * Deeply copy the keys and values of the original, as well as any custom properties. + */ +function copyMapStrict(map, state) { + return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state); +} +/** + * Deeply copy the properties (keys and symbols) and values of the original. + */ +function copyObjectLoose(object, state) { + const clone = getCleanClone(state.prototype); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(object, clone); + for (const key in object) { + if (hasOwnProperty.call(object, key)) { + clone[key] = state.copier(object[key], state); + } + } + const symbols = Object.getOwnPropertySymbols(object); + for (let index = 0; index < symbols.length; ++index) { + const symbol = symbols[index]; + if (propertyIsEnumerable.call(object, symbol)) { + clone[symbol] = state.copier(object[symbol], state); + } + } + return clone; +} +/** + * Deeply copy the properties (keys and symbols) and values of the original, as well + * as any hidden or non-enumerable properties. + */ +function copyObjectStrict(object, state) { + const clone = getCleanClone(state.prototype); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(object, clone); + return copyOwnPropertiesStrict(object, clone, state); +} +/** + * Create a new primitive wrapper from the value of the original. + */ +function copyPrimitiveWrapper(primitiveObject, state) { + return new state.Constructor(primitiveObject.valueOf()); +} +/** + * Create a new RegExp based on the value and flags of the original. + */ +function copyRegExp(regExp, state) { + const clone = new state.Constructor(regExp.source, regExp.flags); + clone.lastIndex = regExp.lastIndex; + return clone; +} +/** + * Return the original value (an identity function). + * + * @note + * THis is used for objects that cannot be copied, such as WeakMap. 
+ */ +function copySelf(value, _state) { + return value; +} +/** + * Deeply copy the values of the original. + */ +function copySetLoose(set, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(set, clone); + set.forEach((value) => { + clone.add(state.copier(value, state)); + }); + return clone; +} +/** + * Deeply copy the values of the original, as well as any custom properties. + */ +function copySetStrict(set, state) { + return copyOwnPropertiesStrict(set, copySetLoose(set, state), state); +} + +function createDefaultCache() { + return new WeakMap(); +} +function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }) { + const defaultMethods = { + array: strict ? copyArrayStrict : copyArrayLoose, + arrayBuffer: copyArrayBuffer, + asyncGenerator: copySelf, + blob: copyBlob, + dataView: copyDataView, + date: copyDate, + error: copySelf, + generator: copySelf, + map: strict ? copyMapStrict : copyMapLoose, + object: strict ? copyObjectStrict : copyObjectLoose, + regExp: copyRegExp, + set: strict ? copySetStrict : copySetLoose, + }; + const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods; + const copiers = getTagSpecificCopiers(methods); + const createCache = createCacheOverride || createDefaultCache; + // Extra safety check to ensure that object and array copiers are always provided, + // avoiding runtime errors. + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!copiers.Object || !copiers.Array) { + throw new Error('An object and array copier must be provided.'); + } + return { createCache, copiers, methods, strict: Boolean(strict) }; +} +/** + * Get the copiers used for each specific object tag. 
+ */ +function getTagSpecificCopiers(methods) { + return { + Arguments: methods.object, + Array: methods.array, + ArrayBuffer: methods.arrayBuffer, + AsyncGenerator: methods.asyncGenerator, + Blob: methods.blob, + Boolean: copyPrimitiveWrapper, + DataView: methods.dataView, + Date: methods.date, + Error: methods.error, + Float32Array: methods.arrayBuffer, + Float64Array: methods.arrayBuffer, + Generator: methods.generator, + Int8Array: methods.arrayBuffer, + Int16Array: methods.arrayBuffer, + Int32Array: methods.arrayBuffer, + Map: methods.map, + Number: copyPrimitiveWrapper, + Object: methods.object, + Promise: copySelf, + RegExp: methods.regExp, + Set: methods.set, + String: copyPrimitiveWrapper, + WeakMap: copySelf, + WeakSet: copySelf, + Uint8Array: methods.arrayBuffer, + Uint8ClampedArray: methods.arrayBuffer, + Uint16Array: methods.arrayBuffer, + Uint32Array: methods.arrayBuffer, + Uint64Array: methods.arrayBuffer, + }; +} + +/** + * Create a custom copier based on custom options for any of the following: + * - `createCache` method to create a cache for copied objects + * - custom copier `methods` for specific object types + * - `strict` mode to copy all properties with their descriptors + */ +function createCopier(options = {}) { + const { createCache, copiers } = getOptions(options); + const { Array: copyArray, Object: copyObject } = copiers; + function copier(value, state) { + state.prototype = state.Constructor = undefined; + if (!value || typeof value !== 'object') { + return value; + } + if (state.cache.has(value)) { + return state.cache.get(value); + } + state.prototype = Object.getPrototypeOf(value); + // Using logical AND for speed, since optional chaining transforms to + // a local variable usage. + // eslint-disable-next-line @typescript-eslint/prefer-optional-chain + state.Constructor = state.prototype && state.prototype.constructor; + // plain objects + if (!state.Constructor || state.Constructor === Object) { + return copyObject(value, state); + } + // arrays + if (Array.isArray(value)) { + return copyArray(value, state); + } + const tagSpecificCopier = copiers[getTag(value)]; + if (tagSpecificCopier) { + return tagSpecificCopier(value, state); + } + return typeof value.then === 'function' ? value : copyObject(value, state); + } + return function copy(value) { + return copier(value, { + Constructor: undefined, + cache: createCache(), + copier, + prototype: undefined, + }); + }; +} +/** + * Copy an value deeply as much as possible, where strict recreation of object properties + * are maintained. All properties (including non-enumerable ones) are copied with their + * original property descriptors on both objects and arrays. + */ +const copyStrict = createCopier({ strict: true }); +/** + * Copy an value deeply as much as possible. 
+ */ +const copy = createCopier(); + +exports.copy = copy; +exports.copyStrict = copyStrict; +exports.createCopier = createCopier; +//# sourceMappingURL=index.cjs.map diff --git a/node_modules/fast-copy/dist/cjs/index.cjs.map b/node_modules/fast-copy/dist/cjs/index.cjs.map new file mode 100644 index 0000000..3dc29b8 --- /dev/null +++ b/node_modules/fast-copy/dist/cjs/index.cjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.cjs","sources":["../../../src/utils.ts","../../../src/copier.ts","../../../src/options.ts","../../../src/index.ts"],"sourcesContent":["export interface Cache {\n has: (value: any) => boolean;\n set: (key: any, value: any) => void;\n get: (key: any) => any;\n}\n\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst toStringFunction = Function.prototype.toString;\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst toStringObject = Object.prototype.toString;\n\n/**\n * Get an empty version of the object with the same prototype it has.\n */\nexport function getCleanClone(prototype: any): any {\n if (!prototype) {\n return Object.create(null);\n }\n\n const Constructor = prototype.constructor;\n\n if (Constructor === Object) {\n return prototype === Object.prototype ? {} : Object.create(prototype as object | null);\n }\n\n if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) {\n try {\n return new Constructor();\n } catch {\n // Ignore\n }\n }\n\n return Object.create(prototype as object | null);\n}\n\n/**\n * Get the tag of the value passed, so that the correct copier can be used.\n */\nexport function getTag(value: any): string {\n const stringTag = value[Symbol.toStringTag];\n\n if (stringTag) {\n return stringTag;\n }\n\n const type = toStringObject.call(value);\n\n return type.substring(8, type.length - 1);\n}\n","import { getCleanClone } from './utils.js';\nimport type { Cache } from './utils.ts';\n\nexport type InternalCopier = (value: Value, state: State) => Value;\n\nexport interface State {\n Constructor: any;\n cache: Cache;\n copier: InternalCopier;\n prototype: any;\n}\n\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst { hasOwnProperty, propertyIsEnumerable } = Object.prototype;\n\nfunction copyOwnDescriptor(\n original: Value,\n clone: Value,\n property: string | symbol,\n state: State,\n): void {\n const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || {\n configurable: true,\n enumerable: true,\n value: original[property as keyof Value],\n writable: true,\n };\n const descriptor =\n ownDescriptor.get || ownDescriptor.set\n ? ownDescriptor\n : {\n configurable: ownDescriptor.configurable,\n enumerable: ownDescriptor.enumerable,\n value: state.copier(ownDescriptor.value, state),\n writable: ownDescriptor.writable,\n };\n\n try {\n Object.defineProperty(clone, property, descriptor);\n } catch {\n // The above can fail on node in extreme edge cases, so fall back to the loose assignment.\n clone[property as keyof Value] = descriptor.get ? 
descriptor.get() : descriptor.value;\n }\n}\n\n/**\n * Striclty copy all properties contained on the object.\n */\nfunction copyOwnPropertiesStrict(value: Value, clone: Value, state: State): Value {\n const names = Object.getOwnPropertyNames(value);\n\n for (let index = 0; index < names.length; ++index) {\n copyOwnDescriptor(value, clone, names[index]!, state);\n }\n\n const symbols = Object.getOwnPropertySymbols(value);\n\n for (let index = 0; index < symbols.length; ++index) {\n copyOwnDescriptor(value, clone, symbols[index]!, state);\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the indexed values in the array.\n */\nexport function copyArrayLoose(array: any[], state: State) {\n const clone = new state.Constructor();\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(array, clone);\n\n for (let index = 0; index < array.length; ++index) {\n clone[index] = state.copier(array[index], state);\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the indexed values in the array, as well as any custom properties.\n */\nexport function copyArrayStrict(array: Value, state: State) {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(array, clone);\n\n return copyOwnPropertiesStrict(array, clone, state);\n}\n\n/**\n * Copy the contents of the ArrayBuffer.\n */\nexport function copyArrayBuffer(arrayBuffer: Value, _state: State): Value {\n return arrayBuffer.slice(0) as Value;\n}\n\n/**\n * Create a new Blob with the contents of the original.\n */\nexport function copyBlob(blob: Value, _state: State): Value {\n return blob.slice(0, blob.size, blob.type) as Value;\n}\n\n/**\n * Create a new DataView with the contents of the original.\n */\nexport function copyDataView(dataView: Value, state: State): Value {\n return new state.Constructor(copyArrayBuffer(dataView.buffer, state));\n}\n\n/**\n * Create a new Date based on the time of the original.\n */\nexport function copyDate(date: Value, state: State): Value {\n return new state.Constructor(date.getTime());\n}\n\n/**\n * Deeply copy the keys and values of the original.\n */\nexport function copyMapLoose>(map: Value, state: State): Value {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(map, clone);\n\n map.forEach((value, key) => {\n clone.set(key, state.copier(value, state));\n });\n\n return clone;\n}\n\n/**\n * Deeply copy the keys and values of the original, as well as any custom properties.\n */\nexport function copyMapStrict>(map: Value, state: State) {\n return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state);\n}\n\n/**\n * Deeply copy the properties (keys and symbols) and values of the original.\n */\nexport function copyObjectLoose>(object: Value, state: State): Value {\n const clone = getCleanClone(state.prototype);\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(object, clone);\n\n for (const key in object) {\n if (hasOwnProperty.call(object, key)) {\n clone[key] = state.copier(object[key], state);\n }\n }\n\n const symbols = Object.getOwnPropertySymbols(object);\n\n for (let index = 0; index < symbols.length; ++index) {\n const symbol = symbols[index]!;\n\n if (propertyIsEnumerable.call(object, symbol)) {\n clone[symbol] = state.copier((object as any)[symbol], state);\n }\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the properties (keys and 
symbols) and values of the original, as well\n * as any hidden or non-enumerable properties.\n */\nexport function copyObjectStrict>(object: Value, state: State): Value {\n const clone = getCleanClone(state.prototype);\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(object, clone);\n\n return copyOwnPropertiesStrict(object, clone, state);\n}\n\n/**\n * Create a new primitive wrapper from the value of the original.\n */\nexport function copyPrimitiveWrapper<\n // Specifically use the object constructor types\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Value extends Boolean | Number | String,\n>(primitiveObject: Value, state: State): Value {\n return new state.Constructor(primitiveObject.valueOf());\n}\n\n/**\n * Create a new RegExp based on the value and flags of the original.\n */\nexport function copyRegExp(regExp: Value, state: State): Value {\n const clone = new state.Constructor(regExp.source, regExp.flags) as Value;\n\n clone.lastIndex = regExp.lastIndex;\n\n return clone;\n}\n\n/**\n * Return the original value (an identity function).\n *\n * @note\n * THis is used for objects that cannot be copied, such as WeakMap.\n */\nexport function copySelf(value: Value, _state: State): Value {\n return value;\n}\n\n/**\n * Deeply copy the values of the original.\n */\nexport function copySetLoose>(set: Value, state: State): Value {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(set, clone);\n\n set.forEach((value) => {\n clone.add(state.copier(value, state));\n });\n\n return clone;\n}\n\n/**\n * Deeply copy the values of the original, as well as any custom properties.\n */\nexport function copySetStrict>(set: Value, state: State): Value {\n return copyOwnPropertiesStrict(set, copySetLoose(set, state), state);\n}\n","import {\n copyArrayBuffer,\n copyArrayLoose,\n copyArrayStrict,\n copyBlob,\n copyDataView,\n copyDate,\n copyMapLoose,\n copyMapStrict,\n copyObjectLoose,\n copyObjectStrict,\n copyPrimitiveWrapper,\n copyRegExp,\n copySelf,\n copySetLoose,\n copySetStrict,\n} from './copier.js';\nimport type { InternalCopier } from './copier.ts';\nimport type { Cache } from './utils.ts';\n\nexport interface CopierMethods {\n array?: InternalCopier;\n arrayBuffer?: InternalCopier;\n asyncGenerator?: InternalCopier;\n blob?: InternalCopier;\n dataView?: InternalCopier;\n date?: InternalCopier;\n error?: InternalCopier;\n generator?: InternalCopier;\n map?: InternalCopier>;\n object?: InternalCopier>;\n regExp?: InternalCopier;\n set?: InternalCopier>;\n}\n\ninterface Copiers {\n [key: string]: InternalCopier | undefined;\n\n Arguments: InternalCopier>;\n Array: InternalCopier;\n ArrayBuffer: InternalCopier;\n AsyncGenerator: InternalCopier;\n Blob: InternalCopier;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Boolean: InternalCopier;\n DataView: InternalCopier;\n Date: InternalCopier;\n Error: InternalCopier;\n Float32Array: InternalCopier;\n Float64Array: InternalCopier;\n Generator: InternalCopier;\n\n Int8Array: InternalCopier;\n Int16Array: InternalCopier;\n Int32Array: InternalCopier;\n Map: InternalCopier>;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Number: InternalCopier;\n Object: InternalCopier>;\n Promise: InternalCopier>;\n RegExp: InternalCopier;\n Set: InternalCopier>;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n 
String: InternalCopier;\n WeakMap: InternalCopier>;\n WeakSet: InternalCopier>;\n Uint8Array: InternalCopier;\n Uint8ClampedArray: InternalCopier;\n Uint16Array: InternalCopier;\n Uint32Array: InternalCopier;\n Uint64Array: InternalCopier;\n}\n\nexport interface CreateCopierOptions {\n createCache?: () => Cache;\n methods?: CopierMethods;\n strict?: boolean;\n}\n\nexport interface RequiredCreateCopierOptions extends Omit, 'methods'> {\n copiers: Copiers;\n methods: Required;\n}\n\nexport function createDefaultCache(): Cache {\n return new WeakMap();\n}\n\nexport function getOptions({\n createCache: createCacheOverride,\n methods: methodsOverride,\n strict,\n}: CreateCopierOptions): RequiredCreateCopierOptions {\n const defaultMethods = {\n array: strict ? copyArrayStrict : copyArrayLoose,\n arrayBuffer: copyArrayBuffer,\n asyncGenerator: copySelf,\n blob: copyBlob,\n dataView: copyDataView,\n date: copyDate,\n error: copySelf,\n generator: copySelf,\n map: strict ? copyMapStrict : copyMapLoose,\n object: strict ? copyObjectStrict : copyObjectLoose,\n regExp: copyRegExp,\n set: strict ? copySetStrict : copySetLoose,\n };\n\n const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods;\n const copiers = getTagSpecificCopiers(methods);\n const createCache = createCacheOverride || createDefaultCache;\n\n // Extra safety check to ensure that object and array copiers are always provided,\n // avoiding runtime errors.\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!copiers.Object || !copiers.Array) {\n throw new Error('An object and array copier must be provided.');\n }\n\n return { createCache, copiers, methods, strict: Boolean(strict) };\n}\n\n/**\n * Get the copiers used for each specific object tag.\n */\nexport function getTagSpecificCopiers(methods: Required): Copiers {\n return {\n Arguments: methods.object,\n Array: methods.array,\n ArrayBuffer: methods.arrayBuffer,\n AsyncGenerator: methods.asyncGenerator,\n Blob: methods.blob,\n Boolean: copyPrimitiveWrapper,\n DataView: methods.dataView,\n Date: methods.date,\n Error: methods.error,\n Float32Array: methods.arrayBuffer,\n Float64Array: methods.arrayBuffer,\n Generator: methods.generator,\n Int8Array: methods.arrayBuffer,\n Int16Array: methods.arrayBuffer,\n Int32Array: methods.arrayBuffer,\n Map: methods.map,\n Number: copyPrimitiveWrapper,\n Object: methods.object,\n Promise: copySelf,\n RegExp: methods.regExp,\n Set: methods.set,\n String: copyPrimitiveWrapper,\n WeakMap: copySelf,\n WeakSet: copySelf,\n Uint8Array: methods.arrayBuffer,\n Uint8ClampedArray: methods.arrayBuffer,\n Uint16Array: methods.arrayBuffer,\n Uint32Array: methods.arrayBuffer,\n Uint64Array: methods.arrayBuffer,\n };\n}\n","import type { State } from './copier.ts';\nimport { getOptions } from './options.js';\nimport type { CreateCopierOptions } from './options.ts';\nimport { getTag } from './utils.js';\n\nexport type { State } from './copier.ts';\nexport type { CreateCopierOptions } from './options.ts';\n\n/**\n * Create a custom copier based on custom options for any of the following:\n * - `createCache` method to create a cache for copied objects\n * - custom copier `methods` for specific object types\n * - `strict` mode to copy all properties with their descriptors\n */\nexport function createCopier(options: CreateCopierOptions = {}) {\n const { createCache, copiers } = getOptions(options);\n const { Array: copyArray, Object: copyObject } = copiers;\n\n function copier(value: any, state: 
State): any {\n state.prototype = state.Constructor = undefined;\n\n if (!value || typeof value !== 'object') {\n return value;\n }\n\n if (state.cache.has(value)) {\n return state.cache.get(value);\n }\n\n state.prototype = Object.getPrototypeOf(value);\n // Using logical AND for speed, since optional chaining transforms to\n // a local variable usage.\n // eslint-disable-next-line @typescript-eslint/prefer-optional-chain\n state.Constructor = state.prototype && state.prototype.constructor;\n\n // plain objects\n if (!state.Constructor || state.Constructor === Object) {\n return copyObject(value as Record, state);\n }\n\n // arrays\n if (Array.isArray(value)) {\n return copyArray(value, state);\n }\n\n const tagSpecificCopier = copiers[getTag(value)];\n\n if (tagSpecificCopier) {\n return tagSpecificCopier(value, state);\n }\n\n return typeof value.then === 'function' ? value : copyObject(value as Record, state);\n }\n\n return function copy(value: Value): Value {\n return copier(value, {\n Constructor: undefined,\n cache: createCache(),\n copier,\n prototype: undefined,\n });\n };\n}\n\n/**\n * Copy an value deeply as much as possible, where strict recreation of object properties\n * are maintained. All properties (including non-enumerable ones) are copied with their\n * original property descriptors on both objects and arrays.\n */\nexport const copyStrict = createCopier({ strict: true });\n\n/**\n * Copy an value deeply as much as possible.\n */\nexport const copy = createCopier();\n"],"names":[],"mappings":";;AAMA;AACA,MAAM,gBAAgB,GAAG,QAAQ,CAAC,SAAS,CAAC,QAAQ;AACpD;AACA,MAAM,cAAc,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ;AAEhD;;AAEG;AACG,SAAU,aAAa,CAAC,SAAc,EAAA;IAC1C,IAAI,CAAC,SAAS,EAAE;AACd,QAAA,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC;IAC5B;AAEA,IAAA,MAAM,WAAW,GAAG,SAAS,CAAC,WAAW;AAEzC,IAAA,IAAI,WAAW,KAAK,MAAM,EAAE;AAC1B,QAAA,OAAO,SAAS,KAAK,MAAM,CAAC,SAAS,GAAG,EAAE,GAAG,MAAM,CAAC,MAAM,CAAC,SAA0B,CAAC;IACxF;AAEA,IAAA,IAAI,WAAW,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE;AAC/E,QAAA,IAAI;YACF,OAAO,IAAI,WAAW,EAAE;QAC1B;AAAE,QAAA,OAAA,EAAA,EAAM;;QAER;IACF;AAEA,IAAA,OAAO,MAAM,CAAC,MAAM,CAAC,SAA0B,CAAC;AAClD;AAEA;;AAEG;AACG,SAAU,MAAM,CAAC,KAAU,EAAA;IAC/B,MAAM,SAAS,GAAG,KAAK,CAAC,MAAM,CAAC,WAAW,CAAC;IAE3C,IAAI,SAAS,EAAE;AACb,QAAA,OAAO,SAAS;IAClB;IAEA,MAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC;AAEvC,IAAA,OAAO,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;AAC3C;;ACrCA;AACA,MAAM,EAAE,cAAc,EAAE,oBAAoB,EAAE,GAAG,MAAM,CAAC,SAAS;AAEjE,SAAS,iBAAiB,CACxB,QAAe,EACf,KAAY,EACZ,QAAyB,EACzB,KAAY,EAAA;IAEZ,MAAM,aAAa,GAAG,MAAM,CAAC,wBAAwB,CAAC,QAAQ,EAAE,QAAQ,CAAC,IAAI;AAC3E,QAAA,YAAY,EAAE,IAAI;AAClB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,KAAK,EAAE,QAAQ,CAAC,QAAuB,CAAC;AACxC,QAAA,QAAQ,EAAE,IAAI;KACf;IACD,MAAM,UAAU,GACd,aAAa,CAAC,GAAG,IAAI,aAAa,CAAC;AACjC,UAAE;AACF,UAAE;YACE,YAAY,EAAE,aAAa,CAAC,YAAY;YACxC,UAAU,EAAE,aAAa,CAAC,UAAU;YACpC,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,EAAE,KAAK,CAAC;YAC/C,QAAQ,EAAE,aAAa,CAAC,QAAQ;SACjC;AAEP,IAAA,IAAI;QACF,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,QAAQ,EAAE,UAAU,CAAC;IACpD;AAAE,IAAA,OAAA,EAAA,EAAM;;QAEN,KAAK,CAAC,QAAuB,CAAC,GAAG,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,GAAG,EAAE,GAAG,UAAU,CAAC,KAAK;IACvF;AACF;AAEA;;AAEG;AACH,SAAS,uBAAuB,CAAuB,KAAY,EAAE,KAAY,EAAE,KAAY,EAAA;IAC7F,MAAM,KAAK,GAAG,MAAM,CAAC,mBAAmB,CAAC,KAAK,CAAC;AAE/C,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACjD,QAAA,iBAAiB,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,CAAE,EAAE,KAAK,CAAC;IACvD;IAEA,MAAM,OAAO,GAAG,MAAM,CAAC,qBAAqB,CAAC,KAAK,CAAC;AAEnD,IAAA,KAAK,IAAI,KAAK,
GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACnD,QAAA,iBAAiB,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,CAAE,EAAE,KAAK,CAAC;IACzD;AAEA,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,cAAc,CAAC,KAAY,EAAE,KAAY,EAAA;AACvD,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAE;;IAGrC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;AAE7B,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACjD,QAAA,KAAK,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,KAAK,CAAC;IAClD;AAEA,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,eAAe,CAAsB,KAAY,EAAE,KAAY,EAAA;AAC7E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;IAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;IAE7B,OAAO,uBAAuB,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;AACrD;AAEA;;AAEG;AACG,SAAU,eAAe,CAAgC,WAAkB,EAAE,MAAa,EAAA;AAC9F,IAAA,OAAO,WAAW,CAAC,KAAK,CAAC,CAAC,CAAU;AACtC;AAEA;;AAEG;AACG,SAAU,QAAQ,CAAqB,IAAW,EAAE,MAAa,EAAA;AACrE,IAAA,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAU;AACrD;AAEA;;AAEG;AACG,SAAU,YAAY,CAAyB,QAAe,EAAE,KAAY,EAAA;AAChF,IAAA,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,eAAe,CAAC,QAAQ,CAAC,MAAa,CAAC,CAAC;AACvE;AAEA;;AAEG;AACG,SAAU,QAAQ,CAAqB,IAAW,EAAE,KAAY,EAAA;IACpE,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;AAC9C;AAEA;;AAEG;AACG,SAAU,YAAY,CAA8B,GAAU,EAAE,KAAY,EAAA;AAChF,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;IAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;IAE3B,GAAG,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,KAAI;AACzB,QAAA,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5C,IAAA,CAAC,CAAC;AAEF,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,aAAa,CAA8B,GAAU,EAAE,KAAY,EAAA;AACjF,IAAA,OAAO,uBAAuB,CAAC,GAAG,EAAE,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,KAAK,CAAC;AACtE;AAEA;;AAEG;AACG,SAAU,eAAe,CAAoC,MAAa,EAAE,KAAY,EAAA;IAC5F,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC;;IAG5C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC;AAE9B,IAAA,KAAK,MAAM,GAAG,IAAI,MAAM,EAAE;QACxB,IAAI,cAAc,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE;AACpC,YAAA,KAAK,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,KAAK,CAAC;QAC/C;IACF;IAEA,MAAM,OAAO,GAAG,MAAM,CAAC,qBAAqB,CAAC,MAAM,CAAC;AAEpD,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACnD,QAAA,MAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAE;QAE9B,IAAI,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE;AAC7C,YAAA,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAE,MAAc,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC;QAC9D;IACF;AAEA,IAAA,OAAO,KAAK;AACd;AAEA;;;AAGG;AACG,SAAU,gBAAgB,CAAoC,MAAa,EAAE,KAAY,EAAA;IAC7F,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC;;IAG5C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC;IAE9B,OAAO,uBAAuB,CAAC,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC;AACtD;AAEA;;AAEG;AACG,SAAU,oBAAoB,CAIlC,eAAsB,EAAE,KAAY,EAAA;IACpC,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,eAAe,CAAC,OAAO,EAAE,CAAC;AACzD;AAEA;;AAEG;AACG,SAAU,UAAU,CAAuB,MAAa,EAAE,KAAY,EAAA;AAC1E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,CAAU;AAEzE,IAAA,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS;AAElC,IAAA,OAAO,KAAK;AACd;AAEA;;;;;AAKG;AACG,SAAU,QAAQ,CAAQ,KAAY,EAAE,MAAa,EAAA;AACzD,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,YAAY,CAAyB,GAAU,EAAE,KAAY,EAAA;AAC3E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;IAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;AAE3B,IAAA,GAAG,CAAC,OAAO,CAAC,CAAC,KAAK,KAAI;AACpB,QAAA,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AACvC,IAAA,CAAC,CAAC;AAEF,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,aAAa,CAAyB,GAAU,EAAE,KAAY,EAAA;AAC5E,IAAA,OAAO,uBAAuB,CAAC,GAAG,EAAE,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC
,EAAE,KAAK,CAAC;AACtE;;SCzJgB,kBAAkB,GAAA;IAChC,OAAO,IAAI,OAAO,EAAE;AACtB;AAEM,SAAU,UAAU,CAAC,EACzB,WAAW,EAAE,mBAAmB,EAChC,OAAO,EAAE,eAAe,EACxB,MAAM,GACc,EAAA;AACpB,IAAA,MAAM,cAAc,GAAG;QACrB,KAAK,EAAE,MAAM,GAAG,eAAe,GAAG,cAAc;AAChD,QAAA,WAAW,EAAE,eAAe;AAC5B,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,IAAI,EAAE,QAAQ;AACd,QAAA,QAAQ,EAAE,YAAY;AACtB,QAAA,IAAI,EAAE,QAAQ;AACd,QAAA,KAAK,EAAE,QAAQ;AACf,QAAA,SAAS,EAAE,QAAQ;QACnB,GAAG,EAAE,MAAM,GAAG,aAAa,GAAG,YAAY;QAC1C,MAAM,EAAE,MAAM,GAAG,gBAAgB,GAAG,eAAe;AACnD,QAAA,MAAM,EAAE,UAAU;QAClB,GAAG,EAAE,MAAM,GAAG,aAAa,GAAG,YAAY;KAC3C;AAED,IAAA,MAAM,OAAO,GAAG,eAAe,GAAG,MAAM,CAAC,MAAM,CAAC,cAAc,EAAE,eAAe,CAAC,GAAG,cAAc;AACjG,IAAA,MAAM,OAAO,GAAG,qBAAqB,CAAC,OAAO,CAAC;AAC9C,IAAA,MAAM,WAAW,GAAG,mBAAmB,IAAI,kBAAkB;;;;IAK7D,IAAI,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE;AACrC,QAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;IACjE;AAEA,IAAA,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE;AACnE;AAEA;;AAEG;AACG,SAAU,qBAAqB,CAAC,OAAgC,EAAA;IACpE,OAAO;QACL,SAAS,EAAE,OAAO,CAAC,MAAM;QACzB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,WAAW,EAAE,OAAO,CAAC,WAAW;QAChC,cAAc,EAAE,OAAO,CAAC,cAAc;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;AAClB,QAAA,OAAO,EAAE,oBAAoB;QAC7B,QAAQ,EAAE,OAAO,CAAC,QAAQ;QAC1B,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,YAAY,EAAE,OAAO,CAAC,WAAW;QACjC,YAAY,EAAE,OAAO,CAAC,WAAW;QACjC,SAAS,EAAE,OAAO,CAAC,SAAS;QAC5B,SAAS,EAAE,OAAO,CAAC,WAAW;QAC9B,UAAU,EAAE,OAAO,CAAC,WAAW;QAC/B,UAAU,EAAE,OAAO,CAAC,WAAW;QAC/B,GAAG,EAAE,OAAO,CAAC,GAAG;AAChB,QAAA,MAAM,EAAE,oBAAoB;QAC5B,MAAM,EAAE,OAAO,CAAC,MAAM;AACtB,QAAA,OAAO,EAAE,QAAQ;QACjB,MAAM,EAAE,OAAO,CAAC,MAAM;QACtB,GAAG,EAAE,OAAO,CAAC,GAAG;AAChB,QAAA,MAAM,EAAE,oBAAoB;AAC5B,QAAA,OAAO,EAAE,QAAQ;AACjB,QAAA,OAAO,EAAE,QAAQ;QACjB,UAAU,EAAE,OAAO,CAAC,WAAW;QAC/B,iBAAiB,EAAE,OAAO,CAAC,WAAW;QACtC,WAAW,EAAE,OAAO,CAAC,WAAW;QAChC,WAAW,EAAE,OAAO,CAAC,WAAW;QAChC,WAAW,EAAE,OAAO,CAAC,WAAW;KACjC;AACH;;ACrJA;;;;;AAKG;AACG,SAAU,YAAY,CAAC,OAAA,GAA+B,EAAE,EAAA;IAC5D,MAAM,EAAE,WAAW,EAAE,OAAO,EAAE,GAAG,UAAU,CAAC,OAAO,CAAC;IACpD,MAAM,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,OAAO;AAExD,IAAA,SAAS,MAAM,CAAC,KAAU,EAAE,KAAY,EAAA;QACtC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,WAAW,GAAG,SAAS;QAE/C,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACvC,YAAA,OAAO,KAAK;QACd;QAEA,IAAI,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;YAC1B,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC;QAC/B;QAEA,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC;;;;AAI9C,QAAA,KAAK,CAAC,WAAW,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,SAAS,CAAC,WAAW;;QAGlE,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,KAAK,MAAM,EAAE;AACtD,YAAA,OAAO,UAAU,CAAC,KAA4B,EAAE,KAAK,CAAC;QACxD;;AAGA,QAAA,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AACxB,YAAA,OAAO,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC;QAChC;QAEA,MAAM,iBAAiB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAEhD,IAAI,iBAAiB,EAAE;AACrB,YAAA,OAAO,iBAAiB,CAAC,KAAK,EAAE,KAAK,CAAC;QACxC;AAEA,QAAA,OAAO,OAAO,KAAK,CAAC,IAAI,KAAK,UAAU,GAAG,KAAK,GAAG,UAAU,CAAC,KAA4B,EAAE,KAAK,CAAC;IACnG;IAEA,OAAO,SAAS,IAAI,CAAQ,KAAY,EAAA;QACtC,OAAO,MAAM,CAAC,KAAK,EAAE;AACnB,YAAA,WAAW,EAAE,SAAS;YACtB,KAAK,EAAE,WAAW,EAAE;YACpB,MAAM;AACN,YAAA,SAAS,EAAE,SAAS;AACrB,SAAA,CAAC;AACJ,IAAA,CAAC;AACH;AAEA;;;;AAIG;AACI,MAAM,UAAU,GAAG,YAAY,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE;AAEvD;;AAEG;AACI,MAAM,IAAI,GAAG,YAAY;;;;;;"} \ No newline at end of file diff --git a/node_modules/fast-copy/dist/cjs/index.d.cts b/node_modules/fast-copy/dist/cjs/index.d.cts new file mode 100644 index 0000000..1097c27 --- /dev/null +++ b/node_modules/fast-copy/dist/cjs/index.d.cts @@ -0,0 +1,20 @@ +import type { CreateCopierOptions } from 
'./options.d.cts'; +export type { State } from './copier.d.cts'; +export type { CreateCopierOptions } from './options.d.cts'; +/** + * Create a custom copier based on custom options for any of the following: + * - `createCache` method to create a cache for copied objects + * - custom copier `methods` for specific object types + * - `strict` mode to copy all properties with their descriptors + */ +export declare function createCopier(options?: CreateCopierOptions): (value: Value) => Value; +/** + * Copy an value deeply as much as possible, where strict recreation of object properties + * are maintained. All properties (including non-enumerable ones) are copied with their + * original property descriptors on both objects and arrays. + */ +export declare const copyStrict: (value: Value) => Value; +/** + * Copy an value deeply as much as possible. + */ +export declare const copy: (value: Value) => Value; diff --git a/node_modules/fast-copy/dist/cjs/options.d.cts b/node_modules/fast-copy/dist/cjs/options.d.cts new file mode 100644 index 0000000..b28de2f --- /dev/null +++ b/node_modules/fast-copy/dist/cjs/options.d.cts @@ -0,0 +1,68 @@ +import type { InternalCopier } from './copier.d.cts'; +import type { Cache } from './utils.d.cts'; +export interface CopierMethods { + array?: InternalCopier; + arrayBuffer?: InternalCopier; + asyncGenerator?: InternalCopier; + blob?: InternalCopier; + dataView?: InternalCopier; + date?: InternalCopier; + error?: InternalCopier; + generator?: InternalCopier; + map?: InternalCopier>; + object?: InternalCopier>; + regExp?: InternalCopier; + set?: InternalCopier>; +} +interface Copiers { + [key: string]: InternalCopier | undefined; + Arguments: InternalCopier>; + Array: InternalCopier; + ArrayBuffer: InternalCopier; + AsyncGenerator: InternalCopier; + Blob: InternalCopier; + Boolean: InternalCopier; + DataView: InternalCopier; + Date: InternalCopier; + Error: InternalCopier; + Float32Array: InternalCopier; + Float64Array: InternalCopier; + Generator: InternalCopier; + Int8Array: InternalCopier; + Int16Array: InternalCopier; + Int32Array: InternalCopier; + Map: InternalCopier>; + Number: InternalCopier; + Object: InternalCopier>; + Promise: InternalCopier>; + RegExp: InternalCopier; + Set: InternalCopier>; + String: InternalCopier; + WeakMap: InternalCopier>; + WeakSet: InternalCopier>; + Uint8Array: InternalCopier; + Uint8ClampedArray: InternalCopier; + Uint16Array: InternalCopier; + Uint32Array: InternalCopier; + Uint64Array: InternalCopier; +} +export interface CreateCopierOptions { + createCache?: () => Cache; + methods?: CopierMethods; + strict?: boolean; +} +export interface RequiredCreateCopierOptions extends Omit, 'methods'> { + copiers: Copiers; + methods: Required; +} +export declare function createDefaultCache(): Cache; +export declare function getOptions({ + createCache: createCacheOverride, + methods: methodsOverride, + strict, +}: CreateCopierOptions): RequiredCreateCopierOptions; +/** + * Get the copiers used for each specific object tag. + */ +export declare function getTagSpecificCopiers(methods: Required): Copiers; +export {}; diff --git a/node_modules/fast-copy/dist/cjs/utils.d.cts b/node_modules/fast-copy/dist/cjs/utils.d.cts new file mode 100644 index 0000000..6c87b72 --- /dev/null +++ b/node_modules/fast-copy/dist/cjs/utils.d.cts @@ -0,0 +1,13 @@ +export interface Cache { + has: (value: any) => boolean; + set: (key: any, value: any) => void; + get: (key: any) => any; +} +/** + * Get an empty version of the object with the same prototype it has. 
+ */ +export declare function getCleanClone(prototype: any): any; +/** + * Get the tag of the value passed, so that the correct copier can be used. + */ +export declare function getTag(value: any): string; diff --git a/node_modules/fast-copy/dist/es/copier.d.mts b/node_modules/fast-copy/dist/es/copier.d.mts new file mode 100644 index 0000000..74b7c44 --- /dev/null +++ b/node_modules/fast-copy/dist/es/copier.d.mts @@ -0,0 +1,75 @@ +import type { Cache } from './utils.d.mts'; +export type InternalCopier = (value: Value, state: State) => Value; +export interface State { + Constructor: any; + cache: Cache; + copier: InternalCopier; + prototype: any; +} +/** + * Deeply copy the indexed values in the array. + */ +export declare function copyArrayLoose(array: any[], state: State): any; +/** + * Deeply copy the indexed values in the array, as well as any custom properties. + */ +export declare function copyArrayStrict(array: Value, state: State): Value; +/** + * Copy the contents of the ArrayBuffer. + */ +export declare function copyArrayBuffer(arrayBuffer: Value, _state: State): Value; +/** + * Create a new Blob with the contents of the original. + */ +export declare function copyBlob(blob: Value, _state: State): Value; +/** + * Create a new DataView with the contents of the original. + */ +export declare function copyDataView(dataView: Value, state: State): Value; +/** + * Create a new Date based on the time of the original. + */ +export declare function copyDate(date: Value, state: State): Value; +/** + * Deeply copy the keys and values of the original. + */ +export declare function copyMapLoose>(map: Value, state: State): Value; +/** + * Deeply copy the keys and values of the original, as well as any custom properties. + */ +export declare function copyMapStrict>(map: Value, state: State): Value; +/** + * Deeply copy the properties (keys and symbols) and values of the original. + */ +export declare function copyObjectLoose>(object: Value, state: State): Value; +/** + * Deeply copy the properties (keys and symbols) and values of the original, as well + * as any hidden or non-enumerable properties. + */ +export declare function copyObjectStrict>(object: Value, state: State): Value; +/** + * Create a new primitive wrapper from the value of the original. + */ +export declare function copyPrimitiveWrapper( + primitiveObject: Value, + state: State, +): Value; +/** + * Create a new RegExp based on the value and flags of the original. + */ +export declare function copyRegExp(regExp: Value, state: State): Value; +/** + * Return the original value (an identity function). + * + * @note + * THis is used for objects that cannot be copied, such as WeakMap. + */ +export declare function copySelf(value: Value, _state: State): Value; +/** + * Deeply copy the values of the original. + */ +export declare function copySetLoose>(set: Value, state: State): Value; +/** + * Deeply copy the values of the original, as well as any custom properties. 
+ */ +export declare function copySetStrict>(set: Value, state: State): Value; diff --git a/node_modules/fast-copy/dist/es/index.d.mts b/node_modules/fast-copy/dist/es/index.d.mts new file mode 100644 index 0000000..35c98aa --- /dev/null +++ b/node_modules/fast-copy/dist/es/index.d.mts @@ -0,0 +1,20 @@ +import type { CreateCopierOptions } from './options.d.mts'; +export type { State } from './copier.d.mts'; +export type { CreateCopierOptions } from './options.d.mts'; +/** + * Create a custom copier based on custom options for any of the following: + * - `createCache` method to create a cache for copied objects + * - custom copier `methods` for specific object types + * - `strict` mode to copy all properties with their descriptors + */ +export declare function createCopier(options?: CreateCopierOptions): (value: Value) => Value; +/** + * Copy an value deeply as much as possible, where strict recreation of object properties + * are maintained. All properties (including non-enumerable ones) are copied with their + * original property descriptors on both objects and arrays. + */ +export declare const copyStrict: (value: Value) => Value; +/** + * Copy an value deeply as much as possible. + */ +export declare const copy: (value: Value) => Value; diff --git a/node_modules/fast-copy/dist/es/index.mjs b/node_modules/fast-copy/dist/es/index.mjs new file mode 100644 index 0000000..1b5ad38 --- /dev/null +++ b/node_modules/fast-copy/dist/es/index.mjs @@ -0,0 +1,336 @@ +// eslint-disable-next-line @typescript-eslint/unbound-method +const toStringFunction = Function.prototype.toString; +// eslint-disable-next-line @typescript-eslint/unbound-method +const toStringObject = Object.prototype.toString; +/** + * Get an empty version of the object with the same prototype it has. + */ +function getCleanClone(prototype) { + if (!prototype) { + return Object.create(null); + } + const Constructor = prototype.constructor; + if (Constructor === Object) { + return prototype === Object.prototype ? {} : Object.create(prototype); + } + if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) { + try { + return new Constructor(); + } + catch (_a) { + // Ignore + } + } + return Object.create(prototype); +} +/** + * Get the tag of the value passed, so that the correct copier can be used. + */ +function getTag(value) { + const stringTag = value[Symbol.toStringTag]; + if (stringTag) { + return stringTag; + } + const type = toStringObject.call(value); + return type.substring(8, type.length - 1); +} + +// eslint-disable-next-line @typescript-eslint/unbound-method +const { hasOwnProperty, propertyIsEnumerable } = Object.prototype; +function copyOwnDescriptor(original, clone, property, state) { + const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || { + configurable: true, + enumerable: true, + value: original[property], + writable: true, + }; + const descriptor = ownDescriptor.get || ownDescriptor.set + ? ownDescriptor + : { + configurable: ownDescriptor.configurable, + enumerable: ownDescriptor.enumerable, + value: state.copier(ownDescriptor.value, state), + writable: ownDescriptor.writable, + }; + try { + Object.defineProperty(clone, property, descriptor); + } + catch (_a) { + // The above can fail on node in extreme edge cases, so fall back to the loose assignment. + clone[property] = descriptor.get ? descriptor.get() : descriptor.value; + } +} +/** + * Striclty copy all properties contained on the object. 
+ */ +function copyOwnPropertiesStrict(value, clone, state) { + const names = Object.getOwnPropertyNames(value); + for (let index = 0; index < names.length; ++index) { + copyOwnDescriptor(value, clone, names[index], state); + } + const symbols = Object.getOwnPropertySymbols(value); + for (let index = 0; index < symbols.length; ++index) { + copyOwnDescriptor(value, clone, symbols[index], state); + } + return clone; +} +/** + * Deeply copy the indexed values in the array. + */ +function copyArrayLoose(array, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(array, clone); + for (let index = 0; index < array.length; ++index) { + clone[index] = state.copier(array[index], state); + } + return clone; +} +/** + * Deeply copy the indexed values in the array, as well as any custom properties. + */ +function copyArrayStrict(array, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(array, clone); + return copyOwnPropertiesStrict(array, clone, state); +} +/** + * Copy the contents of the ArrayBuffer. + */ +function copyArrayBuffer(arrayBuffer, _state) { + return arrayBuffer.slice(0); +} +/** + * Create a new Blob with the contents of the original. + */ +function copyBlob(blob, _state) { + return blob.slice(0, blob.size, blob.type); +} +/** + * Create a new DataView with the contents of the original. + */ +function copyDataView(dataView, state) { + return new state.Constructor(copyArrayBuffer(dataView.buffer)); +} +/** + * Create a new Date based on the time of the original. + */ +function copyDate(date, state) { + return new state.Constructor(date.getTime()); +} +/** + * Deeply copy the keys and values of the original. + */ +function copyMapLoose(map, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(map, clone); + map.forEach((value, key) => { + clone.set(key, state.copier(value, state)); + }); + return clone; +} +/** + * Deeply copy the keys and values of the original, as well as any custom properties. + */ +function copyMapStrict(map, state) { + return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state); +} +/** + * Deeply copy the properties (keys and symbols) and values of the original. + */ +function copyObjectLoose(object, state) { + const clone = getCleanClone(state.prototype); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(object, clone); + for (const key in object) { + if (hasOwnProperty.call(object, key)) { + clone[key] = state.copier(object[key], state); + } + } + const symbols = Object.getOwnPropertySymbols(object); + for (let index = 0; index < symbols.length; ++index) { + const symbol = symbols[index]; + if (propertyIsEnumerable.call(object, symbol)) { + clone[symbol] = state.copier(object[symbol], state); + } + } + return clone; +} +/** + * Deeply copy the properties (keys and symbols) and values of the original, as well + * as any hidden or non-enumerable properties. + */ +function copyObjectStrict(object, state) { + const clone = getCleanClone(state.prototype); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(object, clone); + return copyOwnPropertiesStrict(object, clone, state); +} +/** + * Create a new primitive wrapper from the value of the original. 
+ */ +function copyPrimitiveWrapper(primitiveObject, state) { + return new state.Constructor(primitiveObject.valueOf()); +} +/** + * Create a new RegExp based on the value and flags of the original. + */ +function copyRegExp(regExp, state) { + const clone = new state.Constructor(regExp.source, regExp.flags); + clone.lastIndex = regExp.lastIndex; + return clone; +} +/** + * Return the original value (an identity function). + * + * @note + * THis is used for objects that cannot be copied, such as WeakMap. + */ +function copySelf(value, _state) { + return value; +} +/** + * Deeply copy the values of the original. + */ +function copySetLoose(set, state) { + const clone = new state.Constructor(); + // set in the cache immediately to be able to reuse the object recursively + state.cache.set(set, clone); + set.forEach((value) => { + clone.add(state.copier(value, state)); + }); + return clone; +} +/** + * Deeply copy the values of the original, as well as any custom properties. + */ +function copySetStrict(set, state) { + return copyOwnPropertiesStrict(set, copySetLoose(set, state), state); +} + +function createDefaultCache() { + return new WeakMap(); +} +function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }) { + const defaultMethods = { + array: strict ? copyArrayStrict : copyArrayLoose, + arrayBuffer: copyArrayBuffer, + asyncGenerator: copySelf, + blob: copyBlob, + dataView: copyDataView, + date: copyDate, + error: copySelf, + generator: copySelf, + map: strict ? copyMapStrict : copyMapLoose, + object: strict ? copyObjectStrict : copyObjectLoose, + regExp: copyRegExp, + set: strict ? copySetStrict : copySetLoose, + }; + const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods; + const copiers = getTagSpecificCopiers(methods); + const createCache = createCacheOverride || createDefaultCache; + // Extra safety check to ensure that object and array copiers are always provided, + // avoiding runtime errors. + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!copiers.Object || !copiers.Array) { + throw new Error('An object and array copier must be provided.'); + } + return { createCache, copiers, methods, strict: Boolean(strict) }; +} +/** + * Get the copiers used for each specific object tag. 
+ */
+function getTagSpecificCopiers(methods) {
+    return {
+        Arguments: methods.object,
+        Array: methods.array,
+        ArrayBuffer: methods.arrayBuffer,
+        AsyncGenerator: methods.asyncGenerator,
+        Blob: methods.blob,
+        Boolean: copyPrimitiveWrapper,
+        DataView: methods.dataView,
+        Date: methods.date,
+        Error: methods.error,
+        Float32Array: methods.arrayBuffer,
+        Float64Array: methods.arrayBuffer,
+        Generator: methods.generator,
+        Int8Array: methods.arrayBuffer,
+        Int16Array: methods.arrayBuffer,
+        Int32Array: methods.arrayBuffer,
+        Map: methods.map,
+        Number: copyPrimitiveWrapper,
+        Object: methods.object,
+        Promise: copySelf,
+        RegExp: methods.regExp,
+        Set: methods.set,
+        String: copyPrimitiveWrapper,
+        WeakMap: copySelf,
+        WeakSet: copySelf,
+        Uint8Array: methods.arrayBuffer,
+        Uint8ClampedArray: methods.arrayBuffer,
+        Uint16Array: methods.arrayBuffer,
+        Uint32Array: methods.arrayBuffer,
+        Uint64Array: methods.arrayBuffer,
+    };
+}
+
+/**
+ * Create a custom copier based on custom options for any of the following:
+ * - `createCache` method to create a cache for copied objects
+ * - custom copier `methods` for specific object types
+ * - `strict` mode to copy all properties with their descriptors
+ */
+function createCopier(options = {}) {
+    const { createCache, copiers } = getOptions(options);
+    const { Array: copyArray, Object: copyObject } = copiers;
+    function copier(value, state) {
+        state.prototype = state.Constructor = undefined;
+        if (!value || typeof value !== 'object') {
+            return value;
+        }
+        if (state.cache.has(value)) {
+            return state.cache.get(value);
+        }
+        state.prototype = Object.getPrototypeOf(value);
+        // Using logical AND for speed, since optional chaining transforms to
+        // a local variable usage.
+        // eslint-disable-next-line @typescript-eslint/prefer-optional-chain
+        state.Constructor = state.prototype && state.prototype.constructor;
+        // plain objects
+        if (!state.Constructor || state.Constructor === Object) {
+            return copyObject(value, state);
+        }
+        // arrays
+        if (Array.isArray(value)) {
+            return copyArray(value, state);
+        }
+        const tagSpecificCopier = copiers[getTag(value)];
+        if (tagSpecificCopier) {
+            return tagSpecificCopier(value, state);
+        }
+        return typeof value.then === 'function' ? value : copyObject(value, state);
+    }
+    return function copy(value) {
+        return copier(value, {
+            Constructor: undefined,
+            cache: createCache(),
+            copier,
+            prototype: undefined,
+        });
+    };
+}
+/**
+ * Copy a value as deeply as possible, maintaining a strict recreation of object
+ * properties. All properties (including non-enumerable ones) are copied with their
+ * original property descriptors on both objects and arrays.
+ */
+const copyStrict = createCopier({ strict: true });
+/**
+ * Copy a value as deeply as possible.
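+ *
+ * @example
+ * // Minimal usage sketch (not part of the library source), based on the
+ * // named exports below:
+ * // import { copy } from 'fast-copy';
+ * // const original = { nested: { when: new Date() } };
+ * // original.self = original;          // cycles are safe: the WeakMap cache
+ * // const cloned = copy(original);     // returns the in-progress clone
+ * // cloned.nested !== original.nested; // true -- a genuinely deep copy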
+ */ +const copy = createCopier(); + +export { copy, copyStrict, createCopier }; +//# sourceMappingURL=index.mjs.map diff --git a/node_modules/fast-copy/dist/es/index.mjs.map b/node_modules/fast-copy/dist/es/index.mjs.map new file mode 100644 index 0000000..f6e3a1f --- /dev/null +++ b/node_modules/fast-copy/dist/es/index.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"index.mjs","sources":["../../../src/utils.ts","../../../src/copier.ts","../../../src/options.ts","../../../src/index.ts"],"sourcesContent":["export interface Cache {\n has: (value: any) => boolean;\n set: (key: any, value: any) => void;\n get: (key: any) => any;\n}\n\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst toStringFunction = Function.prototype.toString;\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst toStringObject = Object.prototype.toString;\n\n/**\n * Get an empty version of the object with the same prototype it has.\n */\nexport function getCleanClone(prototype: any): any {\n if (!prototype) {\n return Object.create(null);\n }\n\n const Constructor = prototype.constructor;\n\n if (Constructor === Object) {\n return prototype === Object.prototype ? {} : Object.create(prototype as object | null);\n }\n\n if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) {\n try {\n return new Constructor();\n } catch {\n // Ignore\n }\n }\n\n return Object.create(prototype as object | null);\n}\n\n/**\n * Get the tag of the value passed, so that the correct copier can be used.\n */\nexport function getTag(value: any): string {\n const stringTag = value[Symbol.toStringTag];\n\n if (stringTag) {\n return stringTag;\n }\n\n const type = toStringObject.call(value);\n\n return type.substring(8, type.length - 1);\n}\n","import { getCleanClone } from './utils.js';\nimport type { Cache } from './utils.ts';\n\nexport type InternalCopier = (value: Value, state: State) => Value;\n\nexport interface State {\n Constructor: any;\n cache: Cache;\n copier: InternalCopier;\n prototype: any;\n}\n\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst { hasOwnProperty, propertyIsEnumerable } = Object.prototype;\n\nfunction copyOwnDescriptor(\n original: Value,\n clone: Value,\n property: string | symbol,\n state: State,\n): void {\n const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || {\n configurable: true,\n enumerable: true,\n value: original[property as keyof Value],\n writable: true,\n };\n const descriptor =\n ownDescriptor.get || ownDescriptor.set\n ? ownDescriptor\n : {\n configurable: ownDescriptor.configurable,\n enumerable: ownDescriptor.enumerable,\n value: state.copier(ownDescriptor.value, state),\n writable: ownDescriptor.writable,\n };\n\n try {\n Object.defineProperty(clone, property, descriptor);\n } catch {\n // The above can fail on node in extreme edge cases, so fall back to the loose assignment.\n clone[property as keyof Value] = descriptor.get ? 
descriptor.get() : descriptor.value;\n }\n}\n\n/**\n * Striclty copy all properties contained on the object.\n */\nfunction copyOwnPropertiesStrict(value: Value, clone: Value, state: State): Value {\n const names = Object.getOwnPropertyNames(value);\n\n for (let index = 0; index < names.length; ++index) {\n copyOwnDescriptor(value, clone, names[index]!, state);\n }\n\n const symbols = Object.getOwnPropertySymbols(value);\n\n for (let index = 0; index < symbols.length; ++index) {\n copyOwnDescriptor(value, clone, symbols[index]!, state);\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the indexed values in the array.\n */\nexport function copyArrayLoose(array: any[], state: State) {\n const clone = new state.Constructor();\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(array, clone);\n\n for (let index = 0; index < array.length; ++index) {\n clone[index] = state.copier(array[index], state);\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the indexed values in the array, as well as any custom properties.\n */\nexport function copyArrayStrict(array: Value, state: State) {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(array, clone);\n\n return copyOwnPropertiesStrict(array, clone, state);\n}\n\n/**\n * Copy the contents of the ArrayBuffer.\n */\nexport function copyArrayBuffer(arrayBuffer: Value, _state: State): Value {\n return arrayBuffer.slice(0) as Value;\n}\n\n/**\n * Create a new Blob with the contents of the original.\n */\nexport function copyBlob(blob: Value, _state: State): Value {\n return blob.slice(0, blob.size, blob.type) as Value;\n}\n\n/**\n * Create a new DataView with the contents of the original.\n */\nexport function copyDataView(dataView: Value, state: State): Value {\n return new state.Constructor(copyArrayBuffer(dataView.buffer, state));\n}\n\n/**\n * Create a new Date based on the time of the original.\n */\nexport function copyDate(date: Value, state: State): Value {\n return new state.Constructor(date.getTime());\n}\n\n/**\n * Deeply copy the keys and values of the original.\n */\nexport function copyMapLoose>(map: Value, state: State): Value {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(map, clone);\n\n map.forEach((value, key) => {\n clone.set(key, state.copier(value, state));\n });\n\n return clone;\n}\n\n/**\n * Deeply copy the keys and values of the original, as well as any custom properties.\n */\nexport function copyMapStrict>(map: Value, state: State) {\n return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state);\n}\n\n/**\n * Deeply copy the properties (keys and symbols) and values of the original.\n */\nexport function copyObjectLoose>(object: Value, state: State): Value {\n const clone = getCleanClone(state.prototype);\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(object, clone);\n\n for (const key in object) {\n if (hasOwnProperty.call(object, key)) {\n clone[key] = state.copier(object[key], state);\n }\n }\n\n const symbols = Object.getOwnPropertySymbols(object);\n\n for (let index = 0; index < symbols.length; ++index) {\n const symbol = symbols[index]!;\n\n if (propertyIsEnumerable.call(object, symbol)) {\n clone[symbol] = state.copier((object as any)[symbol], state);\n }\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the properties (keys and 
symbols) and values of the original, as well\n * as any hidden or non-enumerable properties.\n */\nexport function copyObjectStrict>(object: Value, state: State): Value {\n const clone = getCleanClone(state.prototype);\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(object, clone);\n\n return copyOwnPropertiesStrict(object, clone, state);\n}\n\n/**\n * Create a new primitive wrapper from the value of the original.\n */\nexport function copyPrimitiveWrapper<\n // Specifically use the object constructor types\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Value extends Boolean | Number | String,\n>(primitiveObject: Value, state: State): Value {\n return new state.Constructor(primitiveObject.valueOf());\n}\n\n/**\n * Create a new RegExp based on the value and flags of the original.\n */\nexport function copyRegExp(regExp: Value, state: State): Value {\n const clone = new state.Constructor(regExp.source, regExp.flags) as Value;\n\n clone.lastIndex = regExp.lastIndex;\n\n return clone;\n}\n\n/**\n * Return the original value (an identity function).\n *\n * @note\n * THis is used for objects that cannot be copied, such as WeakMap.\n */\nexport function copySelf(value: Value, _state: State): Value {\n return value;\n}\n\n/**\n * Deeply copy the values of the original.\n */\nexport function copySetLoose>(set: Value, state: State): Value {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(set, clone);\n\n set.forEach((value) => {\n clone.add(state.copier(value, state));\n });\n\n return clone;\n}\n\n/**\n * Deeply copy the values of the original, as well as any custom properties.\n */\nexport function copySetStrict>(set: Value, state: State): Value {\n return copyOwnPropertiesStrict(set, copySetLoose(set, state), state);\n}\n","import {\n copyArrayBuffer,\n copyArrayLoose,\n copyArrayStrict,\n copyBlob,\n copyDataView,\n copyDate,\n copyMapLoose,\n copyMapStrict,\n copyObjectLoose,\n copyObjectStrict,\n copyPrimitiveWrapper,\n copyRegExp,\n copySelf,\n copySetLoose,\n copySetStrict,\n} from './copier.js';\nimport type { InternalCopier } from './copier.ts';\nimport type { Cache } from './utils.ts';\n\nexport interface CopierMethods {\n array?: InternalCopier;\n arrayBuffer?: InternalCopier;\n asyncGenerator?: InternalCopier;\n blob?: InternalCopier;\n dataView?: InternalCopier;\n date?: InternalCopier;\n error?: InternalCopier;\n generator?: InternalCopier;\n map?: InternalCopier>;\n object?: InternalCopier>;\n regExp?: InternalCopier;\n set?: InternalCopier>;\n}\n\ninterface Copiers {\n [key: string]: InternalCopier | undefined;\n\n Arguments: InternalCopier>;\n Array: InternalCopier;\n ArrayBuffer: InternalCopier;\n AsyncGenerator: InternalCopier;\n Blob: InternalCopier;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Boolean: InternalCopier;\n DataView: InternalCopier;\n Date: InternalCopier;\n Error: InternalCopier;\n Float32Array: InternalCopier;\n Float64Array: InternalCopier;\n Generator: InternalCopier;\n\n Int8Array: InternalCopier;\n Int16Array: InternalCopier;\n Int32Array: InternalCopier;\n Map: InternalCopier>;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Number: InternalCopier;\n Object: InternalCopier>;\n Promise: InternalCopier>;\n RegExp: InternalCopier;\n Set: InternalCopier>;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n 
String: InternalCopier;\n WeakMap: InternalCopier>;\n WeakSet: InternalCopier>;\n Uint8Array: InternalCopier;\n Uint8ClampedArray: InternalCopier;\n Uint16Array: InternalCopier;\n Uint32Array: InternalCopier;\n Uint64Array: InternalCopier;\n}\n\nexport interface CreateCopierOptions {\n createCache?: () => Cache;\n methods?: CopierMethods;\n strict?: boolean;\n}\n\nexport interface RequiredCreateCopierOptions extends Omit, 'methods'> {\n copiers: Copiers;\n methods: Required;\n}\n\nexport function createDefaultCache(): Cache {\n return new WeakMap();\n}\n\nexport function getOptions({\n createCache: createCacheOverride,\n methods: methodsOverride,\n strict,\n}: CreateCopierOptions): RequiredCreateCopierOptions {\n const defaultMethods = {\n array: strict ? copyArrayStrict : copyArrayLoose,\n arrayBuffer: copyArrayBuffer,\n asyncGenerator: copySelf,\n blob: copyBlob,\n dataView: copyDataView,\n date: copyDate,\n error: copySelf,\n generator: copySelf,\n map: strict ? copyMapStrict : copyMapLoose,\n object: strict ? copyObjectStrict : copyObjectLoose,\n regExp: copyRegExp,\n set: strict ? copySetStrict : copySetLoose,\n };\n\n const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods;\n const copiers = getTagSpecificCopiers(methods);\n const createCache = createCacheOverride || createDefaultCache;\n\n // Extra safety check to ensure that object and array copiers are always provided,\n // avoiding runtime errors.\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!copiers.Object || !copiers.Array) {\n throw new Error('An object and array copier must be provided.');\n }\n\n return { createCache, copiers, methods, strict: Boolean(strict) };\n}\n\n/**\n * Get the copiers used for each specific object tag.\n */\nexport function getTagSpecificCopiers(methods: Required): Copiers {\n return {\n Arguments: methods.object,\n Array: methods.array,\n ArrayBuffer: methods.arrayBuffer,\n AsyncGenerator: methods.asyncGenerator,\n Blob: methods.blob,\n Boolean: copyPrimitiveWrapper,\n DataView: methods.dataView,\n Date: methods.date,\n Error: methods.error,\n Float32Array: methods.arrayBuffer,\n Float64Array: methods.arrayBuffer,\n Generator: methods.generator,\n Int8Array: methods.arrayBuffer,\n Int16Array: methods.arrayBuffer,\n Int32Array: methods.arrayBuffer,\n Map: methods.map,\n Number: copyPrimitiveWrapper,\n Object: methods.object,\n Promise: copySelf,\n RegExp: methods.regExp,\n Set: methods.set,\n String: copyPrimitiveWrapper,\n WeakMap: copySelf,\n WeakSet: copySelf,\n Uint8Array: methods.arrayBuffer,\n Uint8ClampedArray: methods.arrayBuffer,\n Uint16Array: methods.arrayBuffer,\n Uint32Array: methods.arrayBuffer,\n Uint64Array: methods.arrayBuffer,\n };\n}\n","import type { State } from './copier.ts';\nimport { getOptions } from './options.js';\nimport type { CreateCopierOptions } from './options.ts';\nimport { getTag } from './utils.js';\n\nexport type { State } from './copier.ts';\nexport type { CreateCopierOptions } from './options.ts';\n\n/**\n * Create a custom copier based on custom options for any of the following:\n * - `createCache` method to create a cache for copied objects\n * - custom copier `methods` for specific object types\n * - `strict` mode to copy all properties with their descriptors\n */\nexport function createCopier(options: CreateCopierOptions = {}) {\n const { createCache, copiers } = getOptions(options);\n const { Array: copyArray, Object: copyObject } = copiers;\n\n function copier(value: any, state: 
State): any {\n state.prototype = state.Constructor = undefined;\n\n if (!value || typeof value !== 'object') {\n return value;\n }\n\n if (state.cache.has(value)) {\n return state.cache.get(value);\n }\n\n state.prototype = Object.getPrototypeOf(value);\n // Using logical AND for speed, since optional chaining transforms to\n // a local variable usage.\n // eslint-disable-next-line @typescript-eslint/prefer-optional-chain\n state.Constructor = state.prototype && state.prototype.constructor;\n\n // plain objects\n if (!state.Constructor || state.Constructor === Object) {\n return copyObject(value as Record, state);\n }\n\n // arrays\n if (Array.isArray(value)) {\n return copyArray(value, state);\n }\n\n const tagSpecificCopier = copiers[getTag(value)];\n\n if (tagSpecificCopier) {\n return tagSpecificCopier(value, state);\n }\n\n return typeof value.then === 'function' ? value : copyObject(value as Record, state);\n }\n\n return function copy(value: Value): Value {\n return copier(value, {\n Constructor: undefined,\n cache: createCache(),\n copier,\n prototype: undefined,\n });\n };\n}\n\n/**\n * Copy an value deeply as much as possible, where strict recreation of object properties\n * are maintained. All properties (including non-enumerable ones) are copied with their\n * original property descriptors on both objects and arrays.\n */\nexport const copyStrict = createCopier({ strict: true });\n\n/**\n * Copy an value deeply as much as possible.\n */\nexport const copy = createCopier();\n"],"names":[],"mappings":"AAMA;AACA,MAAM,gBAAgB,GAAG,QAAQ,CAAC,SAAS,CAAC,QAAQ;AACpD;AACA,MAAM,cAAc,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ;AAEhD;;AAEG;AACG,SAAU,aAAa,CAAC,SAAc,EAAA;IAC1C,IAAI,CAAC,SAAS,EAAE;AACd,QAAA,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC;IAC5B;AAEA,IAAA,MAAM,WAAW,GAAG,SAAS,CAAC,WAAW;AAEzC,IAAA,IAAI,WAAW,KAAK,MAAM,EAAE;AAC1B,QAAA,OAAO,SAAS,KAAK,MAAM,CAAC,SAAS,GAAG,EAAE,GAAG,MAAM,CAAC,MAAM,CAAC,SAA0B,CAAC;IACxF;AAEA,IAAA,IAAI,WAAW,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE;AAC/E,QAAA,IAAI;YACF,OAAO,IAAI,WAAW,EAAE;QAC1B;AAAE,QAAA,OAAA,EAAA,EAAM;;QAER;IACF;AAEA,IAAA,OAAO,MAAM,CAAC,MAAM,CAAC,SAA0B,CAAC;AAClD;AAEA;;AAEG;AACG,SAAU,MAAM,CAAC,KAAU,EAAA;IAC/B,MAAM,SAAS,GAAG,KAAK,CAAC,MAAM,CAAC,WAAW,CAAC;IAE3C,IAAI,SAAS,EAAE;AACb,QAAA,OAAO,SAAS;IAClB;IAEA,MAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC;AAEvC,IAAA,OAAO,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;AAC3C;;ACrCA;AACA,MAAM,EAAE,cAAc,EAAE,oBAAoB,EAAE,GAAG,MAAM,CAAC,SAAS;AAEjE,SAAS,iBAAiB,CACxB,QAAe,EACf,KAAY,EACZ,QAAyB,EACzB,KAAY,EAAA;IAEZ,MAAM,aAAa,GAAG,MAAM,CAAC,wBAAwB,CAAC,QAAQ,EAAE,QAAQ,CAAC,IAAI;AAC3E,QAAA,YAAY,EAAE,IAAI;AAClB,QAAA,UAAU,EAAE,IAAI;AAChB,QAAA,KAAK,EAAE,QAAQ,CAAC,QAAuB,CAAC;AACxC,QAAA,QAAQ,EAAE,IAAI;KACf;IACD,MAAM,UAAU,GACd,aAAa,CAAC,GAAG,IAAI,aAAa,CAAC;AACjC,UAAE;AACF,UAAE;YACE,YAAY,EAAE,aAAa,CAAC,YAAY;YACxC,UAAU,EAAE,aAAa,CAAC,UAAU;YACpC,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,EAAE,KAAK,CAAC;YAC/C,QAAQ,EAAE,aAAa,CAAC,QAAQ;SACjC;AAEP,IAAA,IAAI;QACF,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,QAAQ,EAAE,UAAU,CAAC;IACpD;AAAE,IAAA,OAAA,EAAA,EAAM;;QAEN,KAAK,CAAC,QAAuB,CAAC,GAAG,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,GAAG,EAAE,GAAG,UAAU,CAAC,KAAK;IACvF;AACF;AAEA;;AAEG;AACH,SAAS,uBAAuB,CAAuB,KAAY,EAAE,KAAY,EAAE,KAAY,EAAA;IAC7F,MAAM,KAAK,GAAG,MAAM,CAAC,mBAAmB,CAAC,KAAK,CAAC;AAE/C,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACjD,QAAA,iBAAiB,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,CAAE,EAAE,KAAK,CAAC;IACvD;IAEA,MAAM,OAAO,GAAG,MAAM,CAAC,qBAAqB,CAAC,KAAK,CAAC;AAEnD,IAAA,KAAK,IAAI,KAAK,GA
AG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACnD,QAAA,iBAAiB,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,CAAE,EAAE,KAAK,CAAC;IACzD;AAEA,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,cAAc,CAAC,KAAY,EAAE,KAAY,EAAA;AACvD,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAE;;IAGrC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;AAE7B,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACjD,QAAA,KAAK,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,KAAK,CAAC;IAClD;AAEA,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,eAAe,CAAsB,KAAY,EAAE,KAAY,EAAA;AAC7E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;IAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;IAE7B,OAAO,uBAAuB,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;AACrD;AAEA;;AAEG;AACG,SAAU,eAAe,CAAgC,WAAkB,EAAE,MAAa,EAAA;AAC9F,IAAA,OAAO,WAAW,CAAC,KAAK,CAAC,CAAC,CAAU;AACtC;AAEA;;AAEG;AACG,SAAU,QAAQ,CAAqB,IAAW,EAAE,MAAa,EAAA;AACrE,IAAA,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAU;AACrD;AAEA;;AAEG;AACG,SAAU,YAAY,CAAyB,QAAe,EAAE,KAAY,EAAA;AAChF,IAAA,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,eAAe,CAAC,QAAQ,CAAC,MAAa,CAAC,CAAC;AACvE;AAEA;;AAEG;AACG,SAAU,QAAQ,CAAqB,IAAW,EAAE,KAAY,EAAA;IACpE,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;AAC9C;AAEA;;AAEG;AACG,SAAU,YAAY,CAA8B,GAAU,EAAE,KAAY,EAAA;AAChF,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;IAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;IAE3B,GAAG,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,KAAI;AACzB,QAAA,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5C,IAAA,CAAC,CAAC;AAEF,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,aAAa,CAA8B,GAAU,EAAE,KAAY,EAAA;AACjF,IAAA,OAAO,uBAAuB,CAAC,GAAG,EAAE,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,KAAK,CAAC;AACtE;AAEA;;AAEG;AACG,SAAU,eAAe,CAAoC,MAAa,EAAE,KAAY,EAAA;IAC5F,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC;;IAG5C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC;AAE9B,IAAA,KAAK,MAAM,GAAG,IAAI,MAAM,EAAE;QACxB,IAAI,cAAc,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE;AACpC,YAAA,KAAK,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,KAAK,CAAC;QAC/C;IACF;IAEA,MAAM,OAAO,GAAG,MAAM,CAAC,qBAAqB,CAAC,MAAM,CAAC;AAEpD,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;AACnD,QAAA,MAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAE;QAE9B,IAAI,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE;AAC7C,YAAA,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAE,MAAc,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC;QAC9D;IACF;AAEA,IAAA,OAAO,KAAK;AACd;AAEA;;;AAGG;AACG,SAAU,gBAAgB,CAAoC,MAAa,EAAE,KAAY,EAAA;IAC7F,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC;;IAG5C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC;IAE9B,OAAO,uBAAuB,CAAC,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC;AACtD;AAEA;;AAEG;AACG,SAAU,oBAAoB,CAIlC,eAAsB,EAAE,KAAY,EAAA;IACpC,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,eAAe,CAAC,OAAO,EAAE,CAAC;AACzD;AAEA;;AAEG;AACG,SAAU,UAAU,CAAuB,MAAa,EAAE,KAAY,EAAA;AAC1E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,CAAU;AAEzE,IAAA,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS;AAElC,IAAA,OAAO,KAAK;AACd;AAEA;;;;;AAKG;AACG,SAAU,QAAQ,CAAQ,KAAY,EAAE,MAAa,EAAA;AACzD,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,YAAY,CAAyB,GAAU,EAAE,KAAY,EAAA;AAC3E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;IAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;AAE3B,IAAA,GAAG,CAAC,OAAO,CAAC,CAAC,KAAK,KAAI;AACpB,QAAA,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AACvC,IAAA,CAAC,CAAC;AAEF,IAAA,OAAO,KAAK;AACd;AAEA;;AAEG;AACG,SAAU,aAAa,CAAyB,GAAU,EAAE,KAAY,EAAA;AAC5E,IAAA,OAAO,uBAAuB,CAAC,GAAG,EAAE,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC,E
AAE,KAAK,CAAC;AACtE;;SCzJgB,kBAAkB,GAAA;IAChC,OAAO,IAAI,OAAO,EAAE;AACtB;AAEM,SAAU,UAAU,CAAC,EACzB,WAAW,EAAE,mBAAmB,EAChC,OAAO,EAAE,eAAe,EACxB,MAAM,GACc,EAAA;AACpB,IAAA,MAAM,cAAc,GAAG;QACrB,KAAK,EAAE,MAAM,GAAG,eAAe,GAAG,cAAc;AAChD,QAAA,WAAW,EAAE,eAAe;AAC5B,QAAA,cAAc,EAAE,QAAQ;AACxB,QAAA,IAAI,EAAE,QAAQ;AACd,QAAA,QAAQ,EAAE,YAAY;AACtB,QAAA,IAAI,EAAE,QAAQ;AACd,QAAA,KAAK,EAAE,QAAQ;AACf,QAAA,SAAS,EAAE,QAAQ;QACnB,GAAG,EAAE,MAAM,GAAG,aAAa,GAAG,YAAY;QAC1C,MAAM,EAAE,MAAM,GAAG,gBAAgB,GAAG,eAAe;AACnD,QAAA,MAAM,EAAE,UAAU;QAClB,GAAG,EAAE,MAAM,GAAG,aAAa,GAAG,YAAY;KAC3C;AAED,IAAA,MAAM,OAAO,GAAG,eAAe,GAAG,MAAM,CAAC,MAAM,CAAC,cAAc,EAAE,eAAe,CAAC,GAAG,cAAc;AACjG,IAAA,MAAM,OAAO,GAAG,qBAAqB,CAAC,OAAO,CAAC;AAC9C,IAAA,MAAM,WAAW,GAAG,mBAAmB,IAAI,kBAAkB;;;;IAK7D,IAAI,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE;AACrC,QAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;IACjE;AAEA,IAAA,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE;AACnE;AAEA;;AAEG;AACG,SAAU,qBAAqB,CAAC,OAAgC,EAAA;IACpE,OAAO;QACL,SAAS,EAAE,OAAO,CAAC,MAAM;QACzB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,WAAW,EAAE,OAAO,CAAC,WAAW;QAChC,cAAc,EAAE,OAAO,CAAC,cAAc;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;AAClB,QAAA,OAAO,EAAE,oBAAoB;QAC7B,QAAQ,EAAE,OAAO,CAAC,QAAQ;QAC1B,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,YAAY,EAAE,OAAO,CAAC,WAAW;QACjC,YAAY,EAAE,OAAO,CAAC,WAAW;QACjC,SAAS,EAAE,OAAO,CAAC,SAAS;QAC5B,SAAS,EAAE,OAAO,CAAC,WAAW;QAC9B,UAAU,EAAE,OAAO,CAAC,WAAW;QAC/B,UAAU,EAAE,OAAO,CAAC,WAAW;QAC/B,GAAG,EAAE,OAAO,CAAC,GAAG;AAChB,QAAA,MAAM,EAAE,oBAAoB;QAC5B,MAAM,EAAE,OAAO,CAAC,MAAM;AACtB,QAAA,OAAO,EAAE,QAAQ;QACjB,MAAM,EAAE,OAAO,CAAC,MAAM;QACtB,GAAG,EAAE,OAAO,CAAC,GAAG;AAChB,QAAA,MAAM,EAAE,oBAAoB;AAC5B,QAAA,OAAO,EAAE,QAAQ;AACjB,QAAA,OAAO,EAAE,QAAQ;QACjB,UAAU,EAAE,OAAO,CAAC,WAAW;QAC/B,iBAAiB,EAAE,OAAO,CAAC,WAAW;QACtC,WAAW,EAAE,OAAO,CAAC,WAAW;QAChC,WAAW,EAAE,OAAO,CAAC,WAAW;QAChC,WAAW,EAAE,OAAO,CAAC,WAAW;KACjC;AACH;;ACrJA;;;;;AAKG;AACG,SAAU,YAAY,CAAC,OAAA,GAA+B,EAAE,EAAA;IAC5D,MAAM,EAAE,WAAW,EAAE,OAAO,EAAE,GAAG,UAAU,CAAC,OAAO,CAAC;IACpD,MAAM,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,OAAO;AAExD,IAAA,SAAS,MAAM,CAAC,KAAU,EAAE,KAAY,EAAA;QACtC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,WAAW,GAAG,SAAS;QAE/C,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;AACvC,YAAA,OAAO,KAAK;QACd;QAEA,IAAI,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;YAC1B,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC;QAC/B;QAEA,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC;;;;AAI9C,QAAA,KAAK,CAAC,WAAW,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,SAAS,CAAC,WAAW;;QAGlE,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,KAAK,MAAM,EAAE;AACtD,YAAA,OAAO,UAAU,CAAC,KAA4B,EAAE,KAAK,CAAC;QACxD;;AAGA,QAAA,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;AACxB,YAAA,OAAO,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC;QAChC;QAEA,MAAM,iBAAiB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAEhD,IAAI,iBAAiB,EAAE;AACrB,YAAA,OAAO,iBAAiB,CAAC,KAAK,EAAE,KAAK,CAAC;QACxC;AAEA,QAAA,OAAO,OAAO,KAAK,CAAC,IAAI,KAAK,UAAU,GAAG,KAAK,GAAG,UAAU,CAAC,KAA4B,EAAE,KAAK,CAAC;IACnG;IAEA,OAAO,SAAS,IAAI,CAAQ,KAAY,EAAA;QACtC,OAAO,MAAM,CAAC,KAAK,EAAE;AACnB,YAAA,WAAW,EAAE,SAAS;YACtB,KAAK,EAAE,WAAW,EAAE;YACpB,MAAM;AACN,YAAA,SAAS,EAAE,SAAS;AACrB,SAAA,CAAC;AACJ,IAAA,CAAC;AACH;AAEA;;;;AAIG;AACI,MAAM,UAAU,GAAG,YAAY,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE;AAEvD;;AAEG;AACI,MAAM,IAAI,GAAG,YAAY;;;;"} \ No newline at end of file diff --git a/node_modules/fast-copy/dist/es/options.d.mts b/node_modules/fast-copy/dist/es/options.d.mts new file mode 100644 index 0000000..ef6948d --- /dev/null +++ b/node_modules/fast-copy/dist/es/options.d.mts @@ -0,0 +1,68 @@ +import type { InternalCopier } from 
'./copier.d.mts';
+import type { Cache } from './utils.d.mts';
+export interface CopierMethods {
+    array?: InternalCopier<any[]>;
+    arrayBuffer?: InternalCopier<ArrayBuffer>;
+    asyncGenerator?: InternalCopier<AsyncGenerator>;
+    blob?: InternalCopier<Blob>;
+    dataView?: InternalCopier<DataView>;
+    date?: InternalCopier<Date>;
+    error?: InternalCopier<Error>;
+    generator?: InternalCopier<Generator>;
+    map?: InternalCopier<Map<any, any>>;
+    object?: InternalCopier<Record<string, any>>;
+    regExp?: InternalCopier<RegExp>;
+    set?: InternalCopier<Set<any>>;
+}
+interface Copiers {
+    [key: string]: InternalCopier<any> | undefined;
+    Arguments: InternalCopier<Record<string, any>>;
+    Array: InternalCopier<any[]>;
+    ArrayBuffer: InternalCopier<ArrayBuffer>;
+    AsyncGenerator: InternalCopier<AsyncGenerator>;
+    Blob: InternalCopier<Blob>;
+    Boolean: InternalCopier<Boolean>;
+    DataView: InternalCopier<DataView>;
+    Date: InternalCopier<Date>;
+    Error: InternalCopier<Error>;
+    Float32Array: InternalCopier<Float32Array>;
+    Float64Array: InternalCopier<Float64Array>;
+    Generator: InternalCopier<Generator>;
+    Int8Array: InternalCopier<Int8Array>;
+    Int16Array: InternalCopier<Int16Array>;
+    Int32Array: InternalCopier<Int32Array>;
+    Map: InternalCopier<Map<any, any>>;
+    Number: InternalCopier<Number>;
+    Object: InternalCopier<Record<string, any>>;
+    Promise: InternalCopier<Promise<any>>;
+    RegExp: InternalCopier<RegExp>;
+    Set: InternalCopier<Set<any>>;
+    String: InternalCopier<String>;
+    WeakMap: InternalCopier<WeakMap<any, any>>;
+    WeakSet: InternalCopier<WeakSet<any>>;
+    Uint8Array: InternalCopier<Uint8Array>;
+    Uint8ClampedArray: InternalCopier<Uint8ClampedArray>;
+    Uint16Array: InternalCopier<Uint16Array>;
+    Uint32Array: InternalCopier<Uint32Array>;
+    Uint64Array: InternalCopier<BigUint64Array>;
+}
+export interface CreateCopierOptions {
+    createCache?: () => Cache;
+    methods?: CopierMethods;
+    strict?: boolean;
+}
+export interface RequiredCreateCopierOptions extends Omit<Required<CreateCopierOptions>, 'methods'> {
+    copiers: Copiers;
+    methods: Required<CopierMethods>;
+}
+export declare function createDefaultCache(): Cache;
+export declare function getOptions({
+    createCache: createCacheOverride,
+    methods: methodsOverride,
+    strict,
+}: CreateCopierOptions): RequiredCreateCopierOptions;
+/**
+ * Get the copiers used for each specific object tag.
+ */
+export declare function getTagSpecificCopiers(methods: Required<CopierMethods>): Copiers;
+export {};
diff --git a/node_modules/fast-copy/dist/es/utils.d.mts b/node_modules/fast-copy/dist/es/utils.d.mts
new file mode 100644
index 0000000..6c87b72
--- /dev/null
+++ b/node_modules/fast-copy/dist/es/utils.d.mts
@@ -0,0 +1,13 @@
+export interface Cache {
+    has: (value: any) => boolean;
+    set: (key: any, value: any) => void;
+    get: (key: any) => any;
+}
+/**
+ * Get an empty version of the object with the same prototype it has.
+ */
+export declare function getCleanClone(prototype: any): any;
+/**
+ * Get the tag of the value passed, so that the correct copier can be used.
+ */
+export declare function getTag(value: any): string;
diff --git a/node_modules/fast-copy/dist/umd/copier.d.ts b/node_modules/fast-copy/dist/umd/copier.d.ts
new file mode 100644
index 0000000..d196874
--- /dev/null
+++ b/node_modules/fast-copy/dist/umd/copier.d.ts
@@ -0,0 +1,72 @@
+import type { Cache } from './utils.ts';
+export type InternalCopier<Value = any> = (value: Value, state: State) => Value;
+export interface State {
+    Constructor: any;
+    cache: Cache;
+    copier: InternalCopier<any>;
+    prototype: any;
+}
+/**
+ * Deeply copy the indexed values in the array.
+ */
+export declare function copyArrayLoose(array: any[], state: State): any;
+/**
+ * Deeply copy the indexed values in the array, as well as any custom properties.
+ */
+export declare function copyArrayStrict<Value extends any[]>(array: Value, state: State): Value;
+/**
+ * Copy the contents of the ArrayBuffer.
+ */
+export declare function copyArrayBuffer<Value extends ArrayBuffer>(arrayBuffer: Value, _state: State): Value;
+/**
+ * Create a new Blob with the contents of the original.
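+ *
+ * @example
+ * // Illustrative sketch (not part of the library source): the implementation
+ * // slices the full range with `blob.type`, so content and MIME type survive:
+ * // const clone = copyBlob(new Blob(['hi'], { type: 'text/plain' }), state);
+ * // clone.type === 'text/plain'; // true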
+ */
+export declare function copyBlob<Value extends Blob>(blob: Value, _state: State): Value;
+/**
+ * Create a new DataView with the contents of the original.
+ */
+export declare function copyDataView<Value extends DataView>(dataView: Value, state: State): Value;
+/**
+ * Create a new Date based on the time of the original.
+ */
+export declare function copyDate<Value extends Date>(date: Value, state: State): Value;
+/**
+ * Deeply copy the keys and values of the original.
+ */
+export declare function copyMapLoose<Value extends Map<any, any>>(map: Value, state: State): Value;
+/**
+ * Deeply copy the keys and values of the original, as well as any custom properties.
+ */
+export declare function copyMapStrict<Value extends Map<any, any>>(map: Value, state: State): Value;
+/**
+ * Deeply copy the properties (keys and symbols) and values of the original.
+ */
+export declare function copyObjectLoose<Value extends Record<string, any>>(object: Value, state: State): Value;
+/**
+ * Deeply copy the properties (keys and symbols) and values of the original, as well
+ * as any hidden or non-enumerable properties.
+ */
+export declare function copyObjectStrict<Value extends Record<string, any>>(object: Value, state: State): Value;
+/**
+ * Create a new primitive wrapper from the value of the original.
+ */
+export declare function copyPrimitiveWrapper<Value extends Boolean | Number | String>(primitiveObject: Value, state: State): Value;
+/**
+ * Create a new RegExp based on the value and flags of the original.
+ */
+export declare function copyRegExp<Value extends RegExp>(regExp: Value, state: State): Value;
+/**
+ * Return the original value (an identity function).
+ *
+ * @note
+ * This is used for objects that cannot be copied, such as WeakMap.
+ */
+export declare function copySelf<Value>(value: Value, _state: State): Value;
+/**
+ * Deeply copy the values of the original.
+ */
+export declare function copySetLoose<Value extends Set<any>>(set: Value, state: State): Value;
+/**
+ * Deeply copy the values of the original, as well as any custom properties.
+ */
+export declare function copySetStrict<Value extends Set<any>>(set: Value, state: State): Value;
diff --git a/node_modules/fast-copy/dist/umd/index.d.ts b/node_modules/fast-copy/dist/umd/index.d.ts
new file mode 100644
index 0000000..819f64d
--- /dev/null
+++ b/node_modules/fast-copy/dist/umd/index.d.ts
@@ -0,0 +1,20 @@
+import type { CreateCopierOptions } from './options.ts';
+export type { State } from './copier.ts';
+export type { CreateCopierOptions } from './options.ts';
+/**
+ * Create a custom copier based on custom options for any of the following:
+ * - `createCache` method to create a cache for copied objects
+ * - custom copier `methods` for specific object types
+ * - `strict` mode to copy all properties with their descriptors
+ */
+export declare function createCopier(options?: CreateCopierOptions): <Value>(value: Value) => Value;
+/**
+ * Copy a value as deeply as possible, maintaining a strict recreation of object
+ * properties. All properties (including non-enumerable ones) are copied with their
+ * original property descriptors on both objects and arrays.
+ */
+export declare const copyStrict: <Value>(value: Value) => Value;
+/**
+ * Copy a value as deeply as possible.
+ */
+export declare const copy: <Value>(value: Value) => Value;
diff --git a/node_modules/fast-copy/dist/umd/index.js b/node_modules/fast-copy/dist/umd/index.js
new file mode 100644
index 0000000..3e7cccd
--- /dev/null
+++ b/node_modules/fast-copy/dist/umd/index.js
@@ -0,0 +1,346 @@
+(function (global, factory) {
+    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
+    typeof define === 'function' && define.amd ? define(['exports'], factory) :
+    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global["fast-copy"] = {}));
+})(this, (function (exports) { 'use strict';
+
+    // eslint-disable-next-line @typescript-eslint/unbound-method
+    const toStringFunction = Function.prototype.toString;
+    // eslint-disable-next-line @typescript-eslint/unbound-method
+    const toStringObject = Object.prototype.toString;
+    /**
+     * Get an empty version of the object with the same prototype it has.
+     */
+    function getCleanClone(prototype) {
+        if (!prototype) {
+            return Object.create(null);
+        }
+        const Constructor = prototype.constructor;
+        if (Constructor === Object) {
+            return prototype === Object.prototype ? {} : Object.create(prototype);
+        }
+        if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) {
+            try {
+                return new Constructor();
+            }
+            catch (_a) {
+                // Ignore
+            }
+        }
+        return Object.create(prototype);
+    }
+    /**
+     * Get the tag of the value passed, so that the correct copier can be used.
+     */
+    function getTag(value) {
+        const stringTag = value[Symbol.toStringTag];
+        if (stringTag) {
+            return stringTag;
+        }
+        const type = toStringObject.call(value);
+        return type.substring(8, type.length - 1);
+    }
+
+    // eslint-disable-next-line @typescript-eslint/unbound-method
+    const { hasOwnProperty, propertyIsEnumerable } = Object.prototype;
+    function copyOwnDescriptor(original, clone, property, state) {
+        const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || {
+            configurable: true,
+            enumerable: true,
+            value: original[property],
+            writable: true,
+        };
+        const descriptor = ownDescriptor.get || ownDescriptor.set
+            ? ownDescriptor
+            : {
+                configurable: ownDescriptor.configurable,
+                enumerable: ownDescriptor.enumerable,
+                value: state.copier(ownDescriptor.value, state),
+                writable: ownDescriptor.writable,
+            };
+        try {
+            Object.defineProperty(clone, property, descriptor);
+        }
+        catch (_a) {
+            // The above can fail on node in extreme edge cases, so fall back to the loose assignment.
+            clone[property] = descriptor.get ? descriptor.get() : descriptor.value;
+        }
+    }
+    /**
+     * Strictly copy all properties contained on the object.
+     */
+    function copyOwnPropertiesStrict(value, clone, state) {
+        const names = Object.getOwnPropertyNames(value);
+        for (let index = 0; index < names.length; ++index) {
+            copyOwnDescriptor(value, clone, names[index], state);
+        }
+        const symbols = Object.getOwnPropertySymbols(value);
+        for (let index = 0; index < symbols.length; ++index) {
+            copyOwnDescriptor(value, clone, symbols[index], state);
+        }
+        return clone;
+    }
+    /**
+     * Deeply copy the indexed values in the array.
+     */
+    function copyArrayLoose(array, state) {
+        const clone = new state.Constructor();
+        // set in the cache immediately to be able to reuse the object recursively
+        state.cache.set(array, clone);
+        for (let index = 0; index < array.length; ++index) {
+            clone[index] = state.copier(array[index], state);
+        }
+        return clone;
+    }
+    /**
+     * Deeply copy the indexed values in the array, as well as any custom properties.
+     */
+    function copyArrayStrict(array, state) {
+        const clone = new state.Constructor();
+        // set in the cache immediately to be able to reuse the object recursively
+        state.cache.set(array, clone);
+        return copyOwnPropertiesStrict(array, clone, state);
+    }
+    /**
+     * Copy the contents of the ArrayBuffer.
+     */
+    function copyArrayBuffer(arrayBuffer, _state) {
+        return arrayBuffer.slice(0);
+    }
+    /**
+     * Create a new Blob with the contents of the original.
+     */
+    function copyBlob(blob, _state) {
+        return blob.slice(0, blob.size, blob.type);
+    }
+    /**
+     * Create a new DataView with the contents of the original.
+     */
+    function copyDataView(dataView, state) {
+        return new state.Constructor(copyArrayBuffer(dataView.buffer));
+    }
+    /**
+     * Create a new Date based on the time of the original.
+     */
+    function copyDate(date, state) {
+        return new state.Constructor(date.getTime());
+    }
+    /**
+     * Deeply copy the keys and values of the original.
+     */
+    function copyMapLoose(map, state) {
+        const clone = new state.Constructor();
+        // set in the cache immediately to be able to reuse the object recursively
+        state.cache.set(map, clone);
+        map.forEach((value, key) => {
+            clone.set(key, state.copier(value, state));
+        });
+        return clone;
+    }
+    /**
+     * Deeply copy the keys and values of the original, as well as any custom properties.
+     */
+    function copyMapStrict(map, state) {
+        return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state);
+    }
+    /**
+     * Deeply copy the properties (keys and symbols) and values of the original.
+     */
+    function copyObjectLoose(object, state) {
+        const clone = getCleanClone(state.prototype);
+        // set in the cache immediately to be able to reuse the object recursively
+        state.cache.set(object, clone);
+        for (const key in object) {
+            if (hasOwnProperty.call(object, key)) {
+                clone[key] = state.copier(object[key], state);
+            }
+        }
+        const symbols = Object.getOwnPropertySymbols(object);
+        for (let index = 0; index < symbols.length; ++index) {
+            const symbol = symbols[index];
+            if (propertyIsEnumerable.call(object, symbol)) {
+                clone[symbol] = state.copier(object[symbol], state);
+            }
+        }
+        return clone;
+    }
+    /**
+     * Deeply copy the properties (keys and symbols) and values of the original, as well
+     * as any hidden or non-enumerable properties.
+     */
+    function copyObjectStrict(object, state) {
+        const clone = getCleanClone(state.prototype);
+        // set in the cache immediately to be able to reuse the object recursively
+        state.cache.set(object, clone);
+        return copyOwnPropertiesStrict(object, clone, state);
+    }
+    /**
+     * Create a new primitive wrapper from the value of the original.
+     */
+    function copyPrimitiveWrapper(primitiveObject, state) {
+        return new state.Constructor(primitiveObject.valueOf());
+    }
+    /**
+     * Create a new RegExp based on the value and flags of the original.
+     */
+    function copyRegExp(regExp, state) {
+        const clone = new state.Constructor(regExp.source, regExp.flags);
+        clone.lastIndex = regExp.lastIndex;
+        return clone;
+    }
+    /**
+     * Return the original value (an identity function).
+     *
+     * @note
+     * This is used for objects that cannot be copied, such as WeakMap.
+     */
+    function copySelf(value, _state) {
+        return value;
+    }
+    /**
+     * Deeply copy the values of the original.
+     */
+    function copySetLoose(set, state) {
+        const clone = new state.Constructor();
+        // set in the cache immediately to be able to reuse the object recursively
+        state.cache.set(set, clone);
+        set.forEach((value) => {
+            clone.add(state.copier(value, state));
+        });
+        return clone;
+    }
+    /**
+     * Deeply copy the values of the original, as well as any custom properties.
+     */
+    function copySetStrict(set, state) {
+        return copyOwnPropertiesStrict(set, copySetLoose(set, state), state);
+    }
+
+    function createDefaultCache() {
+        return new WeakMap();
+    }
+    function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }) {
+        const defaultMethods = {
+            array: strict ? copyArrayStrict : copyArrayLoose,
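+            // strict mode swaps in the copiers that also preserve
+            // property descriptors and custom properties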
+            arrayBuffer: copyArrayBuffer,
+            asyncGenerator: copySelf,
+            blob: copyBlob,
+            dataView: copyDataView,
+            date: copyDate,
+            error: copySelf,
+            generator: copySelf,
+            map: strict ? copyMapStrict : copyMapLoose,
+            object: strict ? copyObjectStrict : copyObjectLoose,
+            regExp: copyRegExp,
+            set: strict ? copySetStrict : copySetLoose,
+        };
+        const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods;
+        const copiers = getTagSpecificCopiers(methods);
+        const createCache = createCacheOverride || createDefaultCache;
+        // Extra safety check to ensure that object and array copiers are always provided,
+        // avoiding runtime errors.
+        // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
+        if (!copiers.Object || !copiers.Array) {
+            throw new Error('An object and array copier must be provided.');
+        }
+        return { createCache, copiers, methods, strict: Boolean(strict) };
+    }
+    /**
+     * Get the copiers used for each specific object tag.
+     */
+    function getTagSpecificCopiers(methods) {
+        return {
+            Arguments: methods.object,
+            Array: methods.array,
+            ArrayBuffer: methods.arrayBuffer,
+            AsyncGenerator: methods.asyncGenerator,
+            Blob: methods.blob,
+            Boolean: copyPrimitiveWrapper,
+            DataView: methods.dataView,
+            Date: methods.date,
+            Error: methods.error,
+            Float32Array: methods.arrayBuffer,
+            Float64Array: methods.arrayBuffer,
+            Generator: methods.generator,
+            Int8Array: methods.arrayBuffer,
+            Int16Array: methods.arrayBuffer,
+            Int32Array: methods.arrayBuffer,
+            Map: methods.map,
+            Number: copyPrimitiveWrapper,
+            Object: methods.object,
+            Promise: copySelf,
+            RegExp: methods.regExp,
+            Set: methods.set,
+            String: copyPrimitiveWrapper,
+            WeakMap: copySelf,
+            WeakSet: copySelf,
+            Uint8Array: methods.arrayBuffer,
+            Uint8ClampedArray: methods.arrayBuffer,
+            Uint16Array: methods.arrayBuffer,
+            Uint32Array: methods.arrayBuffer,
+            Uint64Array: methods.arrayBuffer,
+        };
+    }
+
+    /**
+     * Create a custom copier based on custom options for any of the following:
+     * - `createCache` method to create a cache for copied objects
+     * - custom copier `methods` for specific object types
+     * - `strict` mode to copy all properties with their descriptors
+     */
+    function createCopier(options = {}) {
+        const { createCache, copiers } = getOptions(options);
+        const { Array: copyArray, Object: copyObject } = copiers;
+        function copier(value, state) {
+            state.prototype = state.Constructor = undefined;
+            if (!value || typeof value !== 'object') {
+                return value;
+            }
+            if (state.cache.has(value)) {
+                return state.cache.get(value);
+            }
+            state.prototype = Object.getPrototypeOf(value);
+            // Using logical AND for speed, since optional chaining transforms to
+            // a local variable usage.
+            // eslint-disable-next-line @typescript-eslint/prefer-optional-chain
+            state.Constructor = state.prototype && state.prototype.constructor;
+            // plain objects
+            if (!state.Constructor || state.Constructor === Object) {
+                return copyObject(value, state);
+            }
+            // arrays
+            if (Array.isArray(value)) {
+                return copyArray(value, state);
+            }
+            const tagSpecificCopier = copiers[getTag(value)];
+            if (tagSpecificCopier) {
+                return tagSpecificCopier(value, state);
+            }
+            return typeof value.then === 'function' ? value : copyObject(value, state);
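+            // promise-like values fall through to here and are handed back
+            // uncopied (matching the Promise -> copySelf mapping above)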
+        }
+        return function copy(value) {
+            return copier(value, {
+                Constructor: undefined,
+                cache: createCache(),
+                copier,
+                prototype: undefined,
+            });
+        };
+    }
+    /**
+     * Copy a value as deeply as possible, maintaining a strict recreation of object
+     * properties. All properties (including non-enumerable ones) are copied with their
+     * original property descriptors on both objects and arrays.
+     */
+    const copyStrict = createCopier({ strict: true });
+    /**
+     * Copy a value as deeply as possible.
+     */
+    const copy = createCopier();
+
+    exports.copy = copy;
+    exports.copyStrict = copyStrict;
+    exports.createCopier = createCopier;
+
+}));
+//# sourceMappingURL=index.js.map
diff --git a/node_modules/fast-copy/dist/umd/index.js.map b/node_modules/fast-copy/dist/umd/index.js.map
new file mode 100644
index 0000000..6bb57c4
--- /dev/null
+++ b/node_modules/fast-copy/dist/umd/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sources":["../../../src/utils.ts","../../../src/copier.ts","../../../src/options.ts","../../../src/index.ts"],"sourcesContent":["export interface Cache {\n has: (value: any) => boolean;\n set: (key: any, value: any) => void;\n get: (key: any) => any;\n}\n\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst toStringFunction = Function.prototype.toString;\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst toStringObject = Object.prototype.toString;\n\n/**\n * Get an empty version of the object with the same prototype it has.\n */\nexport function getCleanClone(prototype: any): any {\n if (!prototype) {\n return Object.create(null);\n }\n\n const Constructor = prototype.constructor;\n\n if (Constructor === Object) {\n return prototype === Object.prototype ? {} : Object.create(prototype as object | null);\n }\n\n if (Constructor && ~toStringFunction.call(Constructor).indexOf('[native code]')) {\n try {\n return new Constructor();\n } catch {\n // Ignore\n }\n }\n\n return Object.create(prototype as object | null);\n}\n\n/**\n * Get the tag of the value passed, so that the correct copier can be used.\n */\nexport function getTag(value: any): string {\n const stringTag = value[Symbol.toStringTag];\n\n if (stringTag) {\n return stringTag;\n }\n\n const type = toStringObject.call(value);\n\n return type.substring(8, type.length - 1);\n}\n","import { getCleanClone } from './utils.js';\nimport type { Cache } from './utils.ts';\n\nexport type InternalCopier = (value: Value, state: State) => Value;\n\nexport interface State {\n Constructor: any;\n cache: Cache;\n copier: InternalCopier;\n prototype: any;\n}\n\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst { hasOwnProperty, propertyIsEnumerable } = Object.prototype;\n\nfunction copyOwnDescriptor(\n original: Value,\n clone: Value,\n property: string | symbol,\n state: State,\n): void {\n const ownDescriptor = Object.getOwnPropertyDescriptor(original, property) || {\n configurable: true,\n enumerable: true,\n value: original[property as keyof Value],\n writable: true,\n };\n const descriptor =\n ownDescriptor.get || ownDescriptor.set\n ? 
ownDescriptor\n : {\n configurable: ownDescriptor.configurable,\n enumerable: ownDescriptor.enumerable,\n value: state.copier(ownDescriptor.value, state),\n writable: ownDescriptor.writable,\n };\n\n try {\n Object.defineProperty(clone, property, descriptor);\n } catch {\n // The above can fail on node in extreme edge cases, so fall back to the loose assignment.\n clone[property as keyof Value] = descriptor.get ? descriptor.get() : descriptor.value;\n }\n}\n\n/**\n * Striclty copy all properties contained on the object.\n */\nfunction copyOwnPropertiesStrict(value: Value, clone: Value, state: State): Value {\n const names = Object.getOwnPropertyNames(value);\n\n for (let index = 0; index < names.length; ++index) {\n copyOwnDescriptor(value, clone, names[index]!, state);\n }\n\n const symbols = Object.getOwnPropertySymbols(value);\n\n for (let index = 0; index < symbols.length; ++index) {\n copyOwnDescriptor(value, clone, symbols[index]!, state);\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the indexed values in the array.\n */\nexport function copyArrayLoose(array: any[], state: State) {\n const clone = new state.Constructor();\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(array, clone);\n\n for (let index = 0; index < array.length; ++index) {\n clone[index] = state.copier(array[index], state);\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the indexed values in the array, as well as any custom properties.\n */\nexport function copyArrayStrict(array: Value, state: State) {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(array, clone);\n\n return copyOwnPropertiesStrict(array, clone, state);\n}\n\n/**\n * Copy the contents of the ArrayBuffer.\n */\nexport function copyArrayBuffer(arrayBuffer: Value, _state: State): Value {\n return arrayBuffer.slice(0) as Value;\n}\n\n/**\n * Create a new Blob with the contents of the original.\n */\nexport function copyBlob(blob: Value, _state: State): Value {\n return blob.slice(0, blob.size, blob.type) as Value;\n}\n\n/**\n * Create a new DataView with the contents of the original.\n */\nexport function copyDataView(dataView: Value, state: State): Value {\n return new state.Constructor(copyArrayBuffer(dataView.buffer, state));\n}\n\n/**\n * Create a new Date based on the time of the original.\n */\nexport function copyDate(date: Value, state: State): Value {\n return new state.Constructor(date.getTime());\n}\n\n/**\n * Deeply copy the keys and values of the original.\n */\nexport function copyMapLoose>(map: Value, state: State): Value {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(map, clone);\n\n map.forEach((value, key) => {\n clone.set(key, state.copier(value, state));\n });\n\n return clone;\n}\n\n/**\n * Deeply copy the keys and values of the original, as well as any custom properties.\n */\nexport function copyMapStrict>(map: Value, state: State) {\n return copyOwnPropertiesStrict(map, copyMapLoose(map, state), state);\n}\n\n/**\n * Deeply copy the properties (keys and symbols) and values of the original.\n */\nexport function copyObjectLoose>(object: Value, state: State): Value {\n const clone = getCleanClone(state.prototype);\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(object, clone);\n\n for (const key in object) {\n if 
(hasOwnProperty.call(object, key)) {\n clone[key] = state.copier(object[key], state);\n }\n }\n\n const symbols = Object.getOwnPropertySymbols(object);\n\n for (let index = 0; index < symbols.length; ++index) {\n const symbol = symbols[index]!;\n\n if (propertyIsEnumerable.call(object, symbol)) {\n clone[symbol] = state.copier((object as any)[symbol], state);\n }\n }\n\n return clone;\n}\n\n/**\n * Deeply copy the properties (keys and symbols) and values of the original, as well\n * as any hidden or non-enumerable properties.\n */\nexport function copyObjectStrict>(object: Value, state: State): Value {\n const clone = getCleanClone(state.prototype);\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(object, clone);\n\n return copyOwnPropertiesStrict(object, clone, state);\n}\n\n/**\n * Create a new primitive wrapper from the value of the original.\n */\nexport function copyPrimitiveWrapper<\n // Specifically use the object constructor types\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Value extends Boolean | Number | String,\n>(primitiveObject: Value, state: State): Value {\n return new state.Constructor(primitiveObject.valueOf());\n}\n\n/**\n * Create a new RegExp based on the value and flags of the original.\n */\nexport function copyRegExp(regExp: Value, state: State): Value {\n const clone = new state.Constructor(regExp.source, regExp.flags) as Value;\n\n clone.lastIndex = regExp.lastIndex;\n\n return clone;\n}\n\n/**\n * Return the original value (an identity function).\n *\n * @note\n * THis is used for objects that cannot be copied, such as WeakMap.\n */\nexport function copySelf(value: Value, _state: State): Value {\n return value;\n}\n\n/**\n * Deeply copy the values of the original.\n */\nexport function copySetLoose>(set: Value, state: State): Value {\n const clone = new state.Constructor() as Value;\n\n // set in the cache immediately to be able to reuse the object recursively\n state.cache.set(set, clone);\n\n set.forEach((value) => {\n clone.add(state.copier(value, state));\n });\n\n return clone;\n}\n\n/**\n * Deeply copy the values of the original, as well as any custom properties.\n */\nexport function copySetStrict>(set: Value, state: State): Value {\n return copyOwnPropertiesStrict(set, copySetLoose(set, state), state);\n}\n","import {\n copyArrayBuffer,\n copyArrayLoose,\n copyArrayStrict,\n copyBlob,\n copyDataView,\n copyDate,\n copyMapLoose,\n copyMapStrict,\n copyObjectLoose,\n copyObjectStrict,\n copyPrimitiveWrapper,\n copyRegExp,\n copySelf,\n copySetLoose,\n copySetStrict,\n} from './copier.js';\nimport type { InternalCopier } from './copier.ts';\nimport type { Cache } from './utils.ts';\n\nexport interface CopierMethods {\n array?: InternalCopier;\n arrayBuffer?: InternalCopier;\n asyncGenerator?: InternalCopier;\n blob?: InternalCopier;\n dataView?: InternalCopier;\n date?: InternalCopier;\n error?: InternalCopier;\n generator?: InternalCopier;\n map?: InternalCopier>;\n object?: InternalCopier>;\n regExp?: InternalCopier;\n set?: InternalCopier>;\n}\n\ninterface Copiers {\n [key: string]: InternalCopier | undefined;\n\n Arguments: InternalCopier>;\n Array: InternalCopier;\n ArrayBuffer: InternalCopier;\n AsyncGenerator: InternalCopier;\n Blob: InternalCopier;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Boolean: InternalCopier;\n DataView: InternalCopier;\n Date: InternalCopier;\n Error: InternalCopier;\n Float32Array: InternalCopier;\n Float64Array: 
InternalCopier;\n Generator: InternalCopier;\n\n Int8Array: InternalCopier;\n Int16Array: InternalCopier;\n Int32Array: InternalCopier;\n Map: InternalCopier>;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n Number: InternalCopier;\n Object: InternalCopier>;\n Promise: InternalCopier>;\n RegExp: InternalCopier;\n Set: InternalCopier>;\n // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types\n String: InternalCopier;\n WeakMap: InternalCopier>;\n WeakSet: InternalCopier>;\n Uint8Array: InternalCopier;\n Uint8ClampedArray: InternalCopier;\n Uint16Array: InternalCopier;\n Uint32Array: InternalCopier;\n Uint64Array: InternalCopier;\n}\n\nexport interface CreateCopierOptions {\n createCache?: () => Cache;\n methods?: CopierMethods;\n strict?: boolean;\n}\n\nexport interface RequiredCreateCopierOptions extends Omit, 'methods'> {\n copiers: Copiers;\n methods: Required;\n}\n\nexport function createDefaultCache(): Cache {\n return new WeakMap();\n}\n\nexport function getOptions({\n createCache: createCacheOverride,\n methods: methodsOverride,\n strict,\n}: CreateCopierOptions): RequiredCreateCopierOptions {\n const defaultMethods = {\n array: strict ? copyArrayStrict : copyArrayLoose,\n arrayBuffer: copyArrayBuffer,\n asyncGenerator: copySelf,\n blob: copyBlob,\n dataView: copyDataView,\n date: copyDate,\n error: copySelf,\n generator: copySelf,\n map: strict ? copyMapStrict : copyMapLoose,\n object: strict ? copyObjectStrict : copyObjectLoose,\n regExp: copyRegExp,\n set: strict ? copySetStrict : copySetLoose,\n };\n\n const methods = methodsOverride ? Object.assign(defaultMethods, methodsOverride) : defaultMethods;\n const copiers = getTagSpecificCopiers(methods);\n const createCache = createCacheOverride || createDefaultCache;\n\n // Extra safety check to ensure that object and array copiers are always provided,\n // avoiding runtime errors.\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!copiers.Object || !copiers.Array) {\n throw new Error('An object and array copier must be provided.');\n }\n\n return { createCache, copiers, methods, strict: Boolean(strict) };\n}\n\n/**\n * Get the copiers used for each specific object tag.\n */\nexport function getTagSpecificCopiers(methods: Required): Copiers {\n return {\n Arguments: methods.object,\n Array: methods.array,\n ArrayBuffer: methods.arrayBuffer,\n AsyncGenerator: methods.asyncGenerator,\n Blob: methods.blob,\n Boolean: copyPrimitiveWrapper,\n DataView: methods.dataView,\n Date: methods.date,\n Error: methods.error,\n Float32Array: methods.arrayBuffer,\n Float64Array: methods.arrayBuffer,\n Generator: methods.generator,\n Int8Array: methods.arrayBuffer,\n Int16Array: methods.arrayBuffer,\n Int32Array: methods.arrayBuffer,\n Map: methods.map,\n Number: copyPrimitiveWrapper,\n Object: methods.object,\n Promise: copySelf,\n RegExp: methods.regExp,\n Set: methods.set,\n String: copyPrimitiveWrapper,\n WeakMap: copySelf,\n WeakSet: copySelf,\n Uint8Array: methods.arrayBuffer,\n Uint8ClampedArray: methods.arrayBuffer,\n Uint16Array: methods.arrayBuffer,\n Uint32Array: methods.arrayBuffer,\n Uint64Array: methods.arrayBuffer,\n };\n}\n","import type { State } from './copier.ts';\nimport { getOptions } from './options.js';\nimport type { CreateCopierOptions } from './options.ts';\nimport { getTag } from './utils.js';\n\nexport type { State } from './copier.ts';\nexport type { CreateCopierOptions } from './options.ts';\n\n/**\n * Create a custom copier based on custom options 
for any of the following:\n * - `createCache` method to create a cache for copied objects\n * - custom copier `methods` for specific object types\n * - `strict` mode to copy all properties with their descriptors\n */\nexport function createCopier(options: CreateCopierOptions = {}) {\n const { createCache, copiers } = getOptions(options);\n const { Array: copyArray, Object: copyObject } = copiers;\n\n function copier(value: any, state: State): any {\n state.prototype = state.Constructor = undefined;\n\n if (!value || typeof value !== 'object') {\n return value;\n }\n\n if (state.cache.has(value)) {\n return state.cache.get(value);\n }\n\n state.prototype = Object.getPrototypeOf(value);\n // Using logical AND for speed, since optional chaining transforms to\n // a local variable usage.\n // eslint-disable-next-line @typescript-eslint/prefer-optional-chain\n state.Constructor = state.prototype && state.prototype.constructor;\n\n // plain objects\n if (!state.Constructor || state.Constructor === Object) {\n return copyObject(value as Record, state);\n }\n\n // arrays\n if (Array.isArray(value)) {\n return copyArray(value, state);\n }\n\n const tagSpecificCopier = copiers[getTag(value)];\n\n if (tagSpecificCopier) {\n return tagSpecificCopier(value, state);\n }\n\n return typeof value.then === 'function' ? value : copyObject(value as Record, state);\n }\n\n return function copy(value: Value): Value {\n return copier(value, {\n Constructor: undefined,\n cache: createCache(),\n copier,\n prototype: undefined,\n });\n };\n}\n\n/**\n * Copy an value deeply as much as possible, where strict recreation of object properties\n * are maintained. All properties (including non-enumerable ones) are copied with their\n * original property descriptors on both objects and arrays.\n */\nexport const copyStrict = createCopier({ strict: true });\n\n/**\n * Copy an value deeply as much as possible.\n */\nexport const copy = 
createCopier();\n"],"names":[],"mappings":";;;;;;IAMA;IACA,MAAM,gBAAgB,GAAG,QAAQ,CAAC,SAAS,CAAC,QAAQ;IACpD;IACA,MAAM,cAAc,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ;IAEhD;;IAEG;IACG,SAAU,aAAa,CAAC,SAAc,EAAA;QAC1C,IAAI,CAAC,SAAS,EAAE;IACd,QAAA,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC;QAC5B;IAEA,IAAA,MAAM,WAAW,GAAG,SAAS,CAAC,WAAW;IAEzC,IAAA,IAAI,WAAW,KAAK,MAAM,EAAE;IAC1B,QAAA,OAAO,SAAS,KAAK,MAAM,CAAC,SAAS,GAAG,EAAE,GAAG,MAAM,CAAC,MAAM,CAAC,SAA0B,CAAC;QACxF;IAEA,IAAA,IAAI,WAAW,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE;IAC/E,QAAA,IAAI;gBACF,OAAO,IAAI,WAAW,EAAE;YAC1B;IAAE,QAAA,OAAA,EAAA,EAAM;;YAER;QACF;IAEA,IAAA,OAAO,MAAM,CAAC,MAAM,CAAC,SAA0B,CAAC;IAClD;IAEA;;IAEG;IACG,SAAU,MAAM,CAAC,KAAU,EAAA;QAC/B,MAAM,SAAS,GAAG,KAAK,CAAC,MAAM,CAAC,WAAW,CAAC;QAE3C,IAAI,SAAS,EAAE;IACb,QAAA,OAAO,SAAS;QAClB;QAEA,MAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC;IAEvC,IAAA,OAAO,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;IAC3C;;ICrCA;IACA,MAAM,EAAE,cAAc,EAAE,oBAAoB,EAAE,GAAG,MAAM,CAAC,SAAS;IAEjE,SAAS,iBAAiB,CACxB,QAAe,EACf,KAAY,EACZ,QAAyB,EACzB,KAAY,EAAA;QAEZ,MAAM,aAAa,GAAG,MAAM,CAAC,wBAAwB,CAAC,QAAQ,EAAE,QAAQ,CAAC,IAAI;IAC3E,QAAA,YAAY,EAAE,IAAI;IAClB,QAAA,UAAU,EAAE,IAAI;IAChB,QAAA,KAAK,EAAE,QAAQ,CAAC,QAAuB,CAAC;IACxC,QAAA,QAAQ,EAAE,IAAI;SACf;QACD,MAAM,UAAU,GACd,aAAa,CAAC,GAAG,IAAI,aAAa,CAAC;IACjC,UAAE;IACF,UAAE;gBACE,YAAY,EAAE,aAAa,CAAC,YAAY;gBACxC,UAAU,EAAE,aAAa,CAAC,UAAU;gBACpC,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,EAAE,KAAK,CAAC;gBAC/C,QAAQ,EAAE,aAAa,CAAC,QAAQ;aACjC;IAEP,IAAA,IAAI;YACF,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,QAAQ,EAAE,UAAU,CAAC;QACpD;IAAE,IAAA,OAAA,EAAA,EAAM;;YAEN,KAAK,CAAC,QAAuB,CAAC,GAAG,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,GAAG,EAAE,GAAG,UAAU,CAAC,KAAK;QACvF;IACF;IAEA;;IAEG;IACH,SAAS,uBAAuB,CAAuB,KAAY,EAAE,KAAY,EAAE,KAAY,EAAA;QAC7F,MAAM,KAAK,GAAG,MAAM,CAAC,mBAAmB,CAAC,KAAK,CAAC;IAE/C,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;IACjD,QAAA,iBAAiB,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,CAAE,EAAE,KAAK,CAAC;QACvD;QAEA,MAAM,OAAO,GAAG,MAAM,CAAC,qBAAqB,CAAC,KAAK,CAAC;IAEnD,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;IACnD,QAAA,iBAAiB,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,CAAE,EAAE,KAAK,CAAC;QACzD;IAEA,IAAA,OAAO,KAAK;IACd;IAEA;;IAEG;IACG,SAAU,cAAc,CAAC,KAAY,EAAE,KAAY,EAAA;IACvD,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAE;;QAGrC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;IAE7B,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,KAAK,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;IACjD,QAAA,KAAK,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,KAAK,CAAC;QAClD;IAEA,IAAA,OAAO,KAAK;IACd;IAEA;;IAEG;IACG,SAAU,eAAe,CAAsB,KAAY,EAAE,KAAY,EAAA;IAC7E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;QAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC;QAE7B,OAAO,uBAAuB,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC;IACrD;IAEA;;IAEG;IACG,SAAU,eAAe,CAAgC,WAAkB,EAAE,MAAa,EAAA;IAC9F,IAAA,OAAO,WAAW,CAAC,KAAK,CAAC,CAAC,CAAU;IACtC;IAEA;;IAEG;IACG,SAAU,QAAQ,CAAqB,IAAW,EAAE,MAAa,EAAA;IACrE,IAAA,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAU;IACrD;IAEA;;IAEG;IACG,SAAU,YAAY,CAAyB,QAAe,EAAE,KAAY,EAAA;IAChF,IAAA,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,eAAe,CAAC,QAAQ,CAAC,MAAa,CAAC,CAAC;IACvE;IAEA;;IAEG;IACG,SAAU,QAAQ,CAAqB,IAAW,EAAE,KAAY,EAAA;QACpE,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC;IAC9C;IAEA;;IAEG;IACG,SAAU,YAAY,CAA8B,GAAU,EAAE,KAAY,EAAA;IAChF,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;QAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;QAE3B,GAAG,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,KAAI;IACzB,QAAA,KAAK,CA
AC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IAC5C,IAAA,CAAC,CAAC;IAEF,IAAA,OAAO,KAAK;IACd;IAEA;;IAEG;IACG,SAAU,aAAa,CAA8B,GAAU,EAAE,KAAY,EAAA;IACjF,IAAA,OAAO,uBAAuB,CAAC,GAAG,EAAE,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,KAAK,CAAC;IACtE;IAEA;;IAEG;IACG,SAAU,eAAe,CAAoC,MAAa,EAAE,KAAY,EAAA;QAC5F,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC;;QAG5C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC;IAE9B,IAAA,KAAK,MAAM,GAAG,IAAI,MAAM,EAAE;YACxB,IAAI,cAAc,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE;IACpC,YAAA,KAAK,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,KAAK,CAAC;YAC/C;QACF;QAEA,MAAM,OAAO,GAAG,MAAM,CAAC,qBAAqB,CAAC,MAAM,CAAC;IAEpD,IAAA,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE;IACnD,QAAA,MAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAE;YAE9B,IAAI,oBAAoB,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE;IAC7C,YAAA,KAAK,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAE,MAAc,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC;YAC9D;QACF;IAEA,IAAA,OAAO,KAAK;IACd;IAEA;;;IAGG;IACG,SAAU,gBAAgB,CAAoC,MAAa,EAAE,KAAY,EAAA;QAC7F,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC;;QAG5C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC;QAE9B,OAAO,uBAAuB,CAAC,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC;IACtD;IAEA;;IAEG;IACG,SAAU,oBAAoB,CAIlC,eAAsB,EAAE,KAAY,EAAA;QACpC,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,eAAe,CAAC,OAAO,EAAE,CAAC;IACzD;IAEA;;IAEG;IACG,SAAU,UAAU,CAAuB,MAAa,EAAE,KAAY,EAAA;IAC1E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,CAAU;IAEzE,IAAA,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS;IAElC,IAAA,OAAO,KAAK;IACd;IAEA;;;;;IAKG;IACG,SAAU,QAAQ,CAAQ,KAAY,EAAE,MAAa,EAAA;IACzD,IAAA,OAAO,KAAK;IACd;IAEA;;IAEG;IACG,SAAU,YAAY,CAAyB,GAAU,EAAE,KAAY,EAAA;IAC3E,IAAA,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,WAAW,EAAW;;QAG9C,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC;IAE3B,IAAA,GAAG,CAAC,OAAO,CAAC,CAAC,KAAK,KAAI;IACpB,QAAA,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACvC,IAAA,CAAC,CAAC;IAEF,IAAA,OAAO,KAAK;IACd;IAEA;;IAEG;IACG,SAAU,aAAa,CAAyB,GAAU,EAAE,KAAY,EAAA;IAC5E,IAAA,OAAO,uBAAuB,CAAC,GAAG,EAAE,YAAY,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,KAAK,CAAC;IACtE;;aCzJgB,kBAAkB,GAAA;QAChC,OAAO,IAAI,OAAO,EAAE;IACtB;IAEM,SAAU,UAAU,CAAC,EACzB,WAAW,EAAE,mBAAmB,EAChC,OAAO,EAAE,eAAe,EACxB,MAAM,GACc,EAAA;IACpB,IAAA,MAAM,cAAc,GAAG;YACrB,KAAK,EAAE,MAAM,GAAG,eAAe,GAAG,cAAc;IAChD,QAAA,WAAW,EAAE,eAAe;IAC5B,QAAA,cAAc,EAAE,QAAQ;IACxB,QAAA,IAAI,EAAE,QAAQ;IACd,QAAA,QAAQ,EAAE,YAAY;IACtB,QAAA,IAAI,EAAE,QAAQ;IACd,QAAA,KAAK,EAAE,QAAQ;IACf,QAAA,SAAS,EAAE,QAAQ;YACnB,GAAG,EAAE,MAAM,GAAG,aAAa,GAAG,YAAY;YAC1C,MAAM,EAAE,MAAM,GAAG,gBAAgB,GAAG,eAAe;IACnD,QAAA,MAAM,EAAE,UAAU;YAClB,GAAG,EAAE,MAAM,GAAG,aAAa,GAAG,YAAY;SAC3C;IAED,IAAA,MAAM,OAAO,GAAG,eAAe,GAAG,MAAM,CAAC,MAAM,CAAC,cAAc,EAAE,eAAe,CAAC,GAAG,cAAc;IACjG,IAAA,MAAM,OAAO,GAAG,qBAAqB,CAAC,OAAO,CAAC;IAC9C,IAAA,MAAM,WAAW,GAAG,mBAAmB,IAAI,kBAAkB;;;;QAK7D,IAAI,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE;IACrC,QAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;QACjE;IAEA,IAAA,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE;IACnE;IAEA;;IAEG;IACG,SAAU,qBAAqB,CAAC,OAAgC,EAAA;QACpE,OAAO;YACL,SAAS,EAAE,OAAO,CAAC,MAAM;YACzB,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,WAAW,EAAE,OAAO,CAAC,WAAW;YAChC,cAAc,EAAE,OAAO,CAAC,cAAc;YACtC,IAAI,EAAE,OAAO,CAAC,IAAI;IAClB,QAAA,OAAO,EAAE,oBAAoB;YAC7B,QAAQ,EAAE,OAAO,CAAC,QAAQ;YAC1B,IAAI,EAAE,OAAO,CAAC,IAAI;YAClB,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,YAAY,EAAE,OAAO,CAAC,WAAW;YACjC,YAAY,EAAE,OAAO,CAAC,WAAW;YACjC,SAAS,EAAE,OAAO,CAAC,SAAS;YAC5B,SAAS,EAAE,OAAO,CAAC,WAAW;YAC9B,UAAU,EAAE,OAAO,CAAC,WAAW;YAC/B,UAAU,EAAE,OAAO,CAAC,WAAW;YAC/B,GAAG,EAAE,OAAO,CAAC,
GAAG;IAChB,QAAA,MAAM,EAAE,oBAAoB;YAC5B,MAAM,EAAE,OAAO,CAAC,MAAM;IACtB,QAAA,OAAO,EAAE,QAAQ;YACjB,MAAM,EAAE,OAAO,CAAC,MAAM;YACtB,GAAG,EAAE,OAAO,CAAC,GAAG;IAChB,QAAA,MAAM,EAAE,oBAAoB;IAC5B,QAAA,OAAO,EAAE,QAAQ;IACjB,QAAA,OAAO,EAAE,QAAQ;YACjB,UAAU,EAAE,OAAO,CAAC,WAAW;YAC/B,iBAAiB,EAAE,OAAO,CAAC,WAAW;YACtC,WAAW,EAAE,OAAO,CAAC,WAAW;YAChC,WAAW,EAAE,OAAO,CAAC,WAAW;YAChC,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC;IACH;;ICrJA;;;;;IAKG;IACG,SAAU,YAAY,CAAC,OAAA,GAA+B,EAAE,EAAA;QAC5D,MAAM,EAAE,WAAW,EAAE,OAAO,EAAE,GAAG,UAAU,CAAC,OAAO,CAAC;QACpD,MAAM,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,GAAG,OAAO;IAExD,IAAA,SAAS,MAAM,CAAC,KAAU,EAAE,KAAY,EAAA;YACtC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,WAAW,GAAG,SAAS;YAE/C,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;IACvC,YAAA,OAAO,KAAK;YACd;YAEA,IAAI,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;gBAC1B,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC;YAC/B;YAEA,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC;;;;IAI9C,QAAA,KAAK,CAAC,WAAW,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,SAAS,CAAC,WAAW;;YAGlE,IAAI,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,KAAK,MAAM,EAAE;IACtD,YAAA,OAAO,UAAU,CAAC,KAA4B,EAAE,KAAK,CAAC;YACxD;;IAGA,QAAA,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;IACxB,YAAA,OAAO,SAAS,CAAC,KAAK,EAAE,KAAK,CAAC;YAChC;YAEA,MAAM,iBAAiB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEhD,IAAI,iBAAiB,EAAE;IACrB,YAAA,OAAO,iBAAiB,CAAC,KAAK,EAAE,KAAK,CAAC;YACxC;IAEA,QAAA,OAAO,OAAO,KAAK,CAAC,IAAI,KAAK,UAAU,GAAG,KAAK,GAAG,UAAU,CAAC,KAA4B,EAAE,KAAK,CAAC;QACnG;QAEA,OAAO,SAAS,IAAI,CAAQ,KAAY,EAAA;YACtC,OAAO,MAAM,CAAC,KAAK,EAAE;IACnB,YAAA,WAAW,EAAE,SAAS;gBACtB,KAAK,EAAE,WAAW,EAAE;gBACpB,MAAM;IACN,YAAA,SAAS,EAAE,SAAS;IACrB,SAAA,CAAC;IACJ,IAAA,CAAC;IACH;IAEA;;;;IAIG;AACI,UAAM,UAAU,GAAG,YAAY,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE;IAEvD;;IAEG;AACI,UAAM,IAAI,GAAG,YAAY;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/fast-copy/dist/umd/options.d.ts b/node_modules/fast-copy/dist/umd/options.d.ts new file mode 100644 index 0000000..40e1c36 --- /dev/null +++ b/node_modules/fast-copy/dist/umd/options.d.ts @@ -0,0 +1,64 @@ +import type { InternalCopier } from './copier.ts'; +import type { Cache } from './utils.ts'; +export interface CopierMethods { + array?: InternalCopier; + arrayBuffer?: InternalCopier; + asyncGenerator?: InternalCopier; + blob?: InternalCopier; + dataView?: InternalCopier; + date?: InternalCopier; + error?: InternalCopier; + generator?: InternalCopier; + map?: InternalCopier>; + object?: InternalCopier>; + regExp?: InternalCopier; + set?: InternalCopier>; +} +interface Copiers { + [key: string]: InternalCopier | undefined; + Arguments: InternalCopier>; + Array: InternalCopier; + ArrayBuffer: InternalCopier; + AsyncGenerator: InternalCopier; + Blob: InternalCopier; + Boolean: InternalCopier; + DataView: InternalCopier; + Date: InternalCopier; + Error: InternalCopier; + Float32Array: InternalCopier; + Float64Array: InternalCopier; + Generator: InternalCopier; + Int8Array: InternalCopier; + Int16Array: InternalCopier; + Int32Array: InternalCopier; + Map: InternalCopier>; + Number: InternalCopier; + Object: InternalCopier>; + Promise: InternalCopier>; + RegExp: InternalCopier; + Set: InternalCopier>; + String: InternalCopier; + WeakMap: InternalCopier>; + WeakSet: InternalCopier>; + Uint8Array: InternalCopier; + Uint8ClampedArray: InternalCopier; + Uint16Array: InternalCopier; + Uint32Array: InternalCopier; + Uint64Array: InternalCopier; +} +export interface CreateCopierOptions { + createCache?: () => Cache; + methods?: CopierMethods; + strict?: boolean; +} +export interface RequiredCreateCopierOptions 
extends Omit, 'methods'> { + copiers: Copiers; + methods: Required; +} +export declare function createDefaultCache(): Cache; +export declare function getOptions({ createCache: createCacheOverride, methods: methodsOverride, strict, }: CreateCopierOptions): RequiredCreateCopierOptions; +/** + * Get the copiers used for each specific object tag. + */ +export declare function getTagSpecificCopiers(methods: Required): Copiers; +export {}; diff --git a/node_modules/fast-copy/dist/umd/utils.d.ts b/node_modules/fast-copy/dist/umd/utils.d.ts new file mode 100644 index 0000000..42c6c70 --- /dev/null +++ b/node_modules/fast-copy/dist/umd/utils.d.ts @@ -0,0 +1,13 @@ +export interface Cache { + has: (value: any) => boolean; + set: (key: any, value: any) => void; + get: (key: any) => any; +} +/** + * Get an empty version of the object with the same prototype it has. + */ +export declare function getCleanClone(prototype: any): any; +/** + * Get the tag of the value passed, so that the correct copier can be used. + */ +export declare function getTag(value: any): string; diff --git a/node_modules/fast-copy/index.d.ts b/node_modules/fast-copy/index.d.ts new file mode 100644 index 0000000..c163618 --- /dev/null +++ b/node_modules/fast-copy/index.d.ts @@ -0,0 +1,54 @@ +interface Cache { + has: (value: any) => boolean; + set: (key: any, value: any) => void; + get: (key: any) => any; +} + +type InternalCopier = (value: Value, state: State) => Value; +interface State { + Constructor: any; + cache: Cache; + copier: InternalCopier; + prototype: any; +} + +interface CopierMethods { + array?: InternalCopier; + arrayBuffer?: InternalCopier; + asyncGenerator?: InternalCopier; + blob?: InternalCopier; + dataView?: InternalCopier; + date?: InternalCopier; + error?: InternalCopier; + generator?: InternalCopier; + map?: InternalCopier>; + object?: InternalCopier>; + regExp?: InternalCopier; + set?: InternalCopier>; +} +interface CreateCopierOptions { + createCache?: () => Cache; + methods?: CopierMethods; + strict?: boolean; +} + +/** + * Create a custom copier based on custom options for any of the following: + * - `createCache` method to create a cache for copied objects + * - custom copier `methods` for specific object types + * - `strict` mode to copy all properties with their descriptors + */ +declare function createCopier(options?: CreateCopierOptions): (value: Value) => Value; +/** + * Copy a value deeply as much as possible, where strict recreation of object properties + * is maintained. All properties (including non-enumerable ones) are copied with their + * original property descriptors on both objects and arrays. + */ +declare const copyStrict: (value: Value) => Value; +/** + * Copy a value deeply as much as possible. 
+ */ +declare const copy: (value: Value) => Value; + +export { copy, copyStrict, createCopier }; +export type { CreateCopierOptions, State }; diff --git a/node_modules/fast-copy/package.json b/node_modules/fast-copy/package.json new file mode 100644 index 0000000..8ea2f13 --- /dev/null +++ b/node_modules/fast-copy/package.json @@ -0,0 +1,103 @@ +{ + "author": "tony_quetano@planttheidea.com", + "browser": "dist/umd/index.js", + "bugs": { + "url": "https://github.com/planttheidea/fast-copy/issues" + }, + "contributors": [ + "Dariusz Rzepka " + ], + "description": "A blazing fast deep object copier", + "devDependencies": { + "@planttheidea/build-tools": "^1.2.2", + "@types/lodash": "^4.17.21", + "@types/node": "^24.10.1", + "@types/ramda": "^0.31.1", + "@types/react": "^19.2.7", + "@vitest/coverage-v8": "^4.0.15", + "cli-table3": "^0.6.5", + "clone": "^2.1.2", + "deepclone": "^1.0.2", + "eslint": "^9.39.1", + "fast-clone": "^1.5.13", + "lodash": "^4.17.21", + "prettier": "^3.7.4", + "ramda": "^0.32.0", + "react": "^19.2.1", + "react-dom": "^19.2.1", + "release-it": "19.0.6", + "rollup": "^4.53.3", + "tinybench": "^6.0.0", + "typescript": "^5.9.3", + "vite": "^7.2.6", + "vitest": "^4.0.15" + }, + "exports": { + ".": { + "import": { + "types": "./dist/es/index.d.mts", + "default": "./dist/es/index.mjs" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + }, + "default": { + "types": "./dist/umd/index.d.ts", + "default": "./dist/umd/index.js" + } + } + }, + "files": [ + "dist", + "CHANGELOG.md", + "LICENSE", + "README.md", + "index.d.ts", + "package.json" + ], + "homepage": "https://github.com/planttheidea/fast-copy#readme", + "keywords": [ + "clone", + "deep", + "copy", + "fast" + ], + "license": "MIT", + "main": "dist/cjs/index.cjs", + "module": "dist/es/index.mjs", + "name": "fast-copy", + "repository": { + "type": "git", + "url": "git+https://github.com/planttheidea/fast-copy.git" + }, + "scripts": { + "benchmark": "npm run build && node benchmark/index.js", + "build": "npm run clean && npm run build:dist && npm run build:types", + "build:dist": "NODE_ENV=production rollup -c config/rollup.config.js", + "build:types": "pti fix-types -l dist", + "clean": "rm -rf dist", + "clean:cjs": "rm -rf dist/cjs", + "clean:es": "rm -rf dist/es", + "clean:esm": "rm -rf dist/esm", + "clean:min": "rm -rf dist/min", + "dev": "vite --config=config/vite.config.ts", + "format": "prettier . --log-level=warn --write", + "format:check": "prettier . 
--log-level=warn --check", + "lint": "eslint --max-warnings=0", + "lint:fix": "npm run lint -- --fix", + "release:alpha": "release-it --config=config/release-it/alpha.json", + "release:beta": "release-it --config=config/release-it/beta.json", + "release:dry": "release-it --dry-run", + "release:rc": "release-it --config=config/release-it/rc.json", + "release:scripts": "npm run format:check && npm run typecheck && npm run lint && npm run test && npm run build", + "release:stable": "release-it --config=config/release-it/stable.json", + "start": "npm run dev", + "test": "vitest run --config=config/vitest.config.ts", + "typecheck": "tsc --noEmit" + }, + "sideEffects": false, + "type": "module", + "types": "./index.d.ts", + "version": "4.0.2" +} diff --git a/node_modules/fast-safe-stringify/.travis.yml b/node_modules/fast-safe-stringify/.travis.yml new file mode 100644 index 0000000..2b06d25 --- /dev/null +++ b/node_modules/fast-safe-stringify/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +sudo: false +node_js: +- '4' +- '6' +- '8' +- '9' +- '10' diff --git a/node_modules/fast-safe-stringify/CHANGELOG.md b/node_modules/fast-safe-stringify/CHANGELOG.md new file mode 100644 index 0000000..55f2d08 --- /dev/null +++ b/node_modules/fast-safe-stringify/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## v.2.0.0 + +Features + +- Added stable-stringify (see documentation) +- Support replacer +- Support spacer +- toJSON support without forceDecirc property +- Improved performance + +Breaking changes + +- Manipulating the input value in a `toJSON` function is not possible anymore in + all cases (see documentation) +- Dropped support for e.g. IE8 and Node.js < 4 diff --git a/node_modules/fast-safe-stringify/LICENSE b/node_modules/fast-safe-stringify/LICENSE new file mode 100644 index 0000000..d310c2d --- /dev/null +++ b/node_modules/fast-safe-stringify/LICENSE @@ -0,0 +1,23 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Mark Clements +Copyright (c) 2017 David Mark Clements & Matteo Collina +Copyright (c) 2018 David Mark Clements, Matteo Collina & Ruben Bridgewater + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
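The fast-safe-stringify changelog above lists replacer and spacer support alongside the new stable serializer. As a minimal sketch of that v2 API surface (the `user` object and its field names are hypothetical; assumes the package is installed locally):

```js
const stringify = require('fast-safe-stringify')

const user = { name: 'Tyrion', house: 'Lannister' }
user.self = user // a circular reference that would make plain JSON.stringify throw

// The replacer and spacer arguments behave like JSON.stringify's.
const pretty = stringify(user, (key, value) => value, 2)
console.log(pretty) // the circular reference is serialized as "[Circular]"

// The deterministic ("stable") variant sorts keys before serializing.
console.log(stringify.stable(user))
// '{"house":"Lannister","name":"Tyrion","self":"[Circular]"}'
```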
diff --git a/node_modules/fast-safe-stringify/benchmark.js b/node_modules/fast-safe-stringify/benchmark.js new file mode 100644 index 0000000..7ba5e9f --- /dev/null +++ b/node_modules/fast-safe-stringify/benchmark.js @@ -0,0 +1,137 @@ +const Benchmark = require('benchmark') +const suite = new Benchmark.Suite() +const { inspect } = require('util') +const jsonStringifySafe = require('json-stringify-safe') +const fastSafeStringify = require('./') + +const array = new Array(10).fill(0).map((_, i) => i) +const obj = { foo: array } +const circ = JSON.parse(JSON.stringify(obj)) +circ.o = { obj: circ, array } +const circGetters = JSON.parse(JSON.stringify(obj)) +Object.assign(circGetters, { get o () { return { obj: circGetters, array } } }) + +const deep = require('./package.json') +deep.deep = JSON.parse(JSON.stringify(deep)) +deep.deep.deep = JSON.parse(JSON.stringify(deep)) +deep.deep.deep.deep = JSON.parse(JSON.stringify(deep)) +deep.array = array + +const deepCirc = JSON.parse(JSON.stringify(deep)) +deepCirc.deep.deep.deep.circ = deepCirc +deepCirc.deep.deep.circ = deepCirc +deepCirc.deep.circ = deepCirc +deepCirc.array = array + +const deepCircGetters = JSON.parse(JSON.stringify(deep)) +for (let i = 0; i < 10; i++) { + deepCircGetters[i.toString()] = { + deep: { + deep: { + get circ () { return deep.deep }, + deep: { get circ () { return deep.deep.deep } } + }, + get circ () { return deep } + }, + get array () { return array } + } +} + +const deepCircNonCongifurableGetters = JSON.parse(JSON.stringify(deep)) +Object.defineProperty(deepCircNonCongifurableGetters.deep.deep.deep, 'circ', { + get: () => deepCircNonCongifurableGetters, + enumerable: true, + configurable: false +}) +Object.defineProperty(deepCircNonCongifurableGetters.deep.deep, 'circ', { + get: () => deepCircNonCongifurableGetters, + enumerable: true, + configurable: false +}) +Object.defineProperty(deepCircNonCongifurableGetters.deep, 'circ', { + get: () => deepCircNonCongifurableGetters, + enumerable: true, + configurable: false +}) +Object.defineProperty(deepCircNonCongifurableGetters, 'array', { + get: () => array, + enumerable: true, + configurable: false +}) + +suite.add('util.inspect: simple object ', function () { + inspect(obj, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: circular ', function () { + inspect(circ, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: circular getters ', function () { + inspect(circGetters, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: deep ', function () { + inspect(deep, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: deep circular ', function () { + inspect(deepCirc, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: large deep circular getters ', function () { + inspect(deepCircGetters, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: deep non-conf circular getters', function () { + inspect(deepCircNonCongifurableGetters, { showHidden: false, depth: null }) +}) + +suite.add('\njson-stringify-safe: simple object ', function () { + jsonStringifySafe(obj) +}) +suite.add('json-stringify-safe: circular ', function () { + jsonStringifySafe(circ) +}) +suite.add('json-stringify-safe: circular getters ', function () { + jsonStringifySafe(circGetters) +}) +suite.add('json-stringify-safe: deep ', function () { + jsonStringifySafe(deep) +}) +suite.add('json-stringify-safe: deep circular ', function () { + jsonStringifySafe(deepCirc) +}) +suite.add('json-stringify-safe: large deep circular getters 
', function () { + jsonStringifySafe(deepCircGetters) +}) +suite.add('json-stringify-safe: deep non-conf circular getters', function () { + jsonStringifySafe(deepCircNonCongifurableGetters) +}) + +suite.add('\nfast-safe-stringify: simple object ', function () { + fastSafeStringify(obj) +}) +suite.add('fast-safe-stringify: circular ', function () { + fastSafeStringify(circ) +}) +suite.add('fast-safe-stringify: circular getters ', function () { + fastSafeStringify(circGetters) +}) +suite.add('fast-safe-stringify: deep ', function () { + fastSafeStringify(deep) +}) +suite.add('fast-safe-stringify: deep circular ', function () { + fastSafeStringify(deepCirc) +}) +suite.add('fast-safe-stringify: large deep circular getters ', function () { + fastSafeStringify(deepCircGetters) +}) +suite.add('fast-safe-stringify: deep non-conf circular getters', function () { + fastSafeStringify(deepCircNonCongifurableGetters) +}) + +// add listeners +suite.on('cycle', function (event) { + console.log(String(event.target)) +}) + +suite.on('complete', function () { + console.log('\nFastest is ' + this.filter('fastest').map('name')) +}) + +suite.run({ delay: 1, minSamples: 150 }) diff --git a/node_modules/fast-safe-stringify/index.d.ts b/node_modules/fast-safe-stringify/index.d.ts new file mode 100644 index 0000000..9a9b1f0 --- /dev/null +++ b/node_modules/fast-safe-stringify/index.d.ts @@ -0,0 +1,23 @@ +declare function stringify( + value: any, + replacer?: (key: string, value: any) => any, + space?: string | number, + options?: { depthLimit: number | undefined; edgesLimit: number | undefined } +): string; + +declare namespace stringify { + export function stable( + value: any, + replacer?: (key: string, value: any) => any, + space?: string | number, + options?: { depthLimit: number | undefined; edgesLimit: number | undefined } + ): string; + export function stableStringify( + value: any, + replacer?: (key: string, value: any) => any, + space?: string | number, + options?: { depthLimit: number | undefined; edgesLimit: number | undefined } + ): string; +} + +export default stringify; diff --git a/node_modules/fast-safe-stringify/index.js b/node_modules/fast-safe-stringify/index.js new file mode 100644 index 0000000..ecf7e51 --- /dev/null +++ b/node_modules/fast-safe-stringify/index.js @@ -0,0 +1,229 @@ +module.exports = stringify +stringify.default = stringify +stringify.stable = deterministicStringify +stringify.stableStringify = deterministicStringify + +var LIMIT_REPLACE_NODE = '[...]' +var CIRCULAR_REPLACE_NODE = '[Circular]' + +var arr = [] +var replacerStack = [] + +function defaultOptions () { + return { + depthLimit: Number.MAX_SAFE_INTEGER, + edgesLimit: Number.MAX_SAFE_INTEGER + } +} + +// Regular stringify +function stringify (obj, replacer, spacer, options) { + if (typeof options === 'undefined') { + options = defaultOptions() + } + + decirc(obj, '', 0, [], undefined, 0, options) + var res + try { + if (replacerStack.length === 0) { + res = JSON.stringify(obj, replacer, spacer) + } else { + res = JSON.stringify(obj, replaceGetterValues(replacer), spacer) + } + } catch (_) { + return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]') + } finally { + while (arr.length !== 0) { + var part = arr.pop() + if (part.length === 4) { + Object.defineProperty(part[0], part[1], part[3]) + } else { + part[0][part[1]] = part[2] + } + } + } + return res +} + +function setReplace (replace, val, k, parent) { + var propertyDescriptor = Object.getOwnPropertyDescriptor(parent, k) + if 
(propertyDescriptor.get !== undefined) { + if (propertyDescriptor.configurable) { + Object.defineProperty(parent, k, { value: replace }) + arr.push([parent, k, val, propertyDescriptor]) + } else { + replacerStack.push([val, k, replace]) + } + } else { + parent[k] = replace + arr.push([parent, k, val]) + } +} + +function decirc (val, k, edgeIndex, stack, parent, depth, options) { + depth += 1 + var i + if (typeof val === 'object' && val !== null) { + for (i = 0; i < stack.length; i++) { + if (stack[i] === val) { + setReplace(CIRCULAR_REPLACE_NODE, val, k, parent) + return + } + } + + if ( + typeof options.depthLimit !== 'undefined' && + depth > options.depthLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + if ( + typeof options.edgesLimit !== 'undefined' && + edgeIndex + 1 > options.edgesLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + stack.push(val) + // Optimize for Arrays. Big arrays could kill the performance otherwise! + if (Array.isArray(val)) { + for (i = 0; i < val.length; i++) { + decirc(val[i], i, i, stack, val, depth, options) + } + } else { + var keys = Object.keys(val) + for (i = 0; i < keys.length; i++) { + var key = keys[i] + decirc(val[key], key, i, stack, val, depth, options) + } + } + stack.pop() + } +} + +// Stable-stringify +function compareFunction (a, b) { + if (a < b) { + return -1 + } + if (a > b) { + return 1 + } + return 0 +} + +function deterministicStringify (obj, replacer, spacer, options) { + if (typeof options === 'undefined') { + options = defaultOptions() + } + + var tmp = deterministicDecirc(obj, '', 0, [], undefined, 0, options) || obj + var res + try { + if (replacerStack.length === 0) { + res = JSON.stringify(tmp, replacer, spacer) + } else { + res = JSON.stringify(tmp, replaceGetterValues(replacer), spacer) + } + } catch (_) { + return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]') + } finally { + // Ensure that we restore the object as it was. + while (arr.length !== 0) { + var part = arr.pop() + if (part.length === 4) { + Object.defineProperty(part[0], part[1], part[3]) + } else { + part[0][part[1]] = part[2] + } + } + } + return res +} + +function deterministicDecirc (val, k, edgeIndex, stack, parent, depth, options) { + depth += 1 + var i + if (typeof val === 'object' && val !== null) { + for (i = 0; i < stack.length; i++) { + if (stack[i] === val) { + setReplace(CIRCULAR_REPLACE_NODE, val, k, parent) + return + } + } + try { + if (typeof val.toJSON === 'function') { + return + } + } catch (_) { + return + } + + if ( + typeof options.depthLimit !== 'undefined' && + depth > options.depthLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + if ( + typeof options.edgesLimit !== 'undefined' && + edgeIndex + 1 > options.edgesLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + stack.push(val) + // Optimize for Arrays. Big arrays could kill the performance otherwise! 
+ if (Array.isArray(val)) { + for (i = 0; i < val.length; i++) { + deterministicDecirc(val[i], i, i, stack, val, depth, options) + } + } else { + // Create a temporary object in the required way + var tmp = {} + var keys = Object.keys(val).sort(compareFunction) + for (i = 0; i < keys.length; i++) { + var key = keys[i] + deterministicDecirc(val[key], key, i, stack, val, depth, options) + tmp[key] = val[key] + } + if (typeof parent !== 'undefined') { + arr.push([parent, k, val]) + parent[k] = tmp + } else { + return tmp + } + } + stack.pop() + } +} + +// wraps replacer function to handle values we couldn't replace +// and mark them as replaced value +function replaceGetterValues (replacer) { + replacer = + typeof replacer !== 'undefined' + ? replacer + : function (k, v) { + return v + } + return function (key, val) { + if (replacerStack.length > 0) { + for (var i = 0; i < replacerStack.length; i++) { + var part = replacerStack[i] + if (part[1] === key && part[0] === val) { + val = part[2] + replacerStack.splice(i, 1) + break + } + } + } + return replacer.call(this, key, val) + } +} diff --git a/node_modules/fast-safe-stringify/package.json b/node_modules/fast-safe-stringify/package.json new file mode 100644 index 0000000..206a591 --- /dev/null +++ b/node_modules/fast-safe-stringify/package.json @@ -0,0 +1,46 @@ +{ + "name": "fast-safe-stringify", + "version": "2.1.1", + "description": "Safely and quickly serialize JavaScript objects", + "keywords": [ + "stable", + "stringify", + "JSON", + "JSON.stringify", + "safe", + "serialize" + ], + "main": "index.js", + "scripts": { + "test": "standard && tap --no-esm test.js test-stable.js", + "benchmark": "node benchmark.js" + }, + "author": "David Mark Clements", + "contributors": [ + "Ruben Bridgewater", + "Matteo Collina", + "Ben Gourley", + "Gabriel Lesperance", + "Alex Liu", + "Christoph Walcher", + "Nicholas Young" + ], + "license": "MIT", + "typings": "index", + "devDependencies": { + "benchmark": "^2.1.4", + "clone": "^2.1.0", + "json-stringify-safe": "^5.0.1", + "standard": "^11.0.0", + "tap": "^12.0.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/davidmarkclements/fast-safe-stringify.git" + }, + "bugs": { + "url": "https://github.com/davidmarkclements/fast-safe-stringify/issues" + }, + "homepage": "https://github.com/davidmarkclements/fast-safe-stringify#readme", + "dependencies": {} +} diff --git a/node_modules/fast-safe-stringify/readme.md b/node_modules/fast-safe-stringify/readme.md new file mode 100644 index 0000000..47179c9 --- /dev/null +++ b/node_modules/fast-safe-stringify/readme.md @@ -0,0 +1,170 @@ +# fast-safe-stringify + +Safe and fast serialization alternative to [JSON.stringify][]. + +Gracefully handles circular structures instead of throwing in most cases. +It could return an error string if the circular object is too complex to analyze, +e.g. in case there are proxies involved. + +Provides a deterministic ("stable") version as well that will also gracefully +handle circular structures. See the example below for further information. + +## Usage + +The same as [JSON.stringify][]. 
+ +`stringify(value[, replacer[, space[, options]]])` + +```js +const safeStringify = require('fast-safe-stringify') +const o = { a: 1 } +o.o = o + +console.log(safeStringify(o)) +// '{"a":1,"o":"[Circular]"}' +console.log(JSON.stringify(o)) +// TypeError: Converting circular structure to JSON + +function replacer(key, value) { + console.log('Key:', JSON.stringify(key), 'Value:', JSON.stringify(value)) + // Remove the circular structure + if (value === '[Circular]') { + return + } + return value +} + +// these are also the default limits when no options object is passed to safeStringify; +// configure them to lower the limits. +const options = { + depthLimit: Number.MAX_SAFE_INTEGER, + edgesLimit: Number.MAX_SAFE_INTEGER +}; + +const serialized = safeStringify(o, replacer, 2, options) +// Key: "" Value: {"a":1,"o":"[Circular]"} +// Key: "a" Value: 1 +// Key: "o" Value: "[Circular]" +console.log(serialized) +// { +// "a": 1 +// } +``` + + +Using the deterministic version works the same way: + +```js +const safeStringify = require('fast-safe-stringify') +const o = { b: 1, a: 0 } +o.o = o + +console.log(safeStringify(o)) +// '{"b":1,"a":0,"o":"[Circular]"}' +console.log(safeStringify.stableStringify(o)) +// '{"a":0,"b":1,"o":"[Circular]"}' +console.log(JSON.stringify(o)) +// TypeError: Converting circular structure to JSON +``` + +A faster and side-effect-free implementation is available in the +[safe-stable-stringify][] module. However, it is still considered experimental +due to its newer and more complex implementation. + +### Replacement string constants + +- `[Circular]` - when the same reference is found +- `[...]` - when a limit from the options object is reached + +## Differences to JSON.stringify + +In general the behavior is identical to [JSON.stringify][]. The [`replacer`][] +and [`space`][] options are also available. + +A few exceptions to [JSON.stringify][] exist when using [`toJSON`][] or +[`replacer`][]: + +### Regular safe stringify + +- Manipulating a circular structure of the passed-in value in a `toJSON` function or the + `replacer` is not possible! It is possible for any other value and property. + +- In case a circular structure is detected and the [`replacer`][] is used, it + will receive the string `[Circular]` as the argument instead of the circular + object itself. + +### Deterministic ("stable") safe stringify + +- Manipulating the input object either in a [`toJSON`][] or the [`replacer`][] + function will not have any effect on the output. The output relies entirely on + the shape the input value had at the point it was passed to the stringify function! + +- In case a circular structure is detected and the [`replacer`][] is used, it + will receive the string `[Circular]` as the argument instead of the circular + object itself. + +A side-effect-free variation without these limitations is available as well +([`safe-stable-stringify`][]). It is also faster than the current +implementation, but is still considered experimental due to its newer and more +complex implementation. + +## Benchmarks + +Although not JSON, the Node.js `util.inspect` method can be used for similar +purposes (e.g. logging) and also handles circular references. 
+ +Here we compare `fast-safe-stringify` with some alternatives: +(Lenovo T450s with an i7-5600U CPU using Node.js 8.9.4) + +```md +fast-safe-stringify: simple object x 1,121,497 ops/sec ±0.75% (97 runs sampled) +fast-safe-stringify: circular x 560,126 ops/sec ±0.64% (96 runs sampled) +fast-safe-stringify: deep x 32,472 ops/sec ±0.57% (95 runs sampled) +fast-safe-stringify: deep circular x 32,513 ops/sec ±0.80% (92 runs sampled) + +util.inspect: simple object x 272,837 ops/sec ±1.48% (90 runs sampled) +util.inspect: circular x 116,896 ops/sec ±1.19% (95 runs sampled) +util.inspect: deep x 19,382 ops/sec ±0.66% (92 runs sampled) +util.inspect: deep circular x 18,717 ops/sec ±0.63% (96 runs sampled) + +json-stringify-safe: simple object x 233,621 ops/sec ±0.97% (94 runs sampled) +json-stringify-safe: circular x 110,409 ops/sec ±1.85% (95 runs sampled) +json-stringify-safe: deep x 8,705 ops/sec ±0.87% (96 runs sampled) +json-stringify-safe: deep circular x 8,336 ops/sec ±2.20% (93 runs sampled) +``` + +For stable stringify comparisons, see the performance benchmarks in the +[`safe-stable-stringify`][] readme. + +## Protip + +Whether `fast-safe-stringify` or an alternative is used: if the use case +consists of deeply nested objects without circular references, the following +pattern will give the best results. +Shallow or one-level-nested objects, on the other hand, will be slower with it. +It is entirely dependent on the use case. + +```js +const stringify = require('fast-safe-stringify') + +function tryJSONStringify (obj) { + try { return JSON.stringify(obj) } catch (_) {} +} + +const serializedString = tryJSONStringify(deep) || stringify(deep) +``` + +## Acknowledgements + +Sponsored by [nearForm](http://nearform.com) + +## License + +MIT + +[`replacer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20replacer%20parameter +[`safe-stable-stringify`]: https://github.com/BridgeAR/safe-stable-stringify +[`space`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20space%20argument +[`toJSON`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON()_behavior +[benchmark]: https://github.com/epoberezkin/fast-json-stable-stringify/blob/67f688f7441010cfef91a6147280cc501701e83b/benchmark +[JSON.stringify]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify diff --git a/node_modules/fast-safe-stringify/test-stable.js b/node_modules/fast-safe-stringify/test-stable.js new file mode 100644 index 0000000..c55b95c --- /dev/null +++ b/node_modules/fast-safe-stringify/test-stable.js @@ -0,0 +1,404 @@ +const test = require('tap').test +const fss = require('./').stable +const clone = require('clone') +const s = JSON.stringify +const stream = require('stream') + +test('circular reference to root', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.circle = fixture + const expected = s({ circle: '[Circular]', name: 'Tywin Lannister' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getter reference to root', function (assert) { + const fixture = { + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + + const expected = s({ circle: '[Circular]', name: 'Tywin Lannister' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular reference to root', function (assert) { + const 
fixture = { name: 'Tywin Lannister' } + fixture.id = { circle: fixture } + const expected = s({ id: { circle: '[Circular]' }, name: 'Tywin Lannister' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.dinklage = fixture.child + const expected = s({ + child: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.actor = { dinklage: fixture.child } + const expected = s({ + child: { + actor: { dinklage: '[Circular]' }, + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular objects in an array', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.hand = [fixture, fixture] + const expected = s({ + hand: ['[Circular]', '[Circular]'], + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular references in an array', function (assert) { + const fixture = { + name: 'Tywin Lannister', + offspring: [{ name: 'Tyrion Lannister' }, { name: 'Cersei Lannister' }] + } + fixture.offspring[0].dinklage = fixture.offspring[0] + fixture.offspring[1].headey = fixture.offspring[1] + + const expected = s({ + name: 'Tywin Lannister', + offspring: [ + { dinklage: '[Circular]', name: 'Tyrion Lannister' }, + { headey: '[Circular]', name: 'Cersei Lannister' } + ] + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular arrays', function (assert) { + const fixture = [] + fixture.push(fixture, fixture) + const expected = s(['[Circular]', '[Circular]']) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular arrays', function (assert) { + const fixture = [] + fixture.push( + { name: 'Jon Snow', bastards: fixture }, + { name: 'Ramsay Bolton', bastards: fixture } + ) + const expected = s([ + { bastards: '[Circular]', name: 'Jon Snow' }, + { bastards: '[Circular]', name: 'Ramsay Bolton' } + ]) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in objects', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = { + motherOfDragons: daenerys, + queenOfMeereen: daenerys + } + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in arrays', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = [daenerys, daenerys] + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('double child circular reference', function (assert) { + // create circular reference + const child = { name: 'Tyrion Lannister' } + child.dinklage = child + + // include it twice in the fixture + const fixture = { name: 'Tywin Lannister', childA: child, childB: child } + const cloned = clone(fixture) + const expected = s({ + childA: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + 
}, + childB: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + + // check if the fixture has not been modified + assert.same(fixture, cloned) + assert.end() +}) + +test('child circular reference with toJSON', function (assert) { + // Create a test object that has an overridden `toJSON` property + TestObject.prototype.toJSON = function () { + return { special: 'case' } + } + function TestObject (content) {} + + // Creating a simple circular object structure + const parentObject = {} + parentObject.childObject = new TestObject() + parentObject.childObject.parentObject = parentObject + + // Creating a simple circular object structure + const otherParentObject = new TestObject() + otherParentObject.otherChildObject = {} + otherParentObject.otherChildObject.otherParentObject = otherParentObject + + // Making sure our original tests work + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + // Should both be idempotent + assert.equal(fss(parentObject), '{"childObject":{"special":"case"}}') + assert.equal(fss(otherParentObject), '{"special":"case"}') + + // Therefore the following assertion should be `true` + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + assert.end() +}) + +test('null object', function (assert) { + const expected = s(null) + const actual = fss(null) + assert.equal(actual, expected) + assert.end() +}) + +test('null property', function (assert) { + const expected = s({ f: null }) + const actual = fss({ f: null }) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference in toJSON', function (assert) { + var circle = { some: 'data' } + circle.circle = circle + var a = { + b: { + toJSON: function () { + a.b = 2 + return '[Redacted]' + } + }, + baz: { + circle, + toJSON: function () { + a.baz = circle + return '[Redacted]' + } + } + } + var o = { + a, + bar: a + } + + const expected = s({ + a: { + b: '[Redacted]', + baz: '[Redacted]' + }, + bar: { + // TODO: This is a known limitation of the current implementation. 
+ // The ideal result would be: + // + // b: 2, + // baz: { + // circle: '[Circular]', + // some: 'data' + // } + // + b: '[Redacted]', + baz: '[Redacted]' + } + }) + const actual = fss(o) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getters are restored when stringified', function (assert) { + const fixture = { + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('non-configurable circular getters use a replacer instead of markers', function (assert) { + const fixture = { name: 'Tywin Lannister' } + Object.defineProperty(fixture, 'circle', { + configurable: false, + get: function () { + return fixture + }, + enumerable: true + }) + + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('getter child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + get dinklage () { + return fixture.child + } + }, + get self () { + return fixture + } + } + + const expected = s({ + child: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister', + self: '[Circular]' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('Proxy throwing', function (assert) { + assert.plan(1) + const s = new stream.PassThrough() + s.resume() + s.write('', () => { + assert.end() + }) + const actual = fss({ s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) }) + assert.equal(actual, '"[unable to serialize, circular reference is too complex to analyze]"') +}) + +test('depthLimit option - will replace deep objects', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister' + }, + get self () { + return fixture + } + } + + const expected = s({ + child: '[...]', + name: 'Tywin Lannister', + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 1, + edgesLimit: 1 + }) + assert.equal(actual, expected) + assert.end() +}) + +test('edgesLimit option - will replace deep objects', function (assert) { + const fixture = { + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: { test: 'test' } + }, + array: [ + { test: 'test' }, + { test: 'test' }, + { test: 'test' }, + { test: 'test' } + ], + get self () { + return fixture + } + } + + const expected = s({ + array: [{ test: 'test' }, { test: 'test' }, { test: 'test' }, '[...]'], + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: '[...]' + }, + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 3, + edgesLimit: 3 + }) + assert.equal(actual, expected) + assert.end() +}) diff --git a/node_modules/fast-safe-stringify/test.js b/node_modules/fast-safe-stringify/test.js new file mode 100644 index 0000000..a4170e9 --- /dev/null +++ b/node_modules/fast-safe-stringify/test.js @@ -0,0 +1,397 @@ +const test = require('tap').test +const fss = require('./') +const clone = require('clone') +const s = JSON.stringify +const stream = require('stream') + +test('circular reference to root', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.circle = fixture + const expected = s({ name: 'Tywin Lannister', circle: '[Circular]' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getter reference to root', function (assert) { + const fixture = 
{ + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + const expected = s({ name: 'Tywin Lannister', circle: '[Circular]' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular reference to root', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.id = { circle: fixture } + const expected = s({ name: 'Tywin Lannister', id: { circle: '[Circular]' } }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.dinklage = fixture.child + const expected = s({ + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + } + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.actor = { dinklage: fixture.child } + const expected = s({ + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + actor: { dinklage: '[Circular]' } + } + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular objects in an array', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.hand = [fixture, fixture] + const expected = s({ + name: 'Tywin Lannister', + hand: ['[Circular]', '[Circular]'] + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular references in an array', function (assert) { + const fixture = { + name: 'Tywin Lannister', + offspring: [{ name: 'Tyrion Lannister' }, { name: 'Cersei Lannister' }] + } + fixture.offspring[0].dinklage = fixture.offspring[0] + fixture.offspring[1].headey = fixture.offspring[1] + + const expected = s({ + name: 'Tywin Lannister', + offspring: [ + { name: 'Tyrion Lannister', dinklage: '[Circular]' }, + { name: 'Cersei Lannister', headey: '[Circular]' } + ] + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular arrays', function (assert) { + const fixture = [] + fixture.push(fixture, fixture) + const expected = s(['[Circular]', '[Circular]']) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular arrays', function (assert) { + const fixture = [] + fixture.push( + { name: 'Jon Snow', bastards: fixture }, + { name: 'Ramsay Bolton', bastards: fixture } + ) + const expected = s([ + { name: 'Jon Snow', bastards: '[Circular]' }, + { name: 'Ramsay Bolton', bastards: '[Circular]' } + ]) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in objects', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = { + motherOfDragons: daenerys, + queenOfMeereen: daenerys + } + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in arrays', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = [daenerys, daenerys] + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('double child circular reference', function (assert) { + // create circular reference + const 
child = { name: 'Tyrion Lannister' } + child.dinklage = child + + // include it twice in the fixture + const fixture = { name: 'Tywin Lannister', childA: child, childB: child } + const cloned = clone(fixture) + const expected = s({ + name: 'Tywin Lannister', + childA: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + }, + childB: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + } + }) + const actual = fss(fixture) + assert.equal(actual, expected) + + // check if the fixture has not been modified + assert.same(fixture, cloned) + assert.end() +}) + +test('child circular reference with toJSON', function (assert) { + // Create a test object that has an overridden `toJSON` property + TestObject.prototype.toJSON = function () { + return { special: 'case' } + } + function TestObject (content) {} + + // Creating a simple circular object structure + const parentObject = {} + parentObject.childObject = new TestObject() + parentObject.childObject.parentObject = parentObject + + // Creating a simple circular object structure + const otherParentObject = new TestObject() + otherParentObject.otherChildObject = {} + otherParentObject.otherChildObject.otherParentObject = otherParentObject + + // Making sure our original tests work + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + // Should both be idempotent + assert.equal(fss(parentObject), '{"childObject":{"special":"case"}}') + assert.equal(fss(otherParentObject), '{"special":"case"}') + + // Therefore the following assertion should be `true` + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + assert.end() +}) + +test('null object', function (assert) { + const expected = s(null) + const actual = fss(null) + assert.equal(actual, expected) + assert.end() +}) + +test('null property', function (assert) { + const expected = s({ f: null }) + const actual = fss({ f: null }) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference in toJSON', function (assert) { + const circle = { some: 'data' } + circle.circle = circle + const a = { + b: { + toJSON: function () { + a.b = 2 + return '[Redacted]' + } + }, + baz: { + circle, + toJSON: function () { + a.baz = circle + return '[Redacted]' + } + } + } + const o = { + a, + bar: a + } + + const expected = s({ + a: { + b: '[Redacted]', + baz: '[Redacted]' + }, + bar: { + b: 2, + baz: { + some: 'data', + circle: '[Circular]' + } + } + }) + const actual = fss(o) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getters are restored when stringified', function (assert) { + const fixture = { + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('non-configurable circular getters use a replacer instead of markers', function (assert) { + const fixture = { name: 'Tywin Lannister' } + Object.defineProperty(fixture, 'circle', { + configurable: false, + get: function () { + return fixture + }, + enumerable: true + }) + + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('getter child circular reference are replaced instead of marked', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + get dinklage () { + return fixture.child + } + }, + get self () 
{ + return fixture + } + } + + const expected = s({ + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + }, + self: '[Circular]' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('Proxy throwing', function (assert) { + assert.plan(1) + const s = new stream.PassThrough() + s.resume() + s.write('', () => { + assert.end() + }) + const actual = fss({ s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) }) + assert.equal(actual, '"[unable to serialize, circular reference is too complex to analyze]"') +}) + +test('depthLimit option - will replace deep objects', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister' + }, + get self () { + return fixture + } + } + + const expected = s({ + name: 'Tywin Lannister', + child: '[...]', + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 1, + edgesLimit: 1 + }) + assert.equal(actual, expected) + assert.end() +}) + +test('edgesLimit option - will replace deep objects', function (assert) { + const fixture = { + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: { test: 'test' } + }, + array: [ + { test: 'test' }, + { test: 'test' }, + { test: 'test' }, + { test: 'test' } + ], + get self () { + return fixture + } + } + + const expected = s({ + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: '[...]' + }, + array: [{ test: 'test' }, { test: 'test' }, { test: 'test' }, '[...]'], + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 3, + edgesLimit: 3 + }) + assert.equal(actual, expected) + assert.end() +}) diff --git a/node_modules/help-me/.github/workflows/ci.yml b/node_modules/help-me/.github/workflows/ci.yml new file mode 100644 index 0000000..f7f7cc5 --- /dev/null +++ b/node_modules/help-me/.github/workflows/ci.yml @@ -0,0 +1,28 @@ +name: ci + +on: [push, pull_request] + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + matrix: + os: [ubuntu-latest, windows-latest] + node-version: [14.x, 16.x, 18.x, 20.x] + + steps: + - uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test diff --git a/node_modules/help-me/LICENSE b/node_modules/help-me/LICENSE new file mode 100644 index 0000000..bc090c0 --- /dev/null +++ b/node_modules/help-me/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2014-2022 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/node_modules/help-me/README.md b/node_modules/help-me/README.md
new file mode 100644
index 0000000..c0c55c8
--- /dev/null
+++ b/node_modules/help-me/README.md
@@ -0,0 +1,66 @@
+help-me
+=======
+
+Help command for node, to use with [minimist](http://npm.im/minimist) and [commist](http://npm.im/commist).
+
+Example
+-------
+
+```js
+'use strict'
+
+var helpMe = require('help-me')
+var path = require('path')
+var help = helpMe({
+  dir: path.join(__dirname, 'doc'),
+  // the default
+  ext: '.txt'
+})
+
+help
+  .createStream(['hello']) // also accepts a string
+  .pipe(process.stdout)
+
+// little helper to do the same
+help.toStdout(['hello'])
+```
+
+Using ESM and top-level await:
+
+```js
+import { help } from 'help-me'
+import { join } from 'desm'
+
+await help({
+  dir: join(import.meta.url, 'doc'),
+  // the default
+  ext: '.txt'
+}, ['hello'])
+```
+
+Usage with commist
+------------------
+
+[Commist](http://npm.im/commist) provides a command system for node.
+
+```js
+var commist = require('commist')()
+var path = require('path')
+var help = require('help-me')({
+  dir: path.join(__dirname, 'doc')
+})
+
+commist.register('help', help.toStdout)
+
+commist.parse(process.argv.splice(2))
+```
+
+Acknowledgements
+----------------
+
+This project was kindly sponsored by [nearForm](http://nearform.com).
+
+License
+-------
+
+MIT
diff --git a/node_modules/help-me/doc/hello.txt b/node_modules/help-me/doc/hello.txt
new file mode 100644
index 0000000..b2e26f8
--- /dev/null
+++ b/node_modules/help-me/doc/hello.txt
@@ -0,0 +1 @@
+this is hello world
diff --git a/node_modules/help-me/doc/help.txt b/node_modules/help-me/doc/help.txt
new file mode 100644
index 0000000..5812ea8
--- /dev/null
+++ b/node_modules/help-me/doc/help.txt
@@ -0,0 +1,5 @@
+HELP-ME by Matteo
+
+* start starts a script
+* help shows help
+
diff --git a/node_modules/help-me/example.js b/node_modules/help-me/example.js
new file mode 100644
index 0000000..ad22a4a
--- /dev/null
+++ b/node_modules/help-me/example.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const path = require('path')
+const commist = require('commist')()
+const help = require('./')({
+  dir: path.join(path.dirname(require.main.filename), 'doc')
+})
+
+commist.register('help', help.toStdout)
+commist.register('start', function () {
+  console.log('Starting the script!')
+})
+
+const res = commist.parse(process.argv.splice(2))
+
+if (res) {
+  help.toStdout()
+}
diff --git a/node_modules/help-me/fixture/basic/hello.txt b/node_modules/help-me/fixture/basic/hello.txt
new file mode 100644
index 0000000..8f43ac9
--- /dev/null
+++ b/node_modules/help-me/fixture/basic/hello.txt
@@ -0,0 +1 @@
+ahdsadhdash
diff --git a/node_modules/help-me/fixture/basic/help.txt b/node_modules/help-me/fixture/basic/help.txt
new file mode 100644
index 0000000..3b18e51
--- /dev/null
+++ b/node_modules/help-me/fixture/basic/help.txt
@@ -0,0 +1 @@
+hello world
diff --git a/node_modules/help-me/fixture/dir/a/b.txt b/node_modules/help-me/fixture/dir/a/b.txt
new file mode 100644
index 0000000..e69de29
diff --git a/node_modules/help-me/fixture/no-ext/hello b/node_modules/help-me/fixture/no-ext/hello
new file mode 100644
index 0000000..c91d514
--- /dev/null
+++ b/node_modules/help-me/fixture/no-ext/hello
@@ -0,0 +1 @@
+ghghghhg
diff
--git a/node_modules/help-me/fixture/sameprefix/hello world.txt b/node_modules/help-me/fixture/sameprefix/hello world.txt new file mode 100644 index 0000000..95d09f2 --- /dev/null +++ b/node_modules/help-me/fixture/sameprefix/hello world.txt @@ -0,0 +1 @@ +hello world \ No newline at end of file diff --git a/node_modules/help-me/fixture/sameprefix/hello.txt b/node_modules/help-me/fixture/sameprefix/hello.txt new file mode 100644 index 0000000..b6fc4c6 --- /dev/null +++ b/node_modules/help-me/fixture/sameprefix/hello.txt @@ -0,0 +1 @@ +hello \ No newline at end of file diff --git a/node_modules/help-me/fixture/shortnames/abcde fghi lmno.txt b/node_modules/help-me/fixture/shortnames/abcde fghi lmno.txt new file mode 100644 index 0000000..509318c --- /dev/null +++ b/node_modules/help-me/fixture/shortnames/abcde fghi lmno.txt @@ -0,0 +1 @@ +ewweqjewqjewqj diff --git a/node_modules/help-me/fixture/shortnames/abcde hello.txt b/node_modules/help-me/fixture/shortnames/abcde hello.txt new file mode 100644 index 0000000..4022537 --- /dev/null +++ b/node_modules/help-me/fixture/shortnames/abcde hello.txt @@ -0,0 +1 @@ +45678 diff --git a/node_modules/help-me/fixture/shortnames/hello world.txt b/node_modules/help-me/fixture/shortnames/hello world.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/node_modules/help-me/fixture/shortnames/hello world.txt @@ -0,0 +1 @@ +12345 diff --git a/node_modules/help-me/help-me.js b/node_modules/help-me/help-me.js new file mode 100644 index 0000000..6a52bf0 --- /dev/null +++ b/node_modules/help-me/help-me.js @@ -0,0 +1,134 @@ +'use strict' + +const fs = require('fs') +const { PassThrough, Writable, pipeline } = require('stream') +const process = require('process') +const { join } = require('path') + +const defaults = { + ext: '.txt', + help: 'help' +} + +function isDirectory (path) { + try { + const stat = fs.lstatSync(path) + return stat.isDirectory() + } catch (err) { + return false + } +} + +function createDefaultStream () { + return new Writable({ + write (chunk, encoding, callback) { + process.stdout.write(chunk, callback) + } + }) +} + +function helpMe (opts) { + opts = Object.assign({}, defaults, opts) + + if (!opts.dir) { + throw new Error('missing dir') + } + + if (!isDirectory(opts.dir)) { + throw new Error(`${opts.dir} is not a directory`) + } + + return { + createStream: createStream, + toStdout: toStdout + } + + function createStream (args) { + if (typeof args === 'string') { + args = args.split(' ') + } else if (!args || args.length === 0) { + args = [opts.help] + } + + const out = new PassThrough() + const re = new RegExp( + args + .map(function (arg) { + return arg + '[a-zA-Z0-9]*' + }) + .join('[ /]+') + ) + + if (process.platform === 'win32') { + opts.dir = opts.dir.split('\\').join('/') + } + + fs.readdir(opts.dir, function (err, files) { + if (err) return out.emit('error', err) + + const regexp = new RegExp('.*' + opts.ext + '$') + files = files + .filter(function (file) { + const matched = file.match(regexp) + return !!matched + }) + .map(function (relative) { + return { file: join(opts.dir, relative), relative } + }) + .filter(function (file) { + return file.relative.match(re) + }) + + if (files.length === 0) { + return out.emit('error', new Error('no such help file')) + } else if (files.length > 1) { + const exactMatch = files.find( + (file) => file.relative === `${args[0]}${opts.ext}` + ) + if (!exactMatch) { + out.write('There are ' + files.length + ' help pages ') + out.write('that matches the given request, please 
disambiguate:\n') + files.forEach(function (file) { + out.write(' * ') + out.write(file.relative.replace(opts.ext, '')) + out.write('\n') + }) + out.end() + return + } + files = [exactMatch] + } + + pipeline(fs.createReadStream(files[0].file), out, () => {}) + }) + + return out + } + + function toStdout (args = [], opts) { + opts = opts || {} + const stream = opts.stream || createDefaultStream() + const _onMissingHelp = opts.onMissingHelp || onMissingHelp + return new Promise((resolve, reject) => { + createStream(args) + .on('error', (err) => { + _onMissingHelp(err, args, stream).then(resolve, reject) + }) + .pipe(stream) + .on('close', resolve) + .on('end', resolve) + }) + } + + function onMissingHelp (_, args, stream) { + stream.write(`no such help file: ${args.join(' ')}.\n\n`) + return toStdout([], { stream, async onMissingHelp () {} }) + } +} + +function help (opts, args) { + return helpMe(opts).toStdout(args, opts) +} + +module.exports = helpMe +module.exports.help = help diff --git a/node_modules/help-me/package.json b/node_modules/help-me/package.json new file mode 100644 index 0000000..7a44944 --- /dev/null +++ b/node_modules/help-me/package.json @@ -0,0 +1,36 @@ +{ + "name": "help-me", + "version": "5.0.0", + "description": "Help command for node, partner of minimist and commist", + "main": "help-me.js", + "scripts": { + "test": "standard && node test.js | tap-spec" + }, + "repository": { + "type": "git", + "url": "https://github.com/mcollina/help-me.git" + }, + "keywords": [ + "help", + "command", + "minimist", + "commist" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/mcollina/help-me/issues" + }, + "homepage": "https://github.com/mcollina/help-me", + "devDependencies": { + "commist": "^2.0.0", + "concat-stream": "^2.0.0", + "pre-commit": "^1.1.3", + "proxyquire": "^2.1.3", + "standard": "^16.0.0", + "tap-spec": "^5.0.0", + "tape": "^5.0.0" + }, + "dependencies": { + } +} diff --git a/node_modules/help-me/test.js b/node_modules/help-me/test.js new file mode 100644 index 0000000..eca9b28 --- /dev/null +++ b/node_modules/help-me/test.js @@ -0,0 +1,316 @@ +'use strict' + +const test = require('tape') +const concat = require('concat-stream') +const fs = require('fs') +const os = require('os') +const path = require('path') +const helpMe = require('./') +const proxyquire = require('proxyquire') + +test('throws if no directory is passed', function (t) { + try { + helpMe() + t.fail() + } catch (err) { + t.equal(err.message, 'missing dir') + } + t.end() +}) + +test('throws if a normal file is passed', function (t) { + try { + helpMe({ + dir: __filename + }) + t.fail() + } catch (err) { + t.equal(err.message, `${__filename} is not a directory`) + } + t.end() +}) + +test('throws if the directory cannot be accessed', function (t) { + try { + helpMe({ + dir: './foo' + }) + t.fail() + } catch (err) { + t.equal(err.message, './foo is not a directory') + } + t.end() +}) + +test('show a generic help.txt from a folder to a stream with relative path in dir', function (t) { + t.plan(2) + + helpMe({ + dir: 'fixture/basic' + }).createStream() + .pipe(concat(function (data) { + fs.readFile('fixture/basic/help.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) +}) + +test('show a generic help.txt from a folder to a stream with absolute path in dir', function (t) { + t.plan(2) + + helpMe({ + dir: path.join(__dirname, 'fixture/basic') + }).createStream() + .pipe(concat(function (data) { + 
fs.readFile('fixture/basic/help.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) +}) + +test('custom help command with an array', function (t) { + t.plan(2) + + helpMe({ + dir: 'fixture/basic' + }).createStream(['hello']) + .pipe(concat(function (data) { + fs.readFile('fixture/basic/hello.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) +}) + +test('custom help command without an ext', function (t) { + t.plan(2) + + helpMe({ + dir: 'fixture/no-ext', + ext: '' + }).createStream(['hello']) + .pipe(concat(function (data) { + fs.readFile('fixture/no-ext/hello', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) +}) + +test('custom help command with a string', function (t) { + t.plan(2) + + helpMe({ + dir: 'fixture/basic' + }).createStream('hello') + .pipe(concat(function (data) { + fs.readFile('fixture/basic/hello.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) +}) + +test('missing help file', function (t) { + t.plan(1) + + helpMe({ + dir: 'fixture/basic' + }).createStream('abcde') + .on('error', function (err) { + t.equal(err.message, 'no such help file') + }) + .resume() +}) + +test('custom help command with an array', function (t) { + const helper = helpMe({ + dir: 'fixture/shortnames' + }) + + t.test('abbreviates two words in one', function (t) { + t.plan(2) + + helper + .createStream(['world']) + .pipe(concat(function (data) { + fs.readFile('fixture/shortnames/hello world.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) + }) + + t.test('abbreviates three words in two', function (t) { + t.plan(2) + + helper + .createStream(['abcde', 'fghi']) + .pipe(concat(function (data) { + fs.readFile('fixture/shortnames/abcde fghi lmno.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) + }) + + t.test('abbreviates a word', function (t) { + t.plan(2) + + helper + .createStream(['abc', 'fg']) + .pipe(concat(function (data) { + fs.readFile('fixture/shortnames/abcde fghi lmno.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) + }) + + t.test('abbreviates a word using strings', function (t) { + t.plan(2) + + helper + .createStream('abc fg') + .pipe(concat(function (data) { + fs.readFile('fixture/shortnames/abcde fghi lmno.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + })) + }) + + t.test('print a disambiguation', function (t) { + t.plan(1) + + const expected = '' + + 'There are 2 help pages that matches the given request, please disambiguate:\n' + + ' * abcde fghi lmno\n' + + ' * abcde hello\n' + + helper + .createStream(['abc']) + .pipe(concat({ encoding: 'string' }, function (data) { + t.equal(data, expected) + })) + }) + + t.test('choose exact match over partial', function (t) { + t.plan(1) + + helpMe({ + dir: 'fixture/sameprefix' + }).createStream(['hello']) + .pipe(concat({ encoding: 'string' }, function (data) { + t.equal(data, 'hello') + })) + }) +}) + +test('toStdout helper', async function (t) { + t.plan(2) + + let completed = false + const stream = concat(function (data) { + completed = true + fs.readFile('fixture/basic/help.txt', function (err, expected) { + t.error(err) + t.equal(data.toString(), expected.toString()) + }) + }) + + await helpMe({ + dir: 
'fixture/basic'
+  }).toStdout([], { stream })
+
+  t.ok(completed)
+})
+
+test('handle error in toStdout', async function (t) {
+  t.plan(2)
+
+  let completed = false
+  const stream = concat(function (data) {
+    completed = true
+    fs.readFile('fixture/basic/help.txt', function (err, expected) {
+      t.error(err)
+      t.equal(data.toString(), 'no such help file: something.\n\n' + expected.toString())
+    })
+  })
+
+  await helpMe({
+    dir: 'fixture/basic'
+  }).toStdout(['something'], {
+    stream
+  })
+
+  t.ok(completed)
+})
+
+test('customize missing help file message', async function (t) {
+  t.plan(3)
+
+  const stream = concat(function (data) {
+    t.equal(data.toString(), 'kaboom\n\n')
+  })
+
+  await helpMe({
+    dir: 'fixture/basic'
+  }).toStdout(['something'], {
+    stream,
+    async onMissingHelp (err, args, stream) {
+      t.equal(err.message, 'no such help file')
+      t.deepEquals(args, ['something'])
+      stream.end('kaboom\n\n')
+    }
+  })
+})
+
+test('toStdout without factory', async function (t) {
+  t.plan(2)
+
+  let completed = false
+  const stream = concat(function (data) {
+    completed = true
+    fs.readFile('fixture/basic/help.txt', function (err, expected) {
+      t.error(err)
+      t.equal(data.toString(), expected.toString())
+    })
+  })
+
+  await helpMe.help({
+    dir: 'fixture/basic',
+    stream
+  }, [])
+
+  t.ok(completed)
+})
+
+test('should allow for awaiting the response with default stdout stream', async function (t) {
+  t.plan(2)
+
+  const _process = Object.create(process)
+  const stdout = Object.create(process.stdout)
+  Object.defineProperty(_process, 'stdout', {
+    value: stdout
+  })
+
+  let completed = false
+  stdout.write = (data, cb) => {
+    t.equal(data.toString(), 'hello world' + os.EOL)
+    completed = true
+    cb()
+  }
+
+  const helpMe = proxyquire('./help-me', {
+    process: _process
+  })
+
+  await helpMe.help({
+    dir: 'fixture/basic'
+  })
+
+  t.ok(completed)
+})
diff --git a/node_modules/joycon/LICENSE b/node_modules/joycon/LICENSE
new file mode 100644
index 0000000..510cc07
--- /dev/null
+++ b/node_modules/joycon/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) egoist <0x142857@gmail.com> (https://github.com/egoist)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/joycon/README.md b/node_modules/joycon/README.md
new file mode 100644
index 0000000..fccd077
--- /dev/null
+++ b/node_modules/joycon/README.md
@@ -0,0 +1,133 @@
+
+# joycon
+
+[![NPM version](https://img.shields.io/npm/v/joycon.svg?style=flat)](https://npmjs.com/package/joycon) [![NPM downloads](https://img.shields.io/npm/dm/joycon.svg?style=flat)](https://npmjs.com/package/joycon) [![install size](https://packagephobia.now.sh/badge?p=joycon@2.0.0)](https://packagephobia.now.sh/result?p=joycon@2.0.0) [![CircleCI](https://circleci.com/gh/egoist/joycon/tree/master.svg?style=shield)](https://circleci.com/gh/egoist/joycon/tree/master) [![donate](https://img.shields.io/badge/$-donate-ff69b4.svg?maxAge=2592000&style=flat)](https://github.com/egoist/donate) [![chat](https://img.shields.io/badge/chat-on%20discord-7289DA.svg?style=flat)](https://chat.egoist.moe)
+
+## Differences with [cosmiconfig](https://github.com/davidtheclark/cosmiconfig)?
+
+JoyCon is zero-dependency but feature-complete.
+
+## Install
+
+```bash
+yarn add joycon
+```
+
+## Usage
+
+```js
+const JoyCon = require('joycon')
+
+const joycon = new JoyCon()
+
+joycon.load(['package-lock.json', 'yarn.lock'])
+.then(result => {
+  // result is {} when files do not exist
+  // otherwise { path, data }
+})
+```
+
+By default, non-js files are parsed as JSON. If you want something different, you can add a loader:
+
+```js
+const joycon = new JoyCon()
+
+joycon.addLoader({
+  test: /\.toml$/,
+  load(filepath) {
+    // parse the file contents, not the path string itself
+    const fs = require('fs')
+    return require('toml').parse(fs.readFileSync(filepath, 'utf8'))
+  }
+})
+
+joycon.load(['cargo.toml'])
+```
+
+## API
+
+### constructor([options])
+
+#### options
+
+##### files
+
+- Type: `string[]`
+
+The files to search.
+
+##### cwd
+
+The directory to start searching from.
+
+##### stopDir
+
+The directory to stop searching.
+
+##### packageKey
+
+You can load config from a certain property in a `package.json` file. For example, when you set `packageKey: 'babel'`, it will load the `babel` property in `package.json` instead of the entire data.
+
+##### parseJSON
+
+- Type: `(str: string) => any`
+- Default: `JSON.parse`
+
+The function used to parse JSON strings.
+
+### resolve([files], [cwd], [stopDir])
+### resolve([options])
+
+`files` defaults to `options.files`.
+
+`cwd` defaults to `options.cwd`.
+
+`stopDir` defaults to `options.stopDir`, then `path.parse(cwd).root`.
+
+If using a single object `options`, it will be the same as the constructor options.
+
+Search files and resolve the path of the file we found.
+
+There's also a `.resolveSync` method.
+
+### load(...args)
+
+The signature is the same as [resolve](#resolvefiles-cwd-stopdir).
+
+Search files and resolve `{ path, data }` of the file we found.
+
+There's also a `.loadSync` method.
+
+### addLoader(Loader)
+
+```typescript
+interface Loader {
+  name?: string
+  test: RegExp
+  load?(filepath: string): Promise<any>
+  loadSync?(filepath: string): any
+}
+```
+
+At least one of `load` and `loadSync` is required, depending on whether you're calling the synchronous methods or not.
+
+### removeLoader(name)
+
+Remove loaders by loader name.
+
+### clearCache()
+
+Clear the internal caches. Each JoyCon instance uses its own cache.
+
+## Contributing
+
+1. Fork it!
+2. Create your feature branch: `git checkout -b my-new-feature`
+3. Commit your changes: `git commit -am 'Add some feature'`
+4. Push to the branch: `git push origin my-new-feature`
+5. Submit a pull request :D
+
+## Author
+
+**joycon** © [egoist](https://github.com/egoist), Released under the [MIT](./LICENSE) License.
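+
+As a closing illustration of the `packageKey` option documented above — a minimal
+sketch, not from the upstream README; the `babel` key and file list are
+illustrative assumptions:
+
+```js
+const JoyCon = require('joycon')
+
+// Search upwards for a .babelrc, or a package.json with a "babel" field
+// (a package.json only resolves when the packageKey property exists).
+const joycon = new JoyCon({
+  files: ['package.json', '.babelrc'],
+  packageKey: 'babel'
+})
+
+joycon.load().then(result => {
+  // result.data is the "babel" field when it came from package.json,
+  // or the whole parsed JSON when it came from .babelrc
+  console.log(result.path, result.data)
+})
+```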
+Authored and maintained by egoist with help from contributors ([list](https://github.com/egoist/joycon/contributors)). + +> [github.com/egoist](https://github.com/egoist) · GitHub [@egoist](https://github.com/egoist) · Twitter [@_egoistlily](https://twitter.com/_egoistlily) diff --git a/node_modules/joycon/lib/index.js b/node_modules/joycon/lib/index.js new file mode 100644 index 0000000..77c7e40 --- /dev/null +++ b/node_modules/joycon/lib/index.js @@ -0,0 +1,286 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _fs = _interopRequireDefault(require("fs")); + +var _path = _interopRequireDefault(require("path")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const readFileSync = fp => { + return _fs.default.readFileSync(fp, 'utf8'); +}; + +const pathExists = fp => new Promise(resolve => { + _fs.default.access(fp, err => { + resolve(!err); + }); +}); + +const pathExistsSync = _fs.default.existsSync; + +class JoyCon { + constructor({ + files, + cwd = process.cwd(), + stopDir, + packageKey, + parseJSON = JSON.parse + } = {}) { + this.options = { + files, + cwd, + stopDir, + packageKey, + parseJSON + }; + this.existsCache = new Map(); + this.loaders = new Set(); + this.packageJsonCache = new Map(); + this.loadCache = new Map(); + } + + addLoader(loader) { + this.loaders.add(loader); + return this; + } + + removeLoader(name) { + for (const loader of this.loaders) { + if (name && loader.name === name) { + this.loaders.delete(loader); + } + } + + return this; + } + + async recusivelyResolve(options) { + if (options.cwd === options.stopDir || _path.default.basename(options.cwd) === 'node_modules') { + return null; + } + + for (const filename of options.files) { + const file = _path.default.resolve(options.cwd, filename); + + const exists = process.env.NODE_ENV !== 'test' && this.existsCache.has(file) ? this.existsCache.get(file) : await pathExists(file); + this.existsCache.set(file, exists); + + if (exists) { + if (!options.packageKey || _path.default.basename(file) !== 'package.json') { + return file; + } + + const data = require(file); + + delete require.cache[file]; + const hasPackageKey = Object.prototype.hasOwnProperty.call(data, options.packageKey); + + if (hasPackageKey) { + this.packageJsonCache.set(file, data); + return file; + } + } + + continue; + } + + return this.recusivelyResolve(Object.assign({}, options, { + cwd: _path.default.dirname(options.cwd) + })); + } + + recusivelyResolveSync(options) { + if (options.cwd === options.stopDir || _path.default.basename(options.cwd) === 'node_modules') { + return null; + } + + for (const filename of options.files) { + const file = _path.default.resolve(options.cwd, filename); + + const exists = process.env.NODE_ENV !== 'test' && this.existsCache.has(file) ? 
this.existsCache.get(file) : pathExistsSync(file); + this.existsCache.set(file, exists); + + if (exists) { + if (!options.packageKey || _path.default.basename(file) !== 'package.json') { + return file; + } + + const data = require(file); + + delete require.cache[file]; + const hasPackageKey = Object.prototype.hasOwnProperty.call(data, options.packageKey); + + if (hasPackageKey) { + this.packageJsonCache.set(file, data); + return file; + } + } + + continue; + } + + return this.recusivelyResolveSync(Object.assign({}, options, { + cwd: _path.default.dirname(options.cwd) + })); + } + + async resolve(...args) { + const options = this.normalizeOptions(args); + return this.recusivelyResolve(options); + } + + resolveSync(...args) { + const options = this.normalizeOptions(args); + return this.recusivelyResolveSync(options); + } + + runLoaderSync(loader, filepath) { + return loader.loadSync(filepath); + } + + runLoader(loader, filepath) { + if (!loader.load) return loader.loadSync(filepath); + return loader.load(filepath); + } + + async load(...args) { + const options = this.normalizeOptions(args); + const filepath = await this.recusivelyResolve(options); + + if (filepath) { + const defaultLoader = { + test: /\.+/, + loadSync: filepath => { + const extname = _path.default.extname(filepath).slice(1); + + if (extname === 'js' || extname === 'cjs') { + delete require.cache[filepath]; + return require(filepath); + } + + if (this.packageJsonCache.has(filepath)) { + return this.packageJsonCache.get(filepath)[options.packageKey]; + } + + const data = this.options.parseJSON(readFileSync(filepath)); + return data; + } + }; + const loader = this.findLoader(filepath) || defaultLoader; + let data; + + if (this.loadCache.has(filepath)) { + data = this.loadCache.get(filepath); + } else { + data = await this.runLoader(loader, filepath); + this.loadCache.set(filepath, data); + } + + return { + path: filepath, + data + }; + } + + return {}; + } + + loadSync(...args) { + const options = this.normalizeOptions(args); + const filepath = this.recusivelyResolveSync(options); + + if (filepath) { + const defaultLoader = { + test: /\.+/, + loadSync: filepath => { + const extname = _path.default.extname(filepath).slice(1); + + if (extname === 'js' || extname === 'cjs') { + delete require.cache[filepath]; + return require(filepath); + } + + if (this.packageJsonCache.has(filepath)) { + return this.packageJsonCache.get(filepath)[options.packageKey]; + } + + const data = this.options.parseJSON(readFileSync(filepath)); + return data; + } + }; + const loader = this.findLoader(filepath) || defaultLoader; + let data; + + if (this.loadCache.has(filepath)) { + data = this.loadCache.get(filepath); + } else { + data = this.runLoaderSync(loader, filepath); + this.loadCache.set(filepath, data); + } + + return { + path: filepath, + data + }; + } + + return {}; + } + + findLoader(filepath) { + for (const loader of this.loaders) { + if (loader.test && loader.test.test(filepath)) { + return loader; + } + } + + return null; + } + + clearCache() { + this.existsCache.clear(); + this.packageJsonCache.clear(); + this.loadCache.clear(); + return this; + } + + normalizeOptions(args) { + const options = Object.assign({}, this.options); + + if (Object.prototype.toString.call(args[0]) === '[object Object]') { + Object.assign(options, args[0]); + } else { + if (args[0]) { + options.files = args[0]; + } + + if (args[1]) { + options.cwd = args[1]; + } + + if (args[2]) { + options.stopDir = args[2]; + } + } + + options.cwd = 
_path.default.resolve(options.cwd);
+    options.stopDir = options.stopDir ? _path.default.resolve(options.stopDir) : _path.default.parse(options.cwd).root;
+
+    if (!options.files || options.files.length === 0) {
+      throw new Error('[joycon] files must be an non-empty array!');
+    }
+
+    options.__normalized__ = true;
+    return options;
+  }
+
+}
+
+exports.default = JoyCon;
+module.exports = JoyCon;
+module.exports.default = JoyCon;
\ No newline at end of file
diff --git a/node_modules/joycon/package.json b/node_modules/joycon/package.json
new file mode 100644
index 0000000..071c73b
--- /dev/null
+++ b/node_modules/joycon/package.json
@@ -0,0 +1,39 @@
+{
+  "name": "joycon",
+  "version": "3.1.1",
+  "description": "Load config with ease.",
+  "repository": {
+    "url": "egoist/joycon",
+    "type": "git"
+  },
+  "main": "lib/index.js",
+  "types": "types/index.d.ts",
+  "files": [
+    "lib",
+    "types/index.d.ts"
+  ],
+  "scripts": {
+    "test": "jest --testPathPattern tests",
+    "build": "babel src -d lib --no-comments",
+    "prepublishOnly": "npm run build"
+  },
+  "author": "egoist <0x142857@gmail.com>",
+  "license": "MIT",
+  "jest": {
+    "testEnvironment": "node"
+  },
+  "devDependencies": {
+    "@babel/cli": "^7.13.10",
+    "@babel/core": "^7.13.10",
+    "@babel/preset-env": "^7.13.10",
+    "@egoist/prettier-config": "^0.1.0",
+    "@types/node": "^14.14.33",
+    "babel-jest": "^26.6.3",
+    "babel-plugin-sync": "^0.1.0",
+    "jest-cli": "^26.6.3",
+    "prettier": "^2.2.1"
+  },
+  "engines": {
+    "node": ">=10"
+  }
+}
diff --git a/node_modules/joycon/types/index.d.ts b/node_modules/joycon/types/index.d.ts
new file mode 100644
index 0000000..fe6af28
--- /dev/null
+++ b/node_modules/joycon/types/index.d.ts
@@ -0,0 +1,62 @@
+export interface Options {
+  /* a list of files to search */
+  files?: string[]
+  /* the directory to search from */
+  cwd?: string
+  /* the directory to stop searching */
+  stopDir?: string
+  /* the key in package.json to read data at */
+  packageKey?: string
+  /* the function used to parse json */
+  parseJSON?: (str: string) => any
+}
+
+export interface LoadResult {
+  /* file path */
+  path?: string
+  /* file data */
+  data?: any
+}
+
+export interface AsyncLoader {
+  /** Optional loader name */
+  name?: string
+  test: RegExp
+  load(filepath: string): Promise<any>
+}
+
+export interface SyncLoader {
+  /** Optional loader name */
+  name?: string
+  test: RegExp
+  loadSync(filepath: string): any
+}
+
+export interface MultiLoader {
+  /** Optional loader name */
+  name?: string
+  test: RegExp
+  load(filepath: string): Promise<any>
+  loadSync(filepath: string): any
+}
+
+declare class JoyCon {
+  constructor(options?: Options)
+
+  options: Options
+
+  resolve(files?: string[] | Options, cwd?: string, stopDir?: string): Promise<string | null>
+  resolveSync(files?: string[] | Options, cwd?: string, stopDir?: string): string | null
+
+  load(files?: string[] | Options, cwd?: string, stopDir?: string): Promise<LoadResult>
+  loadSync(files?: string[] | Options, cwd?: string, stopDir?: string): LoadResult
+
+  addLoader(loader: AsyncLoader | SyncLoader | MultiLoader): this
+  removeLoader(name: string): this
+
+  /** Clear internal cache */
+  clearCache(): this
+}
+
+
+export default JoyCon
diff --git a/node_modules/minimist/.eslintrc b/node_modules/minimist/.eslintrc
new file mode 100644
index 0000000..bd1a5e0
--- /dev/null
+++ b/node_modules/minimist/.eslintrc
@@ -0,0 +1,29 @@
+{
+    "root": true,
+
+    "extends": "@ljharb/eslint-config/node/0.4",
+
+    "rules": {
+        "array-element-newline": 0,
+        "complexity": 0,
+        "func-style": [2, "declaration"],
+
"max-lines-per-function": 0, + "max-nested-callbacks": 1, + "max-statements-per-line": 1, + "max-statements": 0, + "multiline-comment-style": 0, + "no-continue": 1, + "no-param-reassign": 1, + "no-restricted-syntax": 1, + "object-curly-newline": 0, + }, + + "overrides": [ + { + "files": "test/**", + "rules": { + "camelcase": 0, + }, + }, + ] +} diff --git a/node_modules/minimist/.github/FUNDING.yml b/node_modules/minimist/.github/FUNDING.yml new file mode 100644 index 0000000..a936622 --- /dev/null +++ b/node_modules/minimist/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/minimist +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/minimist/.nycrc b/node_modules/minimist/.nycrc new file mode 100644 index 0000000..55c3d29 --- /dev/null +++ b/node_modules/minimist/.nycrc @@ -0,0 +1,14 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 86, + "statements": 85.93, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "example", + "test" + ] +} diff --git a/node_modules/minimist/CHANGELOG.md b/node_modules/minimist/CHANGELOG.md new file mode 100644 index 0000000..c9a1e15 --- /dev/null +++ b/node_modules/minimist/CHANGELOG.md @@ -0,0 +1,298 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v1.2.8](https://github.com/minimistjs/minimist/compare/v1.2.7...v1.2.8) - 2023-02-09 + +### Merged + +- [Fix] Fix long option followed by single dash [`#17`](https://github.com/minimistjs/minimist/pull/17) +- [Tests] Remove duplicate test [`#12`](https://github.com/minimistjs/minimist/pull/12) +- [Fix] opt.string works with multiple aliases [`#10`](https://github.com/minimistjs/minimist/pull/10) + +### Fixed + +- [Fix] Fix long option followed by single dash (#17) [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Tests] Remove duplicate test (#12) [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] Fix long option followed by single dash [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Fix] opt.string works with multiple aliases (#10) [`#9`](https://github.com/minimistjs/minimist/issues/9) +- [Fix] Fix handling of short option with non-trivial equals [`#5`](https://github.com/minimistjs/minimist/issues/5) +- [Tests] Remove duplicate test [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] opt.string works with multiple aliases [`#9`](https://github.com/minimistjs/minimist/issues/9) + +### Commits + +- Merge tag 'v0.2.3' [`a026794`](https://github.com/minimistjs/minimist/commit/a0267947c7870fc5847cf2d437fbe33f392767da) +- [eslint] fix indentation and whitespace [`5368ca4`](https://github.com/minimistjs/minimist/commit/5368ca4147e974138a54cc0dc4cea8f756546b70) +- [eslint] fix indentation and whitespace [`e5f5067`](https://github.com/minimistjs/minimist/commit/e5f5067259ceeaf0b098d14bec910f87e58708c7) +- [eslint] more cleanup [`62fde7d`](https://github.com/minimistjs/minimist/commit/62fde7d935f83417fb046741531a9e2346a36976) +- [eslint] more cleanup [`36ac5d0`](https://github.com/minimistjs/minimist/commit/36ac5d0d95e4947d074e5737d94814034ca335d1) +- [meta] add `auto-changelog` [`73923d2`](https://github.com/minimistjs/minimist/commit/73923d223553fca08b1ba77e3fbc2a492862ae4c) +- [actions] add reusable workflows [`d80727d`](https://github.com/minimistjs/minimist/commit/d80727df77bfa9e631044d7f16368d8f09242c91) +- [eslint] add eslint; rules to enable later are warnings [`48bc06a`](https://github.com/minimistjs/minimist/commit/48bc06a1b41f00e9cdf183db34f7a51ba70e98d4) +- [eslint] fix indentation [`34b0f1c`](https://github.com/minimistjs/minimist/commit/34b0f1ccaa45183c3c4f06a91f9b405180a6f982) +- [readme] rename and add badges [`5df0fe4`](https://github.com/minimistjs/minimist/commit/5df0fe49211bd09a3636f8686a7cb3012c3e98f0) +- [Dev Deps] switch from `covert` to `nyc` [`a48b128`](https://github.com/minimistjs/minimist/commit/a48b128fdb8d427dfb20a15273f83e38d97bef07) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`f0fb958`](https://github.com/minimistjs/minimist/commit/f0fb958e9a1fe980cdffc436a211b0bda58f621b) +- [meta] create FUNDING.yml; add `funding` in package.json [`3639e0c`](https://github.com/minimistjs/minimist/commit/3639e0c819359a366387e425ab6eabf4c78d3caa) +- [meta] use `npmignore` to autogenerate an npmignore file [`be2e038`](https://github.com/minimistjs/minimist/commit/be2e038c342d8333b32f0fde67a0026b79c8150e) +- Only apps should have lockfiles [`282b570`](https://github.com/minimistjs/minimist/commit/282b570e7489d01b03f2d6d3dabf79cd3e5f84cf) +- isConstructorOrProto adapted from PR [`ef9153f`](https://github.com/minimistjs/minimist/commit/ef9153fc52b6cea0744b2239921c5dcae4697f11) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` 
[`098873c`](https://github.com/minimistjs/minimist/commit/098873c213cdb7c92e55ae1ef5aa1af3a8192a79) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`3124ed3`](https://github.com/minimistjs/minimist/commit/3124ed3e46306301ebb3c834874ce0241555c2c4) +- [meta] add `safe-publish-latest` [`4b927de`](https://github.com/minimistjs/minimist/commit/4b927de696d561c636b4f43bf49d4597cb36d6d6) +- [Tests] add `aud` in `posttest` [`b32d9bd`](https://github.com/minimistjs/minimist/commit/b32d9bd0ab340f4e9f8c3a97ff2a4424f25fab8c) +- [meta] update repo URLs [`f9fdfc0`](https://github.com/minimistjs/minimist/commit/f9fdfc032c54884d9a9996a390c63cd0719bbe1a) +- [actions] Avoid 0.6 tests due to build failures [`ba92fe6`](https://github.com/minimistjs/minimist/commit/ba92fe6ebbdc0431cca9a2ea8f27beb492f5e4ec) +- [Dev Deps] update `tape` [`950eaa7`](https://github.com/minimistjs/minimist/commit/950eaa74f112e04d23e9c606c67472c46739b473) +- [Dev Deps] add missing `npmignore` dev dep [`3226afa`](https://github.com/minimistjs/minimist/commit/3226afaf09e9d127ca369742437fe6e88f752d6b) +- Merge tag 'v0.2.2' [`980d7ac`](https://github.com/minimistjs/minimist/commit/980d7ac61a0b4bd552711251ac107d506b23e41f) + +## [v1.2.7](https://github.com/minimistjs/minimist/compare/v1.2.6...v1.2.7) - 2022-10-10 + +### Commits + +- [meta] add `auto-changelog` [`0ebf4eb`](https://github.com/minimistjs/minimist/commit/0ebf4ebcd5f7787a5524d31a849ef41316b83c3c) +- [actions] add reusable workflows [`e115b63`](https://github.com/minimistjs/minimist/commit/e115b63fa9d3909f33b00a2db647ff79068388de) +- [eslint] add eslint; rules to enable later are warnings [`f58745b`](https://github.com/minimistjs/minimist/commit/f58745b9bb84348e1be72af7dbba5840c7c13013) +- [Dev Deps] switch from `covert` to `nyc` [`ab03356`](https://github.com/minimistjs/minimist/commit/ab033567b9c8b31117cb026dc7f1e592ce455c65) +- [readme] rename and add badges [`236f4a0`](https://github.com/minimistjs/minimist/commit/236f4a07e4ebe5ee44f1496ec6974991ab293ffd) +- [meta] create FUNDING.yml; add `funding` in package.json [`783a49b`](https://github.com/minimistjs/minimist/commit/783a49bfd47e8335d3098a8cac75662cf71eb32a) +- [meta] use `npmignore` to autogenerate an npmignore file [`f81ece6`](https://github.com/minimistjs/minimist/commit/f81ece6aaec2fa14e69ff4f1e0407a8c4e2635a2) +- Only apps should have lockfiles [`56cad44`](https://github.com/minimistjs/minimist/commit/56cad44c7f879b9bb5ec18fcc349308024a89bfc) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`49c5f9f`](https://github.com/minimistjs/minimist/commit/49c5f9fb7e6a92db9eb340cc679de92fb3aacded) +- [Tests] add `aud` in `posttest` [`228ae93`](https://github.com/minimistjs/minimist/commit/228ae938f3cd9db9dfd8bd7458b076a7b2aef280) +- [meta] add `safe-publish-latest` [`01fc23f`](https://github.com/minimistjs/minimist/commit/01fc23f5104f85c75059972e01dd33796ab529ff) +- [meta] update repo URLs [`6b164c7`](https://github.com/minimistjs/minimist/commit/6b164c7d68e0b6bf32f894699effdfb7c63041dd) + +## [v1.2.6](https://github.com/minimistjs/minimist/compare/v1.2.5...v1.2.6) - 2022-03-21 + +### Commits + +- test from prototype pollution PR [`bc8ecee`](https://github.com/minimistjs/minimist/commit/bc8ecee43875261f4f17eb20b1243d3ed15e70eb) +- isConstructorOrProto adapted from PR [`c2b9819`](https://github.com/minimistjs/minimist/commit/c2b981977fa834b223b408cfb860f933c9811e4d) +- security notice for additional prototype pollution issue 
[`ef88b93`](https://github.com/minimistjs/minimist/commit/ef88b9325f77b5ee643ccfc97e2ebda577e4c4e2) + +## [v1.2.5](https://github.com/minimistjs/minimist/compare/v1.2.4...v1.2.5) - 2020-03-12 + +## [v1.2.4](https://github.com/minimistjs/minimist/compare/v1.2.3...v1.2.4) - 2020-03-11 + +### Commits + +- security notice [`4cf1354`](https://github.com/minimistjs/minimist/commit/4cf1354839cb972e38496d35e12f806eea92c11f) +- additional test for constructor prototype pollution [`1043d21`](https://github.com/minimistjs/minimist/commit/1043d212c3caaf871966e710f52cfdf02f9eea4b) + +## [v1.2.3](https://github.com/minimistjs/minimist/compare/v1.2.2...v1.2.3) - 2020-03-10 + +### Commits + +- more failing proto pollution tests [`13c01a5`](https://github.com/minimistjs/minimist/commit/13c01a5327736903704984b7f65616b8476850cc) +- even more aggressive checks for protocol pollution [`38a4d1c`](https://github.com/minimistjs/minimist/commit/38a4d1caead72ef99e824bb420a2528eec03d9ab) + +## [v1.2.2](https://github.com/minimistjs/minimist/compare/v1.2.1...v1.2.2) - 2020-03-10 + +### Commits + +- failing test for protocol pollution [`0efed03`](https://github.com/minimistjs/minimist/commit/0efed0340ec8433638758f7ca0c77cb20a0bfbab) +- cleanup [`67d3722`](https://github.com/minimistjs/minimist/commit/67d3722413448d00a62963d2d30c34656a92d7e2) +- console.dir -> console.log [`47acf72`](https://github.com/minimistjs/minimist/commit/47acf72c715a630bf9ea013867f47f1dd69dfc54) +- don't assign onto __proto__ [`63e7ed0`](https://github.com/minimistjs/minimist/commit/63e7ed05aa4b1889ec2f3b196426db4500cbda94) + +## [v1.2.1](https://github.com/minimistjs/minimist/compare/v1.2.0...v1.2.1) - 2020-03-10 + +### Merged + +- move the `opts['--']` example back where it belongs [`#63`](https://github.com/minimistjs/minimist/pull/63) + +### Commits + +- add test [`6be5dae`](https://github.com/minimistjs/minimist/commit/6be5dae35a32a987bcf4137fcd6c19c5200ee909) +- fix bad boolean regexp [`ac3fc79`](https://github.com/minimistjs/minimist/commit/ac3fc796e63b95128fdbdf67ea7fad71bd59aa76) + +## [v1.2.0](https://github.com/minimistjs/minimist/compare/v1.1.3...v1.2.0) - 2015-08-24 + +### Commits + +- failing -k=v short test [`63416b8`](https://github.com/minimistjs/minimist/commit/63416b8cd1d0d70e4714564cce465a36e4dd26d7) +- kv short fix [`6bbe145`](https://github.com/minimistjs/minimist/commit/6bbe14529166245e86424f220a2321442fe88dc3) +- failing kv short test [`f72ab7f`](https://github.com/minimistjs/minimist/commit/f72ab7f4572adc52902c9b6873cc969192f01b10) +- fixed kv test [`f5a48c3`](https://github.com/minimistjs/minimist/commit/f5a48c3e50e40ca54f00c8e84de4b4d6e9897fa8) +- enforce space between arg key and value [`86b321a`](https://github.com/minimistjs/minimist/commit/86b321affe648a8e016c095a4f0efa9d9074f502) + +## [v1.1.3](https://github.com/minimistjs/minimist/compare/v1.1.2...v1.1.3) - 2015-08-06 + +### Commits + +- add failing test - boolean alias array [`0fa3c5b`](https://github.com/minimistjs/minimist/commit/0fa3c5b3dd98551ddecf5392831b4c21211743fc) +- fix boolean values with multiple aliases [`9c0a6e7`](https://github.com/minimistjs/minimist/commit/9c0a6e7de25a273b11bbf9a7464f0bd833779795) + +## [v1.1.2](https://github.com/minimistjs/minimist/compare/v1.1.1...v1.1.2) - 2015-07-22 + +### Commits + +- Convert boolean arguments to boolean values [`8f3dc27`](https://github.com/minimistjs/minimist/commit/8f3dc27cf833f1d54671b6d0bcb55c2fe19672a9) +- use non-ancient npm, node 0.12 and iojs 
[`61ed1d0`](https://github.com/minimistjs/minimist/commit/61ed1d034b9ec7282764ce76f3992b1a0b4906ae) +- an older npm for 0.8 [`25cf778`](https://github.com/minimistjs/minimist/commit/25cf778b1220e7838a526832ad6972f75244054f) + +## [v1.1.1](https://github.com/minimistjs/minimist/compare/v1.1.0...v1.1.1) - 2015-03-10 + +### Commits + +- check that they type of a value is a boolean, not just that it is currently set to a boolean [`6863198`](https://github.com/minimistjs/minimist/commit/6863198e36139830ff1f20ffdceaddd93f2c1db9) +- upgrade tape, fix type issues from old tape version [`806712d`](https://github.com/minimistjs/minimist/commit/806712df91604ed02b8e39aa372b84aea659ee34) +- test for setting a boolean to a null default [`8c444fe`](https://github.com/minimistjs/minimist/commit/8c444fe89384ded7d441c120915ea60620b01dd3) +- if the previous value was a boolean, without an default (or with an alias) don't make an array either [`e5f419a`](https://github.com/minimistjs/minimist/commit/e5f419a3b5b3bc3f9e5ac71b7040621af70ed2dd) + +## [v1.1.0](https://github.com/minimistjs/minimist/compare/v1.0.0...v1.1.0) - 2014-08-10 + +### Commits + +- add support for handling "unknown" options not registered with the parser. [`6f3cc5d`](https://github.com/minimistjs/minimist/commit/6f3cc5d4e84524932a6ef2ce3592acc67cdd4383) +- reformat package.json [`02ed371`](https://github.com/minimistjs/minimist/commit/02ed37115194d3697ff358e8e25e5e66bab1d9f8) +- coverage script [`e5531ba`](https://github.com/minimistjs/minimist/commit/e5531ba0479da3b8138d3d8cac545d84ccb1c8df) +- extra fn to get 100% coverage again [`a6972da`](https://github.com/minimistjs/minimist/commit/a6972da89e56bf77642f8ec05a13b6558db93498) + +## [v1.0.0](https://github.com/minimistjs/minimist/compare/v0.2.3...v1.0.0) - 2014-08-10 + +### Commits + +- added stopEarly option [`471c7e4`](https://github.com/minimistjs/minimist/commit/471c7e4a7e910fc7ad8f9df850a186daf32c64e9) +- fix list [`fef6ae7`](https://github.com/minimistjs/minimist/commit/fef6ae79c38b9dc1c49569abb7cd04eb965eac5e) + +## [v0.2.3](https://github.com/minimistjs/minimist/compare/v0.2.2...v0.2.3) - 2023-02-09 + +### Merged + +- [Fix] Fix long option followed by single dash [`#17`](https://github.com/minimistjs/minimist/pull/17) +- [Tests] Remove duplicate test [`#12`](https://github.com/minimistjs/minimist/pull/12) +- [Fix] opt.string works with multiple aliases [`#10`](https://github.com/minimistjs/minimist/pull/10) + +### Fixed + +- [Fix] Fix long option followed by single dash (#17) [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Tests] Remove duplicate test (#12) [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] opt.string works with multiple aliases (#10) [`#9`](https://github.com/minimistjs/minimist/issues/9) + +### Commits + +- [eslint] fix indentation and whitespace [`e5f5067`](https://github.com/minimistjs/minimist/commit/e5f5067259ceeaf0b098d14bec910f87e58708c7) +- [eslint] more cleanup [`36ac5d0`](https://github.com/minimistjs/minimist/commit/36ac5d0d95e4947d074e5737d94814034ca335d1) +- [eslint] fix indentation [`34b0f1c`](https://github.com/minimistjs/minimist/commit/34b0f1ccaa45183c3c4f06a91f9b405180a6f982) +- isConstructorOrProto adapted from PR [`ef9153f`](https://github.com/minimistjs/minimist/commit/ef9153fc52b6cea0744b2239921c5dcae4697f11) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`098873c`](https://github.com/minimistjs/minimist/commit/098873c213cdb7c92e55ae1ef5aa1af3a8192a79) +- [Dev Deps] add missing `npmignore` dev dep 
[`3226afa`](https://github.com/minimistjs/minimist/commit/3226afaf09e9d127ca369742437fe6e88f752d6b) + +## [v0.2.2](https://github.com/minimistjs/minimist/compare/v0.2.1...v0.2.2) - 2022-10-10 + +### Commits + +- [meta] add `auto-changelog` [`73923d2`](https://github.com/minimistjs/minimist/commit/73923d223553fca08b1ba77e3fbc2a492862ae4c) +- [actions] add reusable workflows [`d80727d`](https://github.com/minimistjs/minimist/commit/d80727df77bfa9e631044d7f16368d8f09242c91) +- [eslint] add eslint; rules to enable later are warnings [`48bc06a`](https://github.com/minimistjs/minimist/commit/48bc06a1b41f00e9cdf183db34f7a51ba70e98d4) +- [readme] rename and add badges [`5df0fe4`](https://github.com/minimistjs/minimist/commit/5df0fe49211bd09a3636f8686a7cb3012c3e98f0) +- [Dev Deps] switch from `covert` to `nyc` [`a48b128`](https://github.com/minimistjs/minimist/commit/a48b128fdb8d427dfb20a15273f83e38d97bef07) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`f0fb958`](https://github.com/minimistjs/minimist/commit/f0fb958e9a1fe980cdffc436a211b0bda58f621b) +- [meta] create FUNDING.yml; add `funding` in package.json [`3639e0c`](https://github.com/minimistjs/minimist/commit/3639e0c819359a366387e425ab6eabf4c78d3caa) +- [meta] use `npmignore` to autogenerate an npmignore file [`be2e038`](https://github.com/minimistjs/minimist/commit/be2e038c342d8333b32f0fde67a0026b79c8150e) +- Only apps should have lockfiles [`282b570`](https://github.com/minimistjs/minimist/commit/282b570e7489d01b03f2d6d3dabf79cd3e5f84cf) +- [meta] add `safe-publish-latest` [`4b927de`](https://github.com/minimistjs/minimist/commit/4b927de696d561c636b4f43bf49d4597cb36d6d6) +- [Tests] add `aud` in `posttest` [`b32d9bd`](https://github.com/minimistjs/minimist/commit/b32d9bd0ab340f4e9f8c3a97ff2a4424f25fab8c) +- [meta] update repo URLs [`f9fdfc0`](https://github.com/minimistjs/minimist/commit/f9fdfc032c54884d9a9996a390c63cd0719bbe1a) + +## [v0.2.1](https://github.com/minimistjs/minimist/compare/v0.2.0...v0.2.1) - 2020-03-12 + +## [v0.2.0](https://github.com/minimistjs/minimist/compare/v0.1.0...v0.2.0) - 2014-06-19 + +### Commits + +- support all-boolean mode [`450a97f`](https://github.com/minimistjs/minimist/commit/450a97f6e2bc85c7a4a13185c19a818d9a5ebe69) + +## [v0.1.0](https://github.com/minimistjs/minimist/compare/v0.0.10...v0.1.0) - 2014-05-12 + +### Commits + +- Provide a mechanism to segregate -- arguments [`ce4a1e6`](https://github.com/minimistjs/minimist/commit/ce4a1e63a7e8d5ab88d2a3768adefa6af98a445a) +- documented argv['--'] [`14db0e6`](https://github.com/minimistjs/minimist/commit/14db0e6dbc6d2b9e472adaa54dad7004b364634f) +- Adding a test-case for notFlags segregation [`715c1e3`](https://github.com/minimistjs/minimist/commit/715c1e3714be223f998f6c537af6b505f0236c16) + +## [v0.0.10](https://github.com/minimistjs/minimist/compare/v0.0.9...v0.0.10) - 2014-05-11 + +### Commits + +- dedicated boolean test [`46e448f`](https://github.com/minimistjs/minimist/commit/46e448f9f513cfeb2bcc8b688b9b47ba1e515c2b) +- dedicated num test [`9bf2d36`](https://github.com/minimistjs/minimist/commit/9bf2d36f1d3b8795be90b8f7de0a937f098aa394) +- aliased values treated as strings [`1ab743b`](https://github.com/minimistjs/minimist/commit/1ab743bad4484d69f1259bed42f9531de01119de) +- cover the case of already numbers, at 100% coverage [`b2bb044`](https://github.com/minimistjs/minimist/commit/b2bb04436599d77a2ce029e8e555e25b3aa55d13) +- another test for higher coverage 
[`3662624`](https://github.com/minimistjs/minimist/commit/3662624be976d5489d486a856849c048d13be903) + +## [v0.0.9](https://github.com/minimistjs/minimist/compare/v0.0.8...v0.0.9) - 2014-05-08 + +### Commits + +- Eliminate `longest` fn. [`824f642`](https://github.com/minimistjs/minimist/commit/824f642038d1b02ede68b6261d1d65163390929a) + +## [v0.0.8](https://github.com/minimistjs/minimist/compare/v0.0.7...v0.0.8) - 2014-02-20 + +### Commits + +- return '' if flag is string and empty [`fa63ed4`](https://github.com/minimistjs/minimist/commit/fa63ed4651a4ef4eefddce34188e0d98d745a263) +- handle joined single letters [`66c248f`](https://github.com/minimistjs/minimist/commit/66c248f0241d4d421d193b022e9e365f11178534) + +## [v0.0.7](https://github.com/minimistjs/minimist/compare/v0.0.6...v0.0.7) - 2014-02-08 + +### Commits + +- another swap of .test for .match [`d1da408`](https://github.com/minimistjs/minimist/commit/d1da40819acbe846d89a5c02721211e3c1260dde) + +## [v0.0.6](https://github.com/minimistjs/minimist/compare/v0.0.5...v0.0.6) - 2014-02-08 + +### Commits + +- use .test() instead of .match() to not crash on non-string values in the arguments array [`7e0d1ad`](https://github.com/minimistjs/minimist/commit/7e0d1add8c9e5b9b20a4d3d0f9a94d824c578da1) + +## [v0.0.5](https://github.com/minimistjs/minimist/compare/v0.0.4...v0.0.5) - 2013-09-18 + +### Commits + +- Improve '--' handling. [`b11822c`](https://github.com/minimistjs/minimist/commit/b11822c09cc9d2460f30384d12afc0b953c037a4) + +## [v0.0.4](https://github.com/minimistjs/minimist/compare/v0.0.3...v0.0.4) - 2013-09-17 + +## [v0.0.3](https://github.com/minimistjs/minimist/compare/v0.0.2...v0.0.3) - 2013-09-12 + +### Commits + +- failing test for single dash preceeding a double dash [`b465514`](https://github.com/minimistjs/minimist/commit/b465514b82c9ae28972d714facd951deb2ad762b) +- fix for the dot test [`6a095f1`](https://github.com/minimistjs/minimist/commit/6a095f1d364c8fab2d6753d2291a0649315d297a) + +## [v0.0.2](https://github.com/minimistjs/minimist/compare/v0.0.1...v0.0.2) - 2013-08-28 + +### Commits + +- allow dotted aliases & defaults [`321c33e`](https://github.com/minimistjs/minimist/commit/321c33e755485faaeb44eeb1c05d33b2e0a5a7c4) +- use a better version of ff [`e40f611`](https://github.com/minimistjs/minimist/commit/e40f61114cf7be6f7947f7b3eed345853a67dbbb) + +## [v0.0.1](https://github.com/minimistjs/minimist/compare/v0.0.0...v0.0.1) - 2013-06-25 + +### Commits + +- remove trailing commas [`6ff0fa0`](https://github.com/minimistjs/minimist/commit/6ff0fa055064f15dbe06d50b89d5173a6796e1db) + +## v0.0.0 - 2013-06-25 + +### Commits + +- half of the parse test ported [`3079326`](https://github.com/minimistjs/minimist/commit/307932601325087de6cf94188eb798ffc4f3088a) +- stripped down code and a passing test from optimist [`7cced88`](https://github.com/minimistjs/minimist/commit/7cced88d82e399d1a03ed23eb667f04d3f320d10) +- ported parse tests completely over [`9448754`](https://github.com/minimistjs/minimist/commit/944875452e0820df6830b1408c26a0f7d3e1db04) +- docs, package.json [`a5bf46a`](https://github.com/minimistjs/minimist/commit/a5bf46ac9bb3bd114a9c340276c62c1091e538d5) +- move more short tests into short.js [`503edb5`](https://github.com/minimistjs/minimist/commit/503edb5c41d89c0d40831ee517154fc13b0f18b9) +- default bool test was wrong, not the code [`1b9f5db`](https://github.com/minimistjs/minimist/commit/1b9f5db4741b49962846081b68518de824992097) +- passing long tests ripped out of parse.js 
[`7972c4a`](https://github.com/minimistjs/minimist/commit/7972c4aff1f4803079e1668006658e2a761a0428) +- badges [`84c0370`](https://github.com/minimistjs/minimist/commit/84c037063664d42878aace715fe6572ce01b6f3b) +- all the tests now ported, some failures [`64239ed`](https://github.com/minimistjs/minimist/commit/64239edfe92c711c4eb0da254fcdfad2a5fdb605) +- failing short test [`f8a5341`](https://github.com/minimistjs/minimist/commit/f8a534112dd1138d2fad722def56a848480c446f) +- fixed the numeric test [`6b034f3`](https://github.com/minimistjs/minimist/commit/6b034f37c79342c60083ed97fd222e16928aac51) diff --git a/node_modules/minimist/LICENSE b/node_modules/minimist/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/minimist/README.md b/node_modules/minimist/README.md new file mode 100644 index 0000000..74da323 --- /dev/null +++ b/node_modules/minimist/README.md @@ -0,0 +1,121 @@ +# minimist [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.log(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ + _: ['foo', 'bar', 'baz'], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' +} +``` + +# security + +Previous versions had a prototype pollution bug that could cause privilege +escalation in some circumstances when handling untrusted user input. + +Please use version 1.2.6 or later: + +* https://security.snyk.io/vuln/SNYK-JS-MINIMIST-2429795 (version <=1.2.5) +* https://snyk.io/vuln/SNYK-JS-MINIMIST-559764 (version <=1.2.3) + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. 
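+
+For example (an illustrative call, following the parsing rules described in
+this section):
+
+``` js
+var argv = require('minimist')(['-x', '3', 'foo', 'bar']);
+// => { _: ['foo', 'bar'], x: 3 }
+```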
+
+Numeric-looking arguments will be returned as numbers unless `opts.string` or
+`opts.boolean` is set for that argument name.
+
+Any arguments after `'--'` will not be parsed and will end up in `argv._`.
+
+options can be:
+
+* `opts.string` - a string or array of strings of argument names to always treat as
+strings
+* `opts.boolean` - a boolean, string, or array of strings of argument names to always
+treat as booleans. If `true`, all double-hyphenated arguments without equals signs
+are treated as booleans (e.g. this affects `--foo`, but not `-f` or `--foo=bar`)
+* `opts.alias` - an object mapping string names to strings or arrays of string
+argument names to use as aliases
+* `opts.default` - an object mapping string argument names to default values
+* `opts.stopEarly` - when true, populate `argv._` with everything after the
+first non-option
+* `opts['--']` - when true, populate `argv._` with everything before the `--`
+and `argv['--']` with everything after the `--`. Here's an example:
+
+  ```
+  > require('./')('one two three -- four five --six'.split(' '), { '--': true })
+  {
+    _: ['one', 'two', 'three'],
+    '--': ['four', 'five', '--six']
+  }
+  ```
+
+  Note that with `opts['--']` set, parsing for arguments still stops after the
+  `--`.
+
+* `opts.unknown` - a function which is invoked with a command line parameter not
+defined in the `opts` configuration object. If the function returns `false`, the
+unknown option is not added to `argv`.
+
+# install
+
+With [npm](https://npmjs.org) do:
+
+```
+npm install minimist
+```
+
+# license
+
+MIT
+
+[package-url]: https://npmjs.org/package/minimist
+[npm-version-svg]: https://versionbadg.es/minimistjs/minimist.svg
+[npm-badge-png]: https://nodei.co/npm/minimist.png?downloads=true&stars=true
+[license-image]: https://img.shields.io/npm/l/minimist.svg
+[license-url]: LICENSE
+[downloads-image]: https://img.shields.io/npm/dm/minimist.svg
+[downloads-url]: https://npm-stat.com/charts.html?package=minimist
+[codecov-image]: https://codecov.io/gh/minimistjs/minimist/branch/main/graphs/badge.svg
+[codecov-url]: https://app.codecov.io/gh/minimistjs/minimist/
+[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/minimistjs/minimist
+[actions-url]: https://github.com/minimistjs/minimist/actions
diff --git a/node_modules/minimist/example/parse.js b/node_modules/minimist/example/parse.js
new file mode 100644
index 0000000..9d90ffb
--- /dev/null
+++ b/node_modules/minimist/example/parse.js
@@ -0,0 +1,4 @@
+'use strict';
+
+var argv = require('../')(process.argv.slice(2));
+console.log(argv);
diff --git a/node_modules/minimist/index.js b/node_modules/minimist/index.js
new file mode 100644
index 0000000..f020f39
--- /dev/null
+++ b/node_modules/minimist/index.js
@@ -0,0 +1,263 @@
+'use strict';
+
+function hasKey(obj, keys) {
+  var o = obj;
+  keys.slice(0, -1).forEach(function (key) {
+    o = o[key] || {};
+  });
+
+  var key = keys[keys.length - 1];
+  return key in o;
+}
+
+function isNumber(x) {
+  if (typeof x === 'number') { return true; }
+  if ((/^0x[0-9a-f]+$/i).test(x)) { return true; }
+  return (/^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/).test(x);
+}
+
+function isConstructorOrProto(obj, key) {
+  return (key === 'constructor' && typeof obj[key] === 'function') || key === '__proto__';
+}
+
+module.exports = function (args, opts) {
+  if (!opts) { opts = {}; }
+
+  var flags = {
+    bools: {},
+    strings: {},
+    unknownFn: null,
+  };
+
+  if (typeof opts.unknown === 'function') {
+    flags.unknownFn = opts.unknown;
+  }
+
+  if (typeof
opts.boolean === 'boolean' && opts.boolean) { + flags.allBools = true; + } else { + [].concat(opts.boolean).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + } + + var aliases = {}; + + function aliasIsBoolean(key) { + return aliases[key].some(function (x) { + return flags.bools[x]; + }); + } + + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + if (aliases[key]) { + [].concat(aliases[key]).forEach(function (k) { + flags.strings[k] = true; + }); + } + }); + + var defaults = opts.default || {}; + + var argv = { _: [] }; + + function argDefined(key, arg) { + return (flags.allBools && (/^--[^=]+$/).test(arg)) + || flags.strings[key] + || flags.bools[key] + || aliases[key]; + } + + function setKey(obj, keys, value) { + var o = obj; + for (var i = 0; i < keys.length - 1; i++) { + var key = keys[i]; + if (isConstructorOrProto(o, key)) { return; } + if (o[key] === undefined) { o[key] = {}; } + if ( + o[key] === Object.prototype + || o[key] === Number.prototype + || o[key] === String.prototype + ) { + o[key] = {}; + } + if (o[key] === Array.prototype) { o[key] = []; } + o = o[key]; + } + + var lastKey = keys[keys.length - 1]; + if (isConstructorOrProto(o, lastKey)) { return; } + if ( + o === Object.prototype + || o === Number.prototype + || o === String.prototype + ) { + o = {}; + } + if (o === Array.prototype) { o = []; } + if (o[lastKey] === undefined || flags.bools[lastKey] || typeof o[lastKey] === 'boolean') { + o[lastKey] = value; + } else if (Array.isArray(o[lastKey])) { + o[lastKey].push(value); + } else { + o[lastKey] = [o[lastKey], value]; + } + } + + function setArg(key, val, arg) { + if (arg && flags.unknownFn && !argDefined(key, arg)) { + if (flags.unknownFn(arg) === false) { return; } + } + + var value = !flags.strings[key] && isNumber(val) + ? Number(val) + : val; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--') + 1); + args = args.slice(0, args.indexOf('--')); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + var key; + var next; + + if ((/^--.+=/).test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + key = m[1]; + var value = m[2]; + if (flags.bools[key]) { + value = value !== 'false'; + } + setArg(key, value, arg); + } else if ((/^--no-.+/).test(arg)) { + key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false, arg); + } else if ((/^--.+/).test(arg)) { + key = arg.match(/^--(.+)/)[1]; + next = args[i + 1]; + if ( + next !== undefined + && !(/^(-|--)[^-]/).test(next) + && !flags.bools[key] + && !flags.allBools + && (aliases[key] ? !aliasIsBoolean(key) : true) + ) { + setArg(key, next, arg); + i += 1; + } else if ((/^(true|false)$/).test(next)) { + setArg(key, next === 'true', arg); + i += 1; + } else { + setArg(key, flags.strings[key] ? 
'' : true, arg); + } + } else if ((/^-[^-]+/).test(arg)) { + var letters = arg.slice(1, -1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + + if (next === '-') { + setArg(letters[j], next, arg); + continue; + } + + if ((/[A-Za-z]/).test(letters[j]) && next[0] === '=') { + setArg(letters[j], next.slice(1), arg); + broken = true; + break; + } + + if ( + (/[A-Za-z]/).test(letters[j]) + && (/-?\d+(\.\d*)?(e-?\d+)?$/).test(next) + ) { + setArg(letters[j], next, arg); + broken = true; + break; + } + + if (letters[j + 1] && letters[j + 1].match(/\W/)) { + setArg(letters[j], arg.slice(j + 2), arg); + broken = true; + break; + } else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg); + } + } + + key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if ( + args[i + 1] + && !(/^(-|--)[^-]/).test(args[i + 1]) + && !flags.bools[key] + && (aliases[key] ? !aliasIsBoolean(key) : true) + ) { + setArg(key, args[i + 1], arg); + i += 1; + } else if (args[i + 1] && (/^(true|false)$/).test(args[i + 1])) { + setArg(key, args[i + 1] === 'true', arg); + i += 1; + } else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + } else { + if (!flags.unknownFn || flags.unknownFn(arg) !== false) { + argv._.push(flags.strings._ || !isNumber(arg) ? arg : Number(arg)); + } + if (opts.stopEarly) { + argv._.push.apply(argv._, args.slice(i + 1)); + break; + } + } + } + + Object.keys(defaults).forEach(function (k) { + if (!hasKey(argv, k.split('.'))) { + setKey(argv, k.split('.'), defaults[k]); + + (aliases[k] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[k]); + }); + } + }); + + if (opts['--']) { + argv['--'] = notFlags.slice(); + } else { + notFlags.forEach(function (k) { + argv._.push(k); + }); + } + + return argv; +}; diff --git a/node_modules/minimist/package.json b/node_modules/minimist/package.json new file mode 100644 index 0000000..c10a334 --- /dev/null +++ b/node_modules/minimist/package.json @@ -0,0 +1,75 @@ +{ + "name": "minimist", + "version": "1.2.8", + "description": "parse argument options", + "main": "index.js", + "devDependencies": { + "@ljharb/eslint-config": "^21.0.1", + "aud": "^2.0.2", + "auto-changelog": "^2.4.0", + "eslint": "=8.8.0", + "in-publish": "^2.0.1", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "tape": "^5.6.3" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=auto", + "prepublishOnly": "safe-publish-latest", + "prepublish": "not-in-publish || npm run prepublishOnly", + "lint": "eslint --ext=js,mjs .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "ff/5", + "firefox/latest", + "chrome/10", + "chrome/latest", + "safari/5.1", + "safari/latest", + "opera/12" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/minimistjs/minimist.git" + }, + "homepage": "https://github.com/minimistjs/minimist", + "keywords": [ + "argv", + "getopt", + "parser", + "optimist" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + } +} diff --git a/node_modules/minimist/test/all_bool.js b/node_modules/minimist/test/all_bool.js new file mode 100644 index 0000000..befa0c9 --- /dev/null +++ b/node_modules/minimist/test/all_bool.js @@ -0,0 +1,34 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('flag boolean true (default all --args to boolean)', function (t) { + var argv = parse(['moo', '--honk', 'cow'], { + boolean: true, + }); + + t.deepEqual(argv, { + honk: true, + _: ['moo', 'cow'], + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); + +test('flag boolean true only affects double hyphen arguments without equals signs', function (t) { + var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], { + boolean: true, + }); + + t.deepEqual(argv, { + honk: true, + tacos: 'good', + p: 55, + _: ['moo', 'cow'], + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); diff --git a/node_modules/minimist/test/bool.js b/node_modules/minimist/test/bool.js new file mode 100644 index 0000000..e58d47e --- /dev/null +++ b/node_modules/minimist/test/bool.js @@ -0,0 +1,177 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false }, + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'], + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse(['-x', '-z', 'one', 'two', 'three'], { + boolean: ['x', 'y', 'z'], + }); + + t.deepEqual(argv, { + x: true, + y: false, + z: true, + _: ['one', 'two', 'three'], + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); +test('boolean and alias with chainable api', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' }, + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' }, + }); + var expected = { + herp: true, + h: true, + _: ['derp'], + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var opts = { + alias: { h: 'herp' }, + boolean: 'herp', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + _: ['derp'], + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias array with options hash', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var alt = ['--harp', 'derp']; + var opts = { + alias: { h: ['herp', 'harp'] }, + boolean: 'h', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var altPropertyArgv = parse(alt, opts); + var expected = { + harp: true, + herp: true, + h: true, + _: ['derp'], + }; + t.same(aliasedArgv, 
expected); + t.same(propertyArgv, expected); + t.same(altPropertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = ['-h', 'true']; + var regular = ['--herp', 'true']; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + _: [], + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function (t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool', + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool', + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); + +test('boolean --boool=true', function (t) { + var parsed = parse(['--boool=true'], { + default: { + boool: false, + }, + boolean: ['boool'], + }); + + t.same(parsed.boool, true); + t.end(); +}); + +test('boolean --boool=false', function (t) { + var parsed = parse(['--boool=false'], { + default: { + boool: true, + }, + boolean: ['boool'], + }); + + t.same(parsed.boool, false); + t.end(); +}); + +test('boolean using something similar to true', function (t) { + var opts = { boolean: 'h' }; + var result = parse(['-h', 'true.txt'], opts); + var expected = { + h: true, + _: ['true.txt'], + }; + + t.same(result, expected); + t.end(); +}); diff --git a/node_modules/minimist/test/dash.js b/node_modules/minimist/test/dash.js new file mode 100644 index 0000000..7078817 --- /dev/null +++ b/node_modules/minimist/test/dash.js @@ -0,0 +1,43 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(6); + t.deepEqual(parse(['-n', '-']), { n: '-', _: [] }); + t.deepEqual(parse(['--nnn', '-']), { nnn: '-', _: [] }); + t.deepEqual(parse(['-']), { _: ['-'] }); + t.deepEqual(parse(['-f-']), { f: '-', _: [] }); + t.deepEqual( + parse(['-b', '-'], { boolean: 'b' }), + { b: true, _: ['-'] } + ); + t.deepEqual( + parse(['-s', '-'], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(2); + t.deepEqual(parse(['-a', '--', 'b']), { a: true, _: ['b'] }); + t.deepEqual(parse(['--a', '--', 'b']), { a: true, _: ['b'] }); +}); + +test('move arguments after the -- into their own `--` array', function (t) { + t.plan(1); + t.deepEqual( + parse(['--name', 'John', 'before', '--', 'after'], { '--': true }), + { name: 'John', _: ['before'], '--': ['after'] } + ); +}); + +test('--- option value', function (t) { + // A multi-dash value is largely an edge case, but check the behaviour is as expected, + // and in particular the same for short option and long option (as made consistent in Jan 2023). 
+ t.plan(2); + t.deepEqual(parse(['-n', '---']), { n: '---', _: [] }); + t.deepEqual(parse(['--nnn', '---']), { nnn: '---', _: [] }); +}); + diff --git a/node_modules/minimist/test/default_bool.js b/node_modules/minimist/test/default_bool.js new file mode 100644 index 0000000..4e9f625 --- /dev/null +++ b/node_modules/minimist/test/default_bool.js @@ -0,0 +1,37 @@ +'use strict'; + +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true }, + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false }, + }); + t.equal(argv.somefalse, false); + t.end(); +}); + +test('boolean default to null', function (t) { + var argv = parse([], { + boolean: 'maybe', + default: { maybe: null }, + }); + t.equal(argv.maybe, null); + + var argvLong = parse(['--maybe'], { + boolean: 'maybe', + default: { maybe: null }, + }); + t.equal(argvLong.maybe, true); + t.end(); +}); diff --git a/node_modules/minimist/test/dotted.js b/node_modules/minimist/test/dotted.js new file mode 100644 index 0000000..126ff03 --- /dev/null +++ b/node_modules/minimist/test/dotted.js @@ -0,0 +1,24 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], { default: { 'a.b': 11 }, alias: { 'a.b': 'aa.bb' } }); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', { default: { 'a.b': 11 }, alias: { 'a.b': 'aa.bb' } }); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); + +test('dotted default with no alias', function (t) { + var argv = parse('', { default: { 'a.b': 11 } }); + t.equal(argv.a.b, 11); + t.end(); +}); diff --git a/node_modules/minimist/test/kv_short.js b/node_modules/minimist/test/kv_short.js new file mode 100644 index 0000000..6d1b53a --- /dev/null +++ b/node_modules/minimist/test/kv_short.js @@ -0,0 +1,32 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('short -k=v', function (t) { + t.plan(1); + + var argv = parse(['-b=123']); + t.deepEqual(argv, { b: 123, _: [] }); +}); + +test('multi short -k=v', function (t) { + t.plan(1); + + var argv = parse(['-a=whatever', '-b=robots']); + t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] }); +}); + +test('short with embedded equals -k=a=b', function (t) { + t.plan(1); + + var argv = parse(['-k=a=b']); + t.deepEqual(argv, { k: 'a=b', _: [] }); +}); + +test('short with later equals like -ab=c', function (t) { + t.plan(1); + + var argv = parse(['-ab=c']); + t.deepEqual(argv, { a: true, b: 'c', _: [] }); +}); diff --git a/node_modules/minimist/test/long.js b/node_modules/minimist/test/long.js new file mode 100644 index 0000000..9fef51f --- /dev/null +++ b/node_modules/minimist/test/long.js @@ -0,0 +1,33 @@ +'use strict'; + +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse(['--bool']), + { bool: true, _: [] }, + 'long boolean' + ); + t.deepEqual( + parse(['--pow', 'xixxle']), + { pow: 'xixxle', _: [] }, + 'long capture sp' + ); + t.deepEqual( + parse(['--pow=xixxle']), + { pow: 'xixxle', _: [] }, + 'long capture eq' + ); + t.deepEqual( + parse(['--host', 'localhost', '--port', '555']), + { host: 'localhost', port: 555, _: [] }, + 'long captures sp' + ); + 
t.deepEqual( + parse(['--host=localhost', '--port=555']), + { host: 'localhost', port: 555, _: [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/node_modules/minimist/test/num.js b/node_modules/minimist/test/num.js new file mode 100644 index 0000000..074393e --- /dev/null +++ b/node_modules/minimist/test/num.js @@ -0,0 +1,38 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789', + ]); + t.deepEqual(argv, { + x: 1234, + y: 5.67, + z: 1e7, + w: '10f', + hex: 0xdeadbeef, + _: [789], + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('already a number', function (t) { + var argv = parse(['-x', 1234, 789]); + t.deepEqual(argv, { x: 1234, _: [789] }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); diff --git a/node_modules/minimist/test/parse.js b/node_modules/minimist/test/parse.js new file mode 100644 index 0000000..65d9d90 --- /dev/null +++ b/node_modules/minimist/test/parse.js @@ -0,0 +1,209 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse(['--no-moo']), + { moo: false, _: [] }, + 'no' + ); + t.deepEqual( + parse(['-v', 'a', '-v', 'b', '-v', 'c']), + { v: ['a', 'b', 'c'], _: [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek', + ]), + { + c: true, + a: true, + t: true, + s: 'woo', + h: 'awesome', + b: true, + bool: true, + key: 'value', + multi: ['quux', 'baz'], + meep: false, + name: 'meowmers', + _: ['bare', '--not-a-flag', 'eek'], + } + ); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse(['-t', 'moo'], { boolean: 't' }); + t.deepEqual(argv, { t: true, _: ['moo'] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: ['t', 'verbose'], + default: { verbose: true }, + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'], + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('newlines in params', function (t) { + var args = parse(['-s', 'X\nX']); + t.deepEqual(args, { _: [], s: 'X\nX' }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse(['--s=X\nX']); + t.deepEqual(args, { _: [], s: 'X\nX' }); + t.end(); +}); + +test('strings', function (t) { + var s = parse(['-s', '0001234'], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse(['-x', '56'], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([' ', ' '], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function (t) { + var s = 
parse(['-s'], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse(['--str'], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse(['-art'], { + string: ['a', 't'], + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + +test('string and alias', function (t) { + var x = parse(['--str', '000123'], { + string: 's', + alias: { s: 'str' }, + }); + + t.equal(x.str, '000123'); + t.equal(typeof x.str, 'string'); + t.equal(x.s, '000123'); + t.equal(typeof x.s, 'string'); + + var y = parse(['-s', '000123'], { + string: 'str', + alias: { str: 's' }, + }); + + t.equal(y.str, '000123'); + t.equal(typeof y.str, 'string'); + t.equal(y.s, '000123'); + t.equal(typeof y.s, 'string'); + + var z = parse(['-s123'], { + alias: { str: ['s', 'S'] }, + string: ['str'], + }); + + t.deepEqual( + z, + { _: [], s: '123', S: '123', str: '123' }, + 'opt.string works with multiple aliases' + ); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + parse(['-I/foo/bar/baz']), + { I: '/foo/bar/baz', _: [] } + ); + t.same( + parse(['-xyz/foo/bar/baz']), + { x: true, y: true, z: '/foo/bar/baz', _: [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse(['-f', '11', '--zoom', '55'], { + alias: { z: 'zoom' }, + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse(['-f', '11', '--zoom', '55'], { + alias: { z: ['zm', 'zoom'] }, + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop', + ]); + + t.same(argv.foo, { + bar: 3, + baz: 4, + quux: { + quibble: 5, + o_O: true, + }, + }); + t.same(argv.beep, { boop: true }); + t.end(); +}); diff --git a/node_modules/minimist/test/parse_modified.js b/node_modules/minimist/test/parse_modified.js new file mode 100644 index 0000000..32965d1 --- /dev/null +++ b/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,11 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions', function (t) { + t.plan(1); + + var argv = parse(['-b', '123'], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: [123] }); +}); diff --git a/node_modules/minimist/test/proto.js b/node_modules/minimist/test/proto.js new file mode 100644 index 0000000..6e629dd --- /dev/null +++ b/node_modules/minimist/test/proto.js @@ -0,0 +1,64 @@ +'use strict'; + +/* eslint no-proto: 0 */ + +var parse = require('../'); +var test = require('tape'); + +test('proto pollution', function (t) { + var argv = parse(['--__proto__.x', '123']); + t.equal({}.x, undefined); + t.equal(argv.__proto__.x, undefined); + t.equal(argv.x, undefined); + t.end(); +}); + +test('proto pollution (array)', function (t) { + var argv = parse(['--x', '4', '--x', '5', '--x.__proto__.z', '789']); + t.equal({}.z, undefined); + t.deepEqual(argv.x, [4, 5]); + t.equal(argv.x.z, undefined); + t.equal(argv.x.__proto__.z, undefined); + t.end(); +}); + +test('proto pollution (number)', function (t) { + var argv = parse(['--x', '5', '--x.__proto__.z', '100']); + t.equal({}.z, undefined); + t.equal((4).z, undefined); + t.equal(argv.x, 5); + t.equal(argv.x.z, undefined); + t.end(); +}); + +test('proto pollution 
(string)', function (t) { + var argv = parse(['--x', 'abc', '--x.__proto__.z', 'def']); + t.equal({}.z, undefined); + t.equal('...'.z, undefined); + t.equal(argv.x, 'abc'); + t.equal(argv.x.z, undefined); + t.end(); +}); + +test('proto pollution (constructor)', function (t) { + var argv = parse(['--constructor.prototype.y', '123']); + t.equal({}.y, undefined); + t.equal(argv.y, undefined); + t.end(); +}); + +test('proto pollution (constructor function)', function (t) { + var argv = parse(['--_.concat.constructor.prototype.y', '123']); + function fnToBeTested() {} + t.equal(fnToBeTested.y, undefined); + t.equal(argv.y, undefined); + t.end(); +}); + +// powered by snyk - https://github.com/backstage/backstage/issues/10343 +test('proto pollution (constructor function) snyk', function (t) { + var argv = parse('--_.constructor.constructor.prototype.foo bar'.split(' ')); + t.equal(function () {}.foo, undefined); + t.equal(argv.y, undefined); + t.end(); +}); diff --git a/node_modules/minimist/test/short.js b/node_modules/minimist/test/short.js new file mode 100644 index 0000000..4a7b843 --- /dev/null +++ b/node_modules/minimist/test/short.js @@ -0,0 +1,69 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse(['-n123']), { n: 123, _: [] }); + t.deepEqual( + parse(['-123', '456']), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse(['-b']), + { b: true, _: [] }, + 'short boolean' + ); + t.deepEqual( + parse(['foo', 'bar', 'baz']), + { _: ['foo', 'bar', 'baz'] }, + 'bare' + ); + t.deepEqual( + parse(['-cats']), + { c: true, a: true, t: true, s: true, _: [] }, + 'group' + ); + t.deepEqual( + parse(['-cats', 'meow']), + { c: true, a: true, t: true, s: 'meow', _: [] }, + 'short group next' + ); + t.deepEqual( + parse(['-h', 'localhost']), + { h: 'localhost', _: [] }, + 'short capture' + ); + t.deepEqual( + parse(['-h', 'localhost', '-p', '555']), + { h: 'localhost', p: 555, _: [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse(['-h', 'localhost', '-fp', '555', 'script.js']), + { + f: true, p: 555, h: 'localhost', + _: ['script.js'], + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse(['-h', 'localhost', '-fp', '555', 'script.js']), + { + f: true, p: 555, h: 'localhost', + _: ['script.js'], + } + ); + t.end(); +}); diff --git a/node_modules/minimist/test/stop_early.js b/node_modules/minimist/test/stop_early.js new file mode 100644 index 0000000..52a6a91 --- /dev/null +++ b/node_modules/minimist/test/stop_early.js @@ -0,0 +1,17 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('stops parsing on the first non-option when stopEarly is set', function (t) { + var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], { + stopEarly: true, + }); + + t.deepEqual(argv, { + aaa: 'bbb', + _: ['ccc', '--ddd'], + }); + + t.end(); +}); diff --git a/node_modules/minimist/test/unknown.js b/node_modules/minimist/test/unknown.js new file mode 100644 index 0000000..4f2e0ca --- /dev/null +++ b/node_modules/minimist/test/unknown.js @@ -0,0 +1,104 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('boolean and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'true', '--derp', 'true']; + var regular = 
['--herp', 'true', '-d', 'true']; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('flag boolean true any double hyphen argument is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], { + boolean: true, + unknown: unknownFn, + }); + t.same(unknown, ['--tacos=good', 'cow', '-p']); + t.same(argv, { + honk: true, + _: [], + }); + t.end(); +}); + +test('string and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'hello', '--derp', 'goodbye']; + var regular = ['--herp', 'hello', '-d', 'moon']; + var opts = { + alias: { h: 'herp' }, + string: 'h', + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('default and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'hello']; + var regular = ['--herp', 'hello']; + var opts = { + default: { h: 'bar' }, + alias: { h: 'herp' }, + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, []); + t.end(); + unknownFn(); // exercise fn for 100% coverage +}); + +test('value following -- is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['--bad', '--', 'good', 'arg']; + var opts = { + '--': true, + unknown: unknownFn, + }; + var argv = parse(aliased, opts); + + t.same(unknown, ['--bad']); + t.same(argv, { + '--': ['good', 'arg'], + _: [], + }); + t.end(); +}); diff --git a/node_modules/minimist/test/whitespace.js b/node_modules/minimist/test/whitespace.js new file mode 100644 index 0000000..4fdaf1d --- /dev/null +++ b/node_modules/minimist/test/whitespace.js @@ -0,0 +1,10 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('whitespace should be whitespace', function (t) { + t.plan(1); + var x = parse(['-x', '\t']).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/on-exit-leak-free/.github/dependabot.yml b/node_modules/on-exit-leak-free/.github/dependabot.yml new file mode 100644 index 0000000..47a54f3 --- /dev/null +++ b/node_modules/on-exit-leak-free/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 +updates: +- package-ecosystem: github-actions + directory: '/' + schedule: + interval: daily + open-pull-requests-limit: 10 +- package-ecosystem: npm + directory: '/' + schedule: + interval: daily + open-pull-requests-limit: 10 \ No newline at end of file diff --git a/node_modules/on-exit-leak-free/.github/workflows/ci.yml b/node_modules/on-exit-leak-free/.github/workflows/ci.yml new file mode 100644 index 0000000..ff41616 --- /dev/null +++ b/node_modules/on-exit-leak-free/.github/workflows/ci.yml @@ -0,0 +1,46 @@ +name: CI +on: + push: + paths-ignore: + - 'docs/**' + - '*.md' + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + matrix: + node-version: [14, 16, 18, 20] + os: [macos-latest, ubuntu-latest, windows-latest] + exclude: + - node-version: 14 + os: windows-latest + + steps: + + - uses: actions/checkout@v4 + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: 
${{ matrix.node-version }} + + - name: Install + run: | + npm install --ignore-scripts + + - name: Run tests + run: | + npm run test + + automerge: + needs: test + runs-on: ubuntu-latest + steps: + - uses: fastify/github-action-merge-dependabot@v3.9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/on-exit-leak-free/LICENSE b/node_modules/on-exit-leak-free/LICENSE new file mode 100644 index 0000000..2c1a038 --- /dev/null +++ b/node_modules/on-exit-leak-free/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/on-exit-leak-free/README.md b/node_modules/on-exit-leak-free/README.md new file mode 100644 index 0000000..5ccea9c --- /dev/null +++ b/node_modules/on-exit-leak-free/README.md @@ -0,0 +1,54 @@ +# on-exit-leak-free + +This module helps dispose of an object gracefully when the Node.js process exits. +It executes a function with a given parameter +on [`'exit'`](https://nodejs.org/api/process.html#event-exit) without leaking memory, +cleaning things up appropriately if the object is garbage collected. + +Requires `WeakRef` and `FinalizationRegistry`, i.e. use Node v14+. + +## Install + +```bash +npm i on-exit-leak-free +``` + +## Example + +```js +'use strict' + +const { register, unregister } = require('on-exit-leak-free') +const assert = require('assert') + +function setup () { + // This object can be safely garbage collected, + // and the resulting shutdown function will not be called. + // There are no leaks. + const obj = { foo: 'bar' } + register(obj, shutdown) + // use registerBeforeExit(obj, shutdown) to execute the function only + // on beforeExit + // call unregister(obj) to remove +} + +let shutdownCalled = false + +// Please make sure that the function passed to register() +// does not create a closure around unnecessary objects. 
+function shutdown (obj, eventName) {
+  console.log(eventName) // exit
+  shutdownCalled = true
+  assert.strictEqual(obj.foo, 'bar')
+}
+
+setup()
+
+process.on('exit', function () {
+  assert.strictEqual(shutdownCalled, true)
+})
+```
+
+## License
+
+MIT
diff --git a/node_modules/on-exit-leak-free/index.js b/node_modules/on-exit-leak-free/index.js
new file mode 100644
index 0000000..8c1ff44
--- /dev/null
+++ b/node_modules/on-exit-leak-free/index.js
@@ -0,0 +1,111 @@
+'use strict'
+
+const refs = {
+  exit: [],
+  beforeExit: []
+}
+const functions = {
+  exit: onExit,
+  beforeExit: onBeforeExit
+}
+
+let registry
+
+function ensureRegistry () {
+  if (registry === undefined) {
+    registry = new FinalizationRegistry(clear)
+  }
+}
+
+function install (event) {
+  if (refs[event].length > 0) {
+    return
+  }
+
+  process.on(event, functions[event])
+}
+
+function uninstall (event) {
+  if (refs[event].length > 0) {
+    return
+  }
+  process.removeListener(event, functions[event])
+  if (refs.exit.length === 0 && refs.beforeExit.length === 0) {
+    registry = undefined
+  }
+}
+
+function onExit () {
+  callRefs('exit')
+}
+
+function onBeforeExit () {
+  callRefs('beforeExit')
+}
+
+function callRefs (event) {
+  for (const ref of refs[event]) {
+    const obj = ref.deref()
+    const fn = ref.fn
+
+    // This should always happen, however GC is
+    // nondeterministic so it might not happen.
+    /* istanbul ignore else */
+    if (obj !== undefined) {
+      fn(obj, event)
+    }
+  }
+  refs[event] = []
+}
+
+function clear (ref) {
+  for (const event of ['exit', 'beforeExit']) {
+    const index = refs[event].indexOf(ref)
+    // splice takes (start, deleteCount): remove just this ref, if present
+    if (index !== -1) {
+      refs[event].splice(index, 1)
+    }
+    uninstall(event)
+  }
+}
+
+function _register (event, obj, fn) {
+  if (obj === undefined) {
+    throw new Error('the object can\'t be undefined')
+  }
+  install(event)
+  const ref = new WeakRef(obj)
+  ref.fn = fn
+
+  ensureRegistry()
+  registry.register(obj, ref)
+  refs[event].push(ref)
+}
+
+function register (obj, fn) {
+  _register('exit', obj, fn)
+}
+
+function registerBeforeExit (obj, fn) {
+  _register('beforeExit', obj, fn)
+}
+
+function unregister (obj) {
+  if (registry === undefined) {
+    return
+  }
+  registry.unregister(obj)
+  for (const event of ['exit', 'beforeExit']) {
+    refs[event] = refs[event].filter((ref) => {
+      const _obj = ref.deref()
+      return _obj && _obj !== obj
+    })
+    uninstall(event)
+  }
+}
+
+module.exports = {
+  register,
+  registerBeforeExit,
+  unregister
+}
diff --git a/node_modules/on-exit-leak-free/package.json b/node_modules/on-exit-leak-free/package.json
new file mode 100644
index 0000000..11eab49
--- /dev/null
+++ b/node_modules/on-exit-leak-free/package.json
@@ -0,0 +1,37 @@
+{
+  "name": "on-exit-leak-free",
+  "version": "2.1.2",
+  "description": "Execute a function on exit without leaking memory, allowing all objects to be garbage collected",
+  "main": "index.js",
+  "scripts": {
+    "test": "standard | snazzy && tap test/*.js"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/mcollina/on-exit-or-gc.git"
+  },
+  "keywords": [
+    "weak",
+    "reference",
+    "finalization",
+    "registry",
+    "process",
+    "exit",
+    "garbage",
+    "collector"
+  ],
+  "author": "Matteo Collina ",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/mcollina/on-exit-or-gc/issues"
+  },
+  "homepage": "https://github.com/mcollina/on-exit-or-gc#readme",
+  "devDependencies": {
+    "snazzy": "^9.0.0",
+    "standard": "^17.0.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": ">=14.0.0"
+  }
+}
diff --git a/node_modules/on-exit-leak-free/test/base.test.js
b/node_modules/on-exit-leak-free/test/base.test.js new file mode 100644 index 0000000..d81a030 --- /dev/null +++ b/node_modules/on-exit-leak-free/test/base.test.js @@ -0,0 +1,30 @@ +'use strict' + +const { test } = require('tap') +const { fork } = require('child_process') +const { join } = require('path') +const { once } = require('events') +const { register } = require('..') + +const files = [ + 'close.js', + 'beforeExit', + 'gc-not-close.js', + 'unregister.js' +] + +for (const file of files) { + test(file, async ({ equal }) => { + const child = fork(join(__dirname, 'fixtures', file), [], { + execArgv: ['--expose-gc'] + }) + + const [code] = await once(child, 'close') + + equal(code, 0) + }) +} + +test('undefined', async ({ throws }) => { + throws(() => register(undefined)) +}) diff --git a/node_modules/on-exit-leak-free/test/event-emitter-leak.test.js b/node_modules/on-exit-leak-free/test/event-emitter-leak.test.js new file mode 100644 index 0000000..26ce2ff --- /dev/null +++ b/node_modules/on-exit-leak-free/test/event-emitter-leak.test.js @@ -0,0 +1,23 @@ +'use strict' + +const t = require('tap') +const { register, unregister } = require('..') + +process.on('warning', () => { + t.fail('warning emitted') +}) + +const objs = [] +for (let i = 0; i < 20; i++) { + const obj = { i } + objs.push(obj) + register(obj, shutdown) +} + +for (const obj of objs) { + unregister(obj) +} + +t.pass('completed') + +function shutdown () {} diff --git a/node_modules/on-exit-leak-free/test/fixtures/beforeExit.js b/node_modules/on-exit-leak-free/test/fixtures/beforeExit.js new file mode 100644 index 0000000..512cfa3 --- /dev/null +++ b/node_modules/on-exit-leak-free/test/fixtures/beforeExit.js @@ -0,0 +1,33 @@ +'use strict' + +const { unregister, registerBeforeExit } = require('../..') +const assert = require('assert') + +function setup () { + const obj = { foo: 'bar' } + registerBeforeExit(obj, shutdown) +} + +let shutdownCalled = false +let timeoutFinished = false +function shutdown (obj, event) { + shutdownCalled = true + if (event === 'beforeExit') { + setTimeout(function () { + timeoutFinished = true + assert.strictEqual(obj.foo, 'bar') + unregister(obj) + }, 100) + process.on('beforeExit', function () { + assert.strictEqual(timeoutFinished, true) + }) + } else { + throw new Error('different event') + } +} + +setup() + +process.on('exit', function () { + assert.strictEqual(shutdownCalled, true) +}) diff --git a/node_modules/on-exit-leak-free/test/fixtures/close.js b/node_modules/on-exit-leak-free/test/fixtures/close.js new file mode 100644 index 0000000..3ddf983 --- /dev/null +++ b/node_modules/on-exit-leak-free/test/fixtures/close.js @@ -0,0 +1,21 @@ +'use strict' + +const { register } = require('../..') +const assert = require('assert') + +function setup () { + const obj = { foo: 'bar' } + register(obj, shutdown) +} + +let shutdownCalled = false +function shutdown (obj) { + shutdownCalled = true + assert.strictEqual(obj.foo, 'bar') +} + +setup() + +process.on('exit', function () { + assert.strictEqual(shutdownCalled, true) +}) diff --git a/node_modules/on-exit-leak-free/test/fixtures/gc-not-close.js b/node_modules/on-exit-leak-free/test/fixtures/gc-not-close.js new file mode 100644 index 0000000..f23c301 --- /dev/null +++ b/node_modules/on-exit-leak-free/test/fixtures/gc-not-close.js @@ -0,0 +1,24 @@ +'use strict' + +const { register } = require('../..') +const assert = require('assert') + +function setup () { + let obj = { foo: 'bar' } + register(obj, shutdown) + setImmediate(function () { + obj = 
undefined
+    gc() // eslint-disable-line
+  })
+}
+
+let shutdownCalled = false
+function shutdown (obj) {
+  shutdownCalled = true
+}
+
+setup()
+
+process.on('exit', function () {
+  assert.strictEqual(shutdownCalled, false)
+})
diff --git a/node_modules/on-exit-leak-free/test/fixtures/unregister.js b/node_modules/on-exit-leak-free/test/fixtures/unregister.js
new file mode 100644
index 0000000..5fe245f
--- /dev/null
+++ b/node_modules/on-exit-leak-free/test/fixtures/unregister.js
@@ -0,0 +1,24 @@
+'use strict'
+
+const { register, unregister } = require('../..')
+const assert = require('assert')
+
+function setup () {
+  const obj = { foo: 'bar' }
+  register(obj, shutdown)
+  setImmediate(function () {
+    unregister(obj)
+    unregister(obj) // twice, this should not throw
+  })
+}
+
+let shutdownCalled = false
+function shutdown (obj) {
+  shutdownCalled = true
+}
+
+setup()
+
+process.on('exit', function () {
+  assert.strictEqual(shutdownCalled, false)
+})
diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE
new file mode 100644
index 0000000..19129e3
--- /dev/null
+++ b/node_modules/once/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/once/README.md b/node_modules/once/README.md
new file mode 100644
index 0000000..1f1ffca
--- /dev/null
+++ b/node_modules/once/README.md
@@ -0,0 +1,79 @@
+# once
+
+Only call a function once.
+
+## usage
+
+```javascript
+var once = require('once')
+
+function load (file, cb) {
+  cb = once(cb)
+  loader.load('file')
+  loader.once('load', cb)
+  loader.once('error', cb)
+}
+```
+
+Or add to the Function.prototype in a responsible way:
+
+```javascript
+// only has to be done once
+require('once').proto()
+
+function load (file, cb) {
+  cb = cb.once()
+  loader.load('file')
+  loader.once('load', cb)
+  loader.once('error', cb)
+}
+```
+
+Ironically, the prototype feature makes this module twice as
+complicated as necessary.
+
+To check whether your function has been called, use `fn.called`. Once the
+function is called for the first time, the return value of the original
+function is saved in `fn.value` and subsequent calls will continue to
+return this value.
+
+```javascript
+var once = require('once')
+
+function load (cb) {
+  cb = once(cb)
+  var stream = createStream()
+  stream.once('data', cb)
+  stream.once('end', function () {
+    if (!cb.called) cb(new Error('not found'))
+  })
+}
+```
+
+## `once.strict(func)`
+
+Throw an error if the function is called twice.
+
+Some functions are expected to be called only once. Using `once` for them would
+potentially hide logical errors.
+
+In the example below, the `greet` function has to call the callback only once:
+
+```javascript
+function greet (name, cb) {
+  // return is missing from the if statement
+  // when no name is passed, the callback is called twice
+  if (!name) cb('Hello anonymous')
+  cb('Hello ' + name)
+}
+
+function log (msg) {
+  console.log(msg)
+}
+
+// this will print 'Hello anonymous' but the logical error will be missed
+greet(null, once(log))
+
+// once.strict will print 'Hello anonymous' and throw an error when the callback is called a second time
+greet(null, once.strict(log))
+```
diff --git a/node_modules/once/once.js b/node_modules/once/once.js
new file mode 100644
index 0000000..2354067
--- /dev/null
+++ b/node_modules/once/once.js
@@ -0,0 +1,42 @@
+var wrappy = require('wrappy')
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
+
+once.proto = once(function () {
+  Object.defineProperty(Function.prototype, 'once', {
+    value: function () {
+      return once(this)
+    },
+    configurable: true
+  })
+
+  Object.defineProperty(Function.prototype, 'onceStrict', {
+    value: function () {
+      return onceStrict(this)
+    },
+    configurable: true
+  })
+})
+
+function once (fn) {
+  var f = function () {
+    if (f.called) return f.value
+    f.called = true
+    return f.value = fn.apply(this, arguments)
+  }
+  f.called = false
+  return f
+}
+
+function onceStrict (fn) {
+  var f = function () {
+    if (f.called)
+      throw new Error(f.onceError)
+    f.called = true
+    return f.value = fn.apply(this, arguments)
+  }
+  var name = fn.name || 'Function wrapped with `once`'
+  f.onceError = name + " shouldn't be called more than once"
+  f.called = false
+  return f
+}
diff --git a/node_modules/once/package.json b/node_modules/once/package.json
new file mode 100644
index 0000000..16815b2
--- /dev/null
+++ b/node_modules/once/package.json
@@ -0,0 +1,33 @@
+{
+  "name": "once",
+  "version": "1.4.0",
+  "description": "Run a function exactly one time",
+  "main": "once.js",
+  "directories": {
+    "test": "test"
+  },
+  "dependencies": {
+    "wrappy": "1"
+  },
+  "devDependencies": {
+    "tap": "^7.0.1"
+  },
+  "scripts": {
+    "test": "tap test/*.js"
+  },
+  "files": [
+    "once.js"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/once"
+  },
+  "keywords": [
+    "once",
+    "function",
+    "one",
+    "single"
+  ],
+  "author": "Isaac Z.
Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/pino-abstract-transport/.github/dependabot.yml b/node_modules/pino-abstract-transport/.github/dependabot.yml new file mode 100644 index 0000000..dfa7fa6 --- /dev/null +++ b/node_modules/pino-abstract-transport/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 10 diff --git a/node_modules/pino-abstract-transport/.github/workflows/ci.yml b/node_modules/pino-abstract-transport/.github/workflows/ci.yml new file mode 100644 index 0000000..c765f4d --- /dev/null +++ b/node_modules/pino-abstract-transport/.github/workflows/ci.yml @@ -0,0 +1,97 @@ +name: CI + +on: + push: + paths-ignore: + - 'docs/**' + - '*.md' + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + +# This allows a subsequently queued workflow run to interrupt previous runs +concurrency: + group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}" + cancel-in-progress: true + +jobs: + dependency-review: + name: Dependency Review + if: github.event_name == 'pull_request' + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Dependency review + uses: actions/dependency-review-action@v3 + + test: + name: Test + runs-on: ${{ matrix.os }} + permissions: + contents: read + strategy: + matrix: + node-version: [18, 20, 22] + os: [macos-latest, ubuntu-latest, windows-latest] + + steps: + - name: Check out repo + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup Node ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + + - name: Restore cached dependencies + uses: actions/cache@v4 + with: + path: node_modules + key: node-modules-${{ hashFiles('package.json') }} + + - name: Install dependencies + run: npm i --ignore-scripts + + - name: Run Tests + run: npm run test-ci + + - name: Coveralls Parallel + uses: coverallsapp/github-action@v2.1.2 + with: + github-token: ${{ secrets.github_token }} + parallel: true + flag-name: run-${{ matrix.node-version }}-${{ matrix.os }} + + coverage: + needs: test + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@v2.1.2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + parallel-finished: true + + automerge: + name: Automerge Dependabot PRs + if: > + github.event_name == 'pull_request' && + github.event.pull_request.user.login == 'dependabot[bot]' + needs: test + permissions: + pull-requests: write + contents: write + runs-on: ubuntu-latest + steps: + - uses: fastify/github-action-merge-dependabot@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/pino-abstract-transport/.husky/pre-commit b/node_modules/pino-abstract-transport/.husky/pre-commit new file mode 100644 index 0000000..610c2a5 --- /dev/null +++ b/node_modules/pino-abstract-transport/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/usr/bin/env sh +. 
"$(dirname -- "$0")/_/husky.sh" + +npm test diff --git a/node_modules/pino-abstract-transport/LICENSE b/node_modules/pino-abstract-transport/LICENSE new file mode 100644 index 0000000..9dbf149 --- /dev/null +++ b/node_modules/pino-abstract-transport/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 pino + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/pino-abstract-transport/README.md b/node_modules/pino-abstract-transport/README.md new file mode 100644 index 0000000..acb91f3 --- /dev/null +++ b/node_modules/pino-abstract-transport/README.md @@ -0,0 +1,172 @@ +# pino-abstract-transport +[![npm version](https://img.shields.io/npm/v/pino-abstract-transport)](https://www.npmjs.com/package/pino-abstract-transport) +[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino-abstract-transport/ci.yml?branch=main)](https://github.com/pinojs/pino-abstract-transport/actions) +[![Coverage Status](https://coveralls.io/repos/github/pinojs/pino-abstract-transport/badge.svg?branch=main)](https://coveralls.io/github/pinojs/pino-abstract-transport?branch=main) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) + +Write Pino transports easily. + +## Install + +```sh +npm i pino-abstract-transport +``` + +## Usage + +```js +import build from 'pino-abstract-transport' + +export default async function (opts) { + return build(async function (source) { + for await (let obj of source) { + console.log(obj) + } + }) +} +``` + +or in CommonJS and streams: + +```js +'use strict' + +const build = require('pino-abstract-transport') + +module.exports = function (opts) { + return build(function (source) { + source.on('data', function (obj) { + console.log(obj) + }) + }) +} +``` + +## Typescript usage + +Install the type definitions for node. Make sure the major version of the type definitions matches the node version you are using. + +#### Node 16 + +```sh +npm i -D @types/node@16 +``` + +## API + +### build(fn, opts) => Stream + +Create a [`split2`](http://npm.im/split2) instance and returns it. +This same instance is also passed to the given function, which is called +synchronously. + +If `opts.transform` is `true`, `pino-abstract-transform` will +wrap the split2 instance and the returned stream using [`duplexify`](https://www.npmjs.com/package/duplexify), +so they can be concatenated into multiple transports. 
+
+#### Events emitted
+
+In addition to all events emitted by a [`Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable)
+stream, it emits the following events:
+
+* `unknown`: emitted when an unparsable line is found; both the line and an optional error are emitted.
+
+#### Options
+
+* `parse` an option to change the data format passed to the build function. When this option is set to `lines`,
+  the data is passed as a string, otherwise the data is passed as an object. Default: `undefined`.
+
+* `close(err, cb)` a function that is called to shut down the transport. It's called both on error and non-error shutdowns.
+  It can also return a promise; in this case, discard the `cb` argument.
+
+* `parseLine(line)` a function that is used to parse a line received from `pino`.
+
+* `expectPinoConfig` a boolean that indicates if the transport expects Pino to add some of its configuration to the stream. Default: `false`.
+
+## Example
+
+### Custom parseLine
+
+You can allow a custom `parseLine` from users while providing a simple and safe default:
+
+```js
+'use strict'
+
+const build = require('pino-abstract-transport')
+
+function defaultParseLine (line) {
+  const obj = JSON.parse(line)
+  // property foo will be added on each line
+  obj.foo = 'bar'
+  return obj
+}
+
+module.exports = function (opts) {
+  const parseLine = typeof opts.parseLine === 'function' ? opts.parseLine : defaultParseLine
+  return build(function (source) {
+    source.on('data', function (obj) {
+      console.log(obj)
+    })
+  }, {
+    parseLine: parseLine
+  })
+}
+```
+
+### Stream concatenation / pipeline
+
+You can pipeline multiple transports:
+
+```js
+const build = require('pino-abstract-transport')
+const { Transform, pipeline } = require('stream')
+
+function buildTransform () {
+  return build(function (source) {
+    return new Transform({
+      objectMode: true,
+      autoDestroy: true,
+      transform (line, enc, cb) {
+        line.service = 'bob'
+        cb(null, JSON.stringify(line))
+      }
+    })
+  }, { enablePipelining: true })
+}
+
+function buildDestination () {
+  return build(function (source) {
+    source.on('data', function (obj) {
+      console.log(obj)
+    })
+  })
+}
+
+pipeline(process.stdin, buildTransform(), buildDestination(), function (err) {
+  console.log('pipeline completed!', err)
+})
+```
+
+### Using pino config
+
+Setting `expectPinoConfig` to `true` will make the transport wait for pino to send its configuration before starting to process logs. It will add `levels`, `messageKey` and `errorKey` to the stream.
+
+When used with an incompatible version of pino, the stream will immediately error.
+
+```js
+import build from 'pino-abstract-transport'
+
+export default function (opts) {
+  return build(async function (source) {
+    for await (const obj of source) {
+      console.log(`[${source.levels.labels[obj.level]}]: ${obj[source.messageKey]}`)
+    }
+  }, {
+    expectPinoConfig: true
+  })
+}
```
+
+## License
+
+MIT
diff --git a/node_modules/pino-abstract-transport/index.d.ts b/node_modules/pino-abstract-transport/index.d.ts
new file mode 100644
index 0000000..1ac49a5
--- /dev/null
+++ b/node_modules/pino-abstract-transport/index.d.ts
@@ -0,0 +1,122 @@
+// Type definitions for pino-abstract-transport 0.4.0
+// Project: https://github.com/pinojs/pino-abstract-transport#readme
+// Definitions by: Diyar Oktay
+
+/// <reference types="node" />
+
+import { Transform } from "stream";
+
+type BuildOptions = {
+  /**
+   * `parseLine(line)` a function that is used to parse a line received from pino.
+   * @default JSON.parse
+   */
+  parseLine?: (line: string) => unknown;
+
+  /**
+   * `parse` an option to change the data format passed to the build function.
+   * @default undefined
+   *
+   */
+  parse?: "lines";
+
+  /**
+   * `close(err, cb)` a function that is called to shut down the transport.
+   * It's called both on error and non-error shutdowns. It can also return
+   * a promise; in this case, discard the `cb` argument.
+   *
+   * @example
+   * ```typescript
+   * {
+   *   close: function (err, cb) {
+   *     process.nextTick(cb, err)
+   *   }
+   * }
+   * ```
+   * */
+  close?: (err: Error, cb: Function) => void | Promise<void>;
+
+  /**
+   * `metadata` If set to false, do not add metadata properties to the returned stream.
+   */
+  metadata?: false;
+
+  /**
+   * `expectPinoConfig` If set to true, the transport will wait for pino to send its
+   * configuration before starting to process logs.
+   */
+  expectPinoConfig?: boolean;
+};
+
+/**
+ * Pass these options to wrap the split2 stream and
+ * the returned stream into a Duplex.
+ */
+type EnablePipelining = BuildOptions & {
+  enablePipelining: true;
+};
+
+/**
+ * Creates a split2 instance and returns it. This same instance is also passed
+ * to the given function, which is called after pino has sent its configuration.
+ *
+ * @returns {Promise} the split2 instance
+ */
+declare function build(
+  fn: (transform: Transform & build.OnUnknown) => void | Promise<void>,
+  opts: BuildOptions & { expectPinoConfig: true }
): Promise<Transform & build.OnUnknown>;
+
+/**
+ * Creates a split2 instance and returns it. This same instance is also passed
+ * to the given function, which is called synchronously.
+ *
+ * @returns {Transform} the split2 instance
+ */
+declare function build(
+  fn: (transform: Transform & build.OnUnknown) => void | Promise<void>,
+  opts?: BuildOptions
+): Transform & build.OnUnknown;
+
+/**
+ * Creates a split2 instance and passes it to the given function, which is called
+ * after pino has sent its configuration. Then wraps the split2 instance and
+ * the returned stream into a Duplex, so they can be concatenated into multiple
+ * transports.
+ *
+ * @returns {Promise} the wrapped split2 instance
+ */
+declare function build(
+  fn: (transform: Transform & build.OnUnknown) => Transform & build.OnUnknown,
+  opts: EnablePipelining & { expectPinoConfig: true }
+): Promise<Transform>;
+
+/**
+ * Creates a split2 instance and passes it to the given function, which is called
+ * synchronously. Then wraps the split2 instance and the returned stream into a
+ * Duplex, so they can be concatenated into multiple transports.
+ * + * @returns {Transform} the wrapped split2 instance + */ +declare function build( + fn: (transform: Transform & build.OnUnknown) => Transform & build.OnUnknown, + opts: EnablePipelining +): Transform; + +declare namespace build { + export interface OnUnknown { + /** + * `unknown` is the event emitted where an unparsable line is found + * + * @param event 'unknown' + * @param line the unparsable line + * @param error the error that was thrown when parsing the line + */ + on( + event: "unknown", + listener: (line: string, error: unknown) => void + ): void; + } +} + +export = build; diff --git a/node_modules/pino-abstract-transport/index.js b/node_modules/pino-abstract-transport/index.js new file mode 100644 index 0000000..009d1f2 --- /dev/null +++ b/node_modules/pino-abstract-transport/index.js @@ -0,0 +1,128 @@ +'use strict' + +const metadata = Symbol.for('pino.metadata') +const split = require('split2') +const { Duplex } = require('stream') +const { parentPort, workerData } = require('worker_threads') + +function createDeferred () { + let resolve + let reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + promise.resolve = resolve + promise.reject = reject + return promise +} + +module.exports = function build (fn, opts = {}) { + const waitForConfig = opts.expectPinoConfig === true && workerData?.workerData?.pinoWillSendConfig === true + const parseLines = opts.parse === 'lines' + const parseLine = typeof opts.parseLine === 'function' ? opts.parseLine : JSON.parse + const close = opts.close || defaultClose + const stream = split(function (line) { + let value + + try { + value = parseLine(line) + } catch (error) { + this.emit('unknown', line, error) + return + } + + if (value === null) { + this.emit('unknown', line, 'Null value ignored') + return + } + + if (typeof value !== 'object') { + value = { + data: value, + time: Date.now() + } + } + + if (stream[metadata]) { + stream.lastTime = value.time + stream.lastLevel = value.level + stream.lastObj = value + } + + if (parseLines) { + return line + } + + return value + }, { autoDestroy: true }) + + stream._destroy = function (err, cb) { + const promise = close(err, cb) + if (promise && typeof promise.then === 'function') { + promise.then(cb, cb) + } + } + + if (opts.expectPinoConfig === true && workerData?.workerData?.pinoWillSendConfig !== true) { + setImmediate(() => { + stream.emit('error', new Error('This transport is not compatible with the current version of pino. 
Please upgrade pino to the latest version.')) + }) + } + + if (opts.metadata !== false) { + stream[metadata] = true + stream.lastTime = 0 + stream.lastLevel = 0 + stream.lastObj = null + } + + if (waitForConfig) { + let pinoConfig = {} + const configReceived = createDeferred() + parentPort.on('message', function handleMessage (message) { + if (message.code === 'PINO_CONFIG') { + pinoConfig = message.config + configReceived.resolve() + parentPort.off('message', handleMessage) + } + }) + + Object.defineProperties(stream, { + levels: { + get () { return pinoConfig.levels } + }, + messageKey: { + get () { return pinoConfig.messageKey } + }, + errorKey: { + get () { return pinoConfig.errorKey } + } + }) + + return configReceived.then(finish) + } + + return finish() + + function finish () { + let res = fn(stream) + + if (res && typeof res.catch === 'function') { + res.catch((err) => { + stream.destroy(err) + }) + + // set it to null to not retain a reference to the promise + res = null + } else if (opts.enablePipelining && res) { + return Duplex.from({ writable: stream, readable: res }) + } + + return stream + } +} + +function defaultClose (err, cb) { + process.nextTick(cb, err) +} diff --git a/node_modules/pino-abstract-transport/package.json b/node_modules/pino-abstract-transport/package.json new file mode 100644 index 0000000..7e0e4fd --- /dev/null +++ b/node_modules/pino-abstract-transport/package.json @@ -0,0 +1,40 @@ +{ + "name": "pino-abstract-transport", + "version": "2.0.0", + "description": "Write Pino transports easily", + "main": "index.js", + "scripts": { + "prepare": "husky install", + "test": "standard | snazzy && tap test/*.test.js && tsd", + "test-ci": "standard | snazzy && tap test/*.test.js --coverage-report=lcovonly && tsd" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/pinojs/pino-abstract-transport.git" + }, + "keywords": [ + "pino", + "transport" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/pinojs/pino-abstract-transport/issues" + }, + "homepage": "https://github.com/pinojs/pino-abstract-transport#readme", + "dependencies": { + "split2": "^4.0.0" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "husky": "^9.0.6", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "tap": "^16.0.0", + "thread-stream": "^2.6.0", + "tsd": "^0.31.0" + }, + "tsd": { + "directory": "./test/types" + } +} diff --git a/node_modules/pino-abstract-transport/test/base.test.js b/node_modules/pino-abstract-transport/test/base.test.js new file mode 100644 index 0000000..2f21241 --- /dev/null +++ b/node_modules/pino-abstract-transport/test/base.test.js @@ -0,0 +1,445 @@ +'use strict' + +const { once } = require('events') +const { Transform, pipeline } = require('stream') + +const { test } = require('tap') +const build = require('../') + +test('parse newlined delimited JSON', ({ same, plan }) => { + plan(2) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + same(expected.shift(), line) + }) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('parse newlined delimited JSON', ({ same, plan }) => { + plan(2) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + 
hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + same(expected.shift(), line) + }) + }, { parse: 'json' }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('null support', ({ same, plan }) => { + plan(1) + const stream = build(function (source) { + source.on('unknown', function (line) { + same('null', line) + }) + }) + + stream.write('null\n') + stream.end() +}) + +test('broken json', ({ match, same, plan }) => { + plan(2) + const expected = '{ "truncated' + const stream = build(function (source) { + source.on('unknown', function (line, error) { + same(expected, line) + const regex = /^(Unexpected end of JSON input|Unterminated string in JSON at position 12)( \(line 1 column 13\))?$/ + match(error.message, regex) + }) + }) + + stream.write(expected + '\n') + stream.end() +}) + +test('pure values', ({ same, ok, plan }) => { + plan(3) + const stream = build(function (source) { + source.on('data', function (line) { + same(line.data, 42) + ok(line.time) + same(new Date(line.time).getTime(), line.time) + }) + }) + + stream.write('42\n') + stream.end() +}) + +test('support async iteration', ({ same, plan }) => { + plan(2) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(async function (source) { + for await (const line of source) { + same(expected.shift(), line) + } + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('rejecting errors the stream', async ({ same, plan }) => { + const stream = build(async function (source) { + throw new Error('kaboom') + }) + + const [err] = await once(stream, 'error') + same(err.message, 'kaboom') +}) + +test('emits an error if the transport expects pino to send the config, but pino is not going to', async function ({ plan, same }) { + plan(1) + const stream = build(() => {}, { expectPinoConfig: true }) + const [err] = await once(stream, 'error') + same(err.message, 'This transport is not compatible with the current version of pino. 
Please upgrade pino to the latest version.') +}) + +test('set metadata', ({ same, plan, equal }) => { + plan(9) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected.shift() + same(this.lastLevel, obj.level) + same(this.lastTime, obj.time) + same(this.lastObj, obj) + same(obj, line) + }) + }, { metadata: true }) + + equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('parse lines', ({ same, plan, equal }) => { + plan(9) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected.shift() + same(this.lastLevel, obj.level) + same(this.lastTime, obj.time) + same(this.lastObj, obj) + same(JSON.stringify(obj), line) + }) + }, { metadata: true, parse: 'lines' }) + + equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('custom parse line function', ({ same, plan, equal }) => { + plan(11) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + let num = 0 + + function parseLine (str) { + const obj = JSON.parse(str) + same(expected[num], obj) + return obj + } + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected[num] + same(this.lastLevel, obj.level) + same(this.lastTime, obj.time) + same(this.lastObj, obj) + same(obj, line) + num++ + }) + }, { metadata: true, parseLine }) + + equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('set metadata (default)', ({ same, plan, equal }) => { + plan(9) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected.shift() + same(this.lastLevel, obj.level) + same(this.lastTime, obj.time) + same(this.lastObj, obj) + same(obj, line) + }) + }) + + equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('do not set metadata', ({ same, plan, equal }) => { + plan(9) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = 
expected.shift() + same(this.lastLevel, undefined) + same(this.lastTime, undefined) + same(this.lastObj, undefined) + same(obj, line) + }) + }, { metadata: false }) + + equal(stream[Symbol.for('pino.metadata')], undefined) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('close logic', ({ same, plan, pass }) => { + plan(3) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + same(expected.shift(), line) + }) + }, { + close (err, cb) { + pass('close called') + process.nextTick(cb, err) + } + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('close with promises', ({ same, plan, pass }) => { + plan(3) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + same(expected.shift(), line) + }) + }, { + async close () { + pass('close called') + } + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('support Transform streams', ({ same, plan, error }) => { + plan(7) + + const expected1 = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const expected2 = [] + + const stream1 = build(function (source) { + const transform = new Transform({ + objectMode: true, + autoDestroy: true, + transform (chunk, enc, cb) { + same(expected1.shift(), chunk) + chunk.service = 'from transform' + expected2.push(chunk) + cb(null, JSON.stringify(chunk) + '\n') + } + }) + + pipeline(source, transform, () => {}) + + return transform + }, { enablePipelining: true }) + + const stream2 = build(function (source) { + source.on('data', function (line) { + same(expected2.shift(), line) + }) + }) + + pipeline(stream1, stream2, function (err) { + error(err) + same(expected1, []) + same(expected2, []) + }) + + const lines = expected1.map(JSON.stringify).join('\n') + stream1.write(lines) + stream1.end() +}) diff --git a/node_modules/pino-abstract-transport/test/fixtures/transport-async-iteration.js b/node_modules/pino-abstract-transport/test/fixtures/transport-async-iteration.js new file mode 100644 index 0000000..ddcdaf3 --- /dev/null +++ b/node_modules/pino-abstract-transport/test/fixtures/transport-async-iteration.js @@ -0,0 +1,22 @@ +'use strict' + +const build = require('../..') + +module.exports = async function (threadStreamOpts) { + const { port, opts = {} } = threadStreamOpts + return build( + async function (source) { + for await (const obj of source) { + port.postMessage({ + data: obj, + pinoConfig: { + levels: source.levels, + messageKey: source.messageKey, + errorKey: source.errorKey + } + }) + } + }, + opts + ) +} diff --git a/node_modules/pino-abstract-transport/test/fixtures/transport-on-data.js b/node_modules/pino-abstract-transport/test/fixtures/transport-on-data.js new file mode 100644 index 
0000000..58143fa --- /dev/null +++ b/node_modules/pino-abstract-transport/test/fixtures/transport-on-data.js @@ -0,0 +1,22 @@ +'use strict' + +const build = require('../..') + +module.exports = async function (threadStreamOpts) { + const { port, opts = {} } = threadStreamOpts + return build( + function (source) { + source.on('data', function (line) { + port.postMessage({ + data: line, + pinoConfig: { + levels: source.levels, + messageKey: source.messageKey, + errorKey: source.errorKey + } + }) + }) + }, + opts + ) +} diff --git a/node_modules/pino-abstract-transport/test/fixtures/transport-transform.js b/node_modules/pino-abstract-transport/test/fixtures/transport-transform.js new file mode 100644 index 0000000..66c84dc --- /dev/null +++ b/node_modules/pino-abstract-transport/test/fixtures/transport-transform.js @@ -0,0 +1,24 @@ +'use strict' + +const { Transform, pipeline } = require('stream') +const build = require('../..') + +module.exports = function (threadStreamOpts) { + const { opts = {} } = threadStreamOpts + return build(function (source) { + const transform = new Transform({ + objectMode: true, + autoDestroy: true, + transform (chunk, enc, cb) { + chunk.service = 'from transform' + chunk.level = `${source.levels.labels[chunk.level]}(${chunk.level})` + chunk[source.messageKey] = chunk[source.messageKey].toUpperCase() + cb(null, JSON.stringify(chunk) + '\n') + } + }) + + pipeline(source, transform, () => {}) + + return transform + }, { ...opts, enablePipelining: true }) +} diff --git a/node_modules/pino-abstract-transport/test/fixtures/worker-pipeline.js b/node_modules/pino-abstract-transport/test/fixtures/worker-pipeline.js new file mode 100644 index 0000000..38af252 --- /dev/null +++ b/node_modules/pino-abstract-transport/test/fixtures/worker-pipeline.js @@ -0,0 +1,15 @@ +'use strict' + +const { pipeline, PassThrough } = require('stream') + +module.exports = async function ({ targets }) { + const streams = await Promise.all(targets.map(async (t) => { + const fn = require(t.target) + const stream = await fn(t.options) + return stream + })) + + const stream = new PassThrough() + pipeline(stream, ...streams, () => {}) + return stream +} diff --git a/node_modules/pino-abstract-transport/test/types/index.test-d.ts b/node_modules/pino-abstract-transport/test/types/index.test-d.ts new file mode 100644 index 0000000..b5f6a85 --- /dev/null +++ b/node_modules/pino-abstract-transport/test/types/index.test-d.ts @@ -0,0 +1,31 @@ +import build, { OnUnknown } from "../../index"; +import { expectType } from "tsd"; +import { Transform } from "stream"; + +/** + * If enablePipelining is set to true, the function passed as an argument + * must return a transform. The unknown event should be listened to on the + * stream passed in the first argument. + */ +expectType(build((source) => source, { enablePipelining: true })); + +/** + * If expectPinoConfig is set with enablePipelining, build returns a promise + */ +expectType<(Promise)>(build((source) => source, { enablePipelining: true, expectPinoConfig: true })); + +/** + * If enablePipelining is not set the unknown event can be listened to on + * the returned stream. 
+ */ +expectType(build((source) => {})); + +/** + * If expectPinoConfig is set, build returns a promise + */ +expectType<(Promise)>(build((source) => {}, { expectPinoConfig: true })); + +/** + * build also accepts an async function + */ +expectType(build(async (source) => {})); diff --git a/node_modules/pino-abstract-transport/test/worker.test.js b/node_modules/pino-abstract-transport/test/worker.test.js new file mode 100644 index 0000000..5a9fa64 --- /dev/null +++ b/node_modules/pino-abstract-transport/test/worker.test.js @@ -0,0 +1,357 @@ +'use strict' + +const { once } = require('events') +const { join } = require('path') +const ThreadStream = require('thread-stream') +const { MessageChannel } = require('worker_threads') +const { test } = require('tap') + +workerTest('transport-on-data.js') +workerTest('transport-async-iteration.js', ' when using async iteration') + +function workerTest (filename, description = '') { + test(`does not wait for pino to send config by default${description}`, function ({ same, plan }) { + plan(4) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { port: port1 }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const emptyPinoConfig = { + levels: undefined, + messageKey: undefined, + errorKey: undefined + } + + port2.on('message', function (message) { + same(expected.shift(), message.data) + same(emptyPinoConfig, message.pinoConfig) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + }) + + test(`does not wait for pino to send config if transport is not expecting it${description}`, function ({ same, plan }) { + plan(4) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + port: port1, + pinoWillSendConfig: true + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const emptyPinoConfig = { + levels: undefined, + messageKey: undefined, + errorKey: undefined + } + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + + port2.on('message', function (message) { + same(expected.shift(), message.data) + same(emptyPinoConfig, message.pinoConfig) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + }) + + test(`waits for the pino config when pino intends to send it and the transport requests it${description}`, function ({ same, plan }) { + plan(4) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + port: port1, + pinoWillSendConfig: true, + opts: { + expectPinoConfig: true + } + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 
'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + same(expected.shift(), message.data) + same(pinoConfig, message.pinoConfig) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.write(lines) + stream.end() + }) + + test(`continues to listen if it receives a message that is not PINO_CONFIG${description}`, function ({ same, plan }) { + plan(4) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', 'transport-on-data.js'), + workerData: { + port: port1, + pinoWillSendConfig: true, + opts: { + expectPinoConfig: true + } + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + same(expected.shift(), message.data) + same(pinoConfig, message.pinoConfig) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.emit('message', 'not a PINO_CONFIG') + stream.emit('message', { code: 'NOT_PINO_CONFIG', config: { levels: 'foo', messageKey: 'bar', errorKey: 'baz' } }) + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.write(lines) + stream.end() + }) + + test(`waits for the pino config even if it is sent after write${description}`, function ({ same, plan }) { + plan(4) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + port: port1, + pinoWillSendConfig: true, + opts: { + expectPinoConfig: true + } + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + same(expected.shift(), message.data) + same(pinoConfig, message.pinoConfig) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.end() + }) + + test(`emits an error if the transport expects pino to send the config, but pino is not going to${description}`, async function ({ plan, same, ok }) { + plan(2) + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + opts: { + expectPinoConfig: true + } + } + }) + const [err] = await once(stream, 'error') + same(err.message, 'This transport is not compatible with the current version of pino. 
Please upgrade pino to the latest version.') + ok(stream.destroyed) + }) +} + +test('waits for the pino config when pipelining', function ({ same, plan }) { + plan(2) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', 'worker-pipeline.js'), + workerData: { + pinoWillSendConfig: true, + targets: [{ + target: './transport-transform.js', + options: { + opts: { expectPinoConfig: true } + } + }, { + target: './transport-on-data.js', + options: { + port: port1 + } + }] + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 'info(30)', + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'HELLO WORLD', + service: 'from transform' + }, { + level: 'info(30)', + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'ANOTHER MESSAGE', + prop: 42, + service: 'from transform' + }] + + const lines = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }].map(JSON.stringify).join('\n') + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + same(expected.shift(), message.data) + }) + + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.write(lines) + stream.end() +}) diff --git a/node_modules/pino-pretty/.borp.yaml b/node_modules/pino-pretty/.borp.yaml new file mode 100644 index 0000000..51d63e0 --- /dev/null +++ b/node_modules/pino-pretty/.borp.yaml @@ -0,0 +1,6 @@ +reporters: + - '@jsumners/line-reporter' + +files: + - 'lib/**/*.test.js' + - 'test/**/*.test.js' \ No newline at end of file diff --git a/node_modules/pino-pretty/.editorconfig b/node_modules/pino-pretty/.editorconfig new file mode 100644 index 0000000..3fec5c5 --- /dev/null +++ b/node_modules/pino-pretty/.editorconfig @@ -0,0 +1,13 @@ + +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# [*.md] +# trim_trailing_whitespace = false diff --git a/node_modules/pino-pretty/.eslintrc b/node_modules/pino-pretty/.eslintrc new file mode 100644 index 0000000..f8c9986 --- /dev/null +++ b/node_modules/pino-pretty/.eslintrc @@ -0,0 +1,8 @@ +{ + "extends": [ + "standard" + ], + "rules": { + "no-var": "off" + } +} diff --git a/node_modules/pino-pretty/.github/dependabot.yml b/node_modules/pino-pretty/.github/dependabot.yml new file mode 100644 index 0000000..35d66ca --- /dev/null +++ b/node_modules/pino-pretty/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 diff --git a/node_modules/pino-pretty/.github/workflows/ci.yml b/node_modules/pino-pretty/.github/workflows/ci.yml new file mode 100644 index 0000000..0d5c00a --- /dev/null +++ b/node_modules/pino-pretty/.github/workflows/ci.yml @@ -0,0 +1,83 @@ +name: CI + +on: + push: + paths-ignore: + - 'docs/**' + - '*.md' + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + +# This allows a subsequently queued workflow run to interrupt previous runs +concurrency: + group: "${{ 
github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}" + cancel-in-progress: true + +jobs: + dependency-review: + name: Dependency Review + if: github.event_name == 'pull_request' + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Dependency review + uses: actions/dependency-review-action@v4 + + test: + name: Test + runs-on: ${{ matrix.os }} + permissions: + contents: read + strategy: + matrix: + node-version: [20, 22, 24] + os: [ubuntu-latest] + pino-version: [^9.0.0] + steps: + - name: Check out repo + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Setup Node ${{ matrix.node-version }} + uses: actions/setup-node@v6 + with: + node-version: ${{ matrix.node-version }} + + - name: Restore cached dependencies + uses: actions/cache@v4 + with: + path: node_modules + key: node-modules-${{ hashFiles('package.json') }} + + - name: Install dependencies + run: npm i --ignore-scripts + + - name: Install pino ${{ matrix.pino-version }} + run: npm i --no-save pino@${{ matrix.pino-version }} + + - name: Run tests + run: npm run ci + + automerge: + name: Automerge Dependabot PRs + if: > + github.event_name == 'pull_request' && + github.event.pull_request.user.login == 'dependabot[bot]' + needs: test + permissions: + pull-requests: write + contents: write + runs-on: ubuntu-latest + steps: + - uses: fastify/github-action-merge-dependabot@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/pino-pretty/.taprc.yaml b/node_modules/pino-pretty/.taprc.yaml new file mode 100644 index 0000000..07652a1 --- /dev/null +++ b/node_modules/pino-pretty/.taprc.yaml @@ -0,0 +1,8 @@ +coverage: true +coverage-map: 'coverage-map.js' + +reporter: terse + +files: + - 'lib/**/*.test.js' + - 'test/**/*.test.js' diff --git a/node_modules/pino-pretty/LICENSE b/node_modules/pino-pretty/LICENSE new file mode 100644 index 0000000..54fd861 --- /dev/null +++ b/node_modules/pino-pretty/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 the Pino team listed at https://github.com/pinojs/pino#the-team + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/pino-pretty/Readme.md b/node_modules/pino-pretty/Readme.md new file mode 100644 index 0000000..4a55b0d --- /dev/null +++ b/node_modules/pino-pretty/Readme.md @@ -0,0 +1,423 @@ + +# pino-pretty + +[![NPM Package Version](https://img.shields.io/npm/v/pino-pretty)](https://www.npmjs.com/package/pino-pretty) +[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino-pretty/ci.yml?branch=master)](https://github.com/pinojs/pino-pretty/actions?query=workflow%3ACI) +[![Coverage Status](https://img.shields.io/coveralls/github/pinojs/pino-pretty)](https://coveralls.io/github/pinojs/pino-pretty?branch=master) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) + +This module provides a basic [ndjson](https://github.com/ndjson/ndjson-spec) formatter to be used in __development__. If an +incoming line looks like it could be a log line from an ndjson logger, in +particular the [Pino](https://getpino.io/) logging library, then it will apply +extra formatting by considering things like the log level and timestamp. + +A standard Pino log line like: + +``` +{"level":30,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo","v":1} +``` + +Will format to: + +``` +[17:35:28.992] INFO (42): hello world +``` + +If you landed on this page due to the deprecation of the `prettyPrint` option +of `pino`, read the [Programmatic Integration](#integration) section. + + +## Example + +Using the [example script][exscript] from the Pino module, we can see what the +prettified logs will look like: + +![demo](demo.png) + +[exscript]: https://github.com/pinojs/pino/blob/25ba61f40ea5a1a753c85002812426d765da52a4/examples/basic.js + + +## Install + +```sh +npm install -g pino-pretty +``` + + +## Usage + +It is recommended to use `pino-pretty` with `pino` +by piping output to the CLI tool: + +```sh +node app.js | pino-pretty +``` + + +### CLI Arguments + +- `--colorize` (`-c`): Adds terminal color escape sequences to the output. +- `--no-colorizeObjects`: Suppress colorization of objects. +- `--crlf` (`-f`): Appends carriage return and line feed, instead of just a line + feed, to the formatted log line. +- `--errorProps` (`-e`): When formatting an error object, display this list + of properties. The list should be a comma-separated list of properties Default: `''`. + Do not use this option if logging from pino@7. Support will be removed from future versions. +- `--levelFirst` (`-l`): Display the log level name before the logged date and time. +- `--errorLikeObjectKeys` (`-k`): Define the log keys that are associated with + error like objects. Default: `err,error`. +- `--messageKey` (`-m`): Define the key that contains the main log message. + Default: `msg`. +- `--levelKey` (`--levelKey`): Define the key that contains the level of the log. Nested keys are supported with each property delimited by a dot character (`.`). + Keys may be escaped to target property names that contains the delimiter itself: + (`--levelKey tags\\.level`). + Default: `level`. +- `--levelLabel` (`-b`): Output the log level using the specified label. + Default: `levelLabel`. +- `--minimumLevel` (`-L`): Hide messages below the specified log level. Accepts a number, `trace`, `debug`, `info`, `warn`, `error`, or `fatal`. If any more filtering is required, consider using [`jq`](https://stedolan.github.io/jq/). +- `--customLevels` (`-x`): Override default levels with custom levels, e.g. 
`-x err:99,info:1` +- `--customColors` (`-X`): Override default colors with custom colors, e.g. `-X err:red,info:blue` +- `--useOnlyCustomProps` (`-U`): Only use custom levels and colors (if provided) (default: true); else fallback to default levels and colors, e.g. `-U false` +- `--messageFormat` (`-o`): Format output of message, e.g. `{levelLabel} - {pid} - url:{req.url}` will output message: `INFO - 1123 - url:localhost:3000/test` + Default: `false` +- `--timestampKey` (`-a`): Define the key that contains the log timestamp. + Default: `time`. +- `--translateTime` (`-t`): Translate the epoch time value into a human-readable + date and time string. This flag also can set the format string to apply when + translating the date to a human-readable format. For a list of available pattern + letters, see the [`dateformat` documentation](https://www.npmjs.com/package/dateformat). + - The default format is `HH:MM:ss.l` in the local timezone. + - Require a `UTC:` prefix to translate time to UTC, e.g. `UTC:yyyy-mm-dd HH:MM:ss.l o`. + - Require a `SYS:` prefix to translate time to the local system's time zone. A + shortcut `SYS:standard` to translate time to `yyyy-mm-dd HH:MM:ss.l o` in + system time zone. +- `--ignore` (`-i`): Ignore one or several keys, nested keys are supported with each property delimited by a dot character (`.`), + keys may be escaped to target property names that contains the delimiter itself: + (`-i time,hostname,req.headers,log\\.domain\\.corp/foo`). + The `--ignore` option would be ignored, if both `--ignore` and `--include` are passed. + Default: `hostname`. +- `--include` (`-I`): The opposite of `--ignore`. Include one or several keys. +- `--hideObject` (`-H`): Hide objects from output (but not error object) +- `--singleLine` (`-S`): Print each log message on a single line (errors will still be multi-line) +- `--config`: Specify a path to a config file containing the pino-pretty options. pino-pretty will attempt to read from a `.pino-prettyrc` in your current directory (`process.cwd`) if not specified + + +## Programmatic Integration + +We recommend against using `pino-pretty` in production and highly +recommend installing `pino-pretty` as a development dependency. + +```bash +npm install --save-dev pino-pretty +``` + +Install `pino-pretty` alongside `pino` and set the transport target to `'pino-pretty'`: + +```js +const pino = require('pino') +const logger = pino({ + transport: { + target: 'pino-pretty' + }, +}) + +logger.info('hi') +``` + +The transport option can also have an options object containing `pino-pretty` options: + +```js +const pino = require('pino') +const logger = pino({ + transport: { + target: 'pino-pretty', + options: { + colorize: true + } + } +}) + +logger.info('hi') +``` + +Use it as a stream: + +```js +const pino = require('pino') +const pretty = require('pino-pretty') +const logger = pino(pretty()) + +logger.info('hi') +``` + +Options are also supported: + +```js +const pino = require('pino') +const pretty = require('pino-pretty') +const stream = pretty({ + colorize: true +}) +const logger = pino(stream) + +logger.info('hi') +``` + +See the [Options](#options) section for all possible options. + +The following configuration ensures that `pino-pretty` is activated only in development mode. + +```js +const pino = require('pino') + +// Define the transport configuration only when the output stream is connected to a TTY +const transport = + process.stdout.isTTY + ? 
{ transport: { target: 'pino-pretty' } } + : {}; + +const logger = pino({ + ...transport +}) + +logger.info('hi') +``` + +### Usage as a stream + +If you are using `pino-pretty` as a stream and you need to provide options to `pino`, +pass the options as the first argument and `pino-pretty` as second argument: + +```js +const pino = require('pino') +const pretty = require('pino-pretty') +const stream = pretty({ + colorize: true +}) +const logger = pino({ level: 'info' }, stream) + +// Nothing is printed +logger.debug('hi') +``` + +### Usage with Jest + +Logging with Jest is _problematic_, as the test framework requires no asynchronous operation to +continue after the test has finished. The following is the only supported way to use this module +with Jest: + +```js +import pino from 'pino' +import pretty from 'pino-pretty' + +test('test pino-pretty', () => { + const logger = pino(pretty({ sync: true })); + logger.info('Info'); + logger.error('Error'); +}); +``` + +### Handling non-serializable options + +Using the new [pino v7+ +transports](https://getpino.io/#/docs/transports?id=v7-transports) not all +options are serializable, for example if you want to use `messageFormat` as a +function you will need to wrap `pino-pretty` in a custom module. + +Executing `main.js` below will log a colorized `hello world` message using a +custom function `messageFormat`: + +```js +// main.js +const pino = require('pino') + +const logger = pino({ + transport: { + target: './pino-pretty-transport', + options: { + colorize: true + } + }, +}) + +logger.info('world') +``` + +```js +// pino-pretty-transport.js +module.exports = opts => require('pino-pretty')({ + ...opts, + messageFormat: (log, messageKey) => `hello ${log[messageKey]}` +}) +``` + +### Checking color support in TTY + +This boolean returns whether the currently used TTY supports colorizing the logs. + +```js +import pretty from 'pino-pretty' + +if (pretty.isColorSupported) { + ... 
+} + +``` + + +### Options + +The options accepted have keys corresponding to the options described in [CLI Arguments](#cliargs): + +```js +{ + colorize: colorette.isColorSupported, // --colorize + colorizeObjects: true, //--colorizeObjects + crlf: false, // --crlf + errorLikeObjectKeys: ['err', 'error'], // --errorLikeObjectKeys (not required to match custom errorKey with pino >=8.21.0) + errorProps: '', // --errorProps + levelFirst: false, // --levelFirst + messageKey: 'msg', // --messageKey (not required with pino >=8.21.0) + levelKey: 'level', // --levelKey + messageFormat: false, // --messageFormat + timestampKey: 'time', // --timestampKey + translateTime: false, // --translateTime + ignore: 'pid,hostname', // --ignore + include: 'level,time', // --include + hideObject: false, // --hideObject + singleLine: false, // --singleLine + customColors: 'err:red,info:blue', // --customColors + customLevels: 'err:99,info:1', // --customLevels (not required with pino >=8.21.0) + levelLabel: 'levelLabel', // --levelLabel + minimumLevel: 'info', // --minimumLevel + useOnlyCustomProps: true, // --useOnlyCustomProps + // The file or file descriptor (1 is stdout) to write to + destination: 1, + + // Alternatively, pass a `sonic-boom` instance (allowing more flexibility): + // destination: new SonicBoom({ dest: 'a/file', mkdir: true }) + + // You can also configure some SonicBoom options directly + sync: false, // by default we write asynchronously + append: true, // the file is opened with the 'a' flag + mkdir: true, // create the target destination + + + customPrettifiers: {} +} +``` + +The `colorize` default follows +[`colorette.isColorSupported`](https://github.com/jorgebucaran/colorette#iscolorsupported). + +The defaults for `sync`, `append`, `mkdir` inherit from +[`SonicBoom(opts)`](https://github.com/pinojs/sonic-boom#API). + +`customPrettifiers` option provides the ability to add a custom prettify function +for specific log properties. `customPrettifiers` is an object, where keys are +log properties that will be prettified and value is the prettify function itself. +For example, if a log line contains a `query` property, +you can specify a prettifier for it: + +```js +{ + customPrettifiers: { + query: prettifyQuery + } +} +//... +const prettifyQuery = value => { + // do some prettify magic +} +``` + +All prettifiers use this function signature: + +```js +['logObjKey']: (output, keyName, logObj, extras) => string +``` + +* `logObjKey` - name of the key of the property in the log object that should have this function applied to it +* `output` - the value of the property in the log object +* `keyName` - the name of the property (useful for `level` and `message` when `levelKey` or `messageKey` is used) +* `logObj` - the full log object, for context +* `extras` - an object containing **additional** data/functions created in the context of this pino-pretty logger or specific to the key (see `level` prettifying below) + * All `extras` objects contain `colors` which is a [Colorette](https://github.com/jorgebucaran/colorette?tab=readme-ov-file#supported-colors) object containing color functions. Colors are enabled based on `colorize` provided to pino-pretty or `colorette.isColorSupported` if `colorize` was not provided. + +Additionally, `customPrettifiers` can be used to format the `time`, `hostname`, +`pid`, `name`, `caller` and `level` outputs AS WELL AS any arbitrary key-value that exists on a given log object. 
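+
+As a minimal runnable sketch of wiring one in (assuming `pino` and
+`pino-pretty` are installed; the `query` property name is purely
+illustrative), using the stream API shown earlier:
+
+```js
+const pino = require('pino')
+const pretty = require('pino-pretty')
+
+const stream = pretty({
+  customPrettifiers: {
+    // `output` is the value of the `query` property on the log object
+    query: (output, keyName, logObj, extras) => `QUERY -> ${JSON.stringify(output)}`
+  }
+})
+
+const logger = pino(stream)
+logger.info({ query: { id: 42 } }, 'lookup')
+```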
+
+An example usage of `customPrettifiers` using all parameters from the function signature:
+
+```js
+{
+  customPrettifiers: {
+    // The argument for this function will be the same
+    // string that's at the start of the log-line by default:
+    time: timestamp => `🕰 ${timestamp}`,
+
+    // The argument for the level-prettifier may vary depending
+    // on if the levelKey option is used or not.
+    // By default this will be the same numerics as the Pino default:
+    level: logLevel => `LEVEL: ${logLevel}`,
+    // Alternatively, `level` provides additional data in `extras`:
+    // * label => derived level label string
+    // * labelColorized => derived level label string with colorette colors applied based on customColors and whether colors are supported
+    level: (logLevel, key, log, { label, labelColorized, colors }) => `LEVEL: ${logLevel} LABEL: ${label} COLORIZED LABEL: ${labelColorized}`,
+
+    // other prettifiers can be used for the other keys if needed, for example
+    hostname: hostname => `MY HOST: ${hostname}`,
+    pid: pid => pid,
+    name: (name, key, log, { colors }) => `${colors.blue(name)}`,
+    caller: (caller, key, log, { colors }) => `${colors.greenBright(caller)}`,
+    myCustomLogProp: (value, key, log, { colors }) => `My Prop -> ${colors.bold(value)} <--`
+  }
+}
```
+
+The `messageFormat` option allows you to customize the message output.
+A template `string` like this can define the format:
+
+```js
+{
+  messageFormat: '{levelLabel} - {pid} - url:{req.url}'
+}
```
+
+In addition to this, `{if ...}` / `{end}` statement blocks can also be specified.
+Else statements and nested conditions are not supported.
+
+```js
+{
+  messageFormat: '{levelLabel} - {if pid}{pid} - {end}url:{req.url}'
+}
```
+
+This option can also be defined as a `function` with this function signature:
+
+```js
+{
+  messageFormat: (log, messageKey, levelLabel, { colors }) => {
+    // do some log message customization
+    //
+    // `colors` is a Colorette object with colors enabled based on `colorize` option
+    return `This is a ${colors.red('colorized')}, custom message: ${log[messageKey]}`;
+  }
+}
```
+
+## Limitations
+
+Because `pino-pretty` uses stdout redirection, in some cases the command may
+terminate with an error due to shell limitations.
+
+For example, currently, mingw64 based shells (e.g. Bash as supplied by [git for
+Windows](https://gitforwindows.org)) are affected and terminate the process with
+a `stdout is not a tty` error message.
+
+Any PRs are welcome!
+
+
+## License
+
+MIT License
diff --git a/node_modules/pino-pretty/benchmark.js b/node_modules/pino-pretty/benchmark.js
new file mode 100644
index 0000000..aaf7412
--- /dev/null
+++ b/node_modules/pino-pretty/benchmark.js
@@ -0,0 +1,105 @@
+'use strict'
+
+// We do not expect amazing numbers from `pino-pretty` as the whole purpose
+// of the module is a very slow operation. However, this benchmark should give
+// us some guidance on how features, or code changes, will affect the
+// performance of the module.
+ +const bench = require('fastbench') +const { + prettyFactory +} = require('./index') + +const max = 10 +const tstampMillis = 1693401358754 + +/* eslint-disable no-var */ +const run = bench([ + function basicLog (cb) { + const pretty = prettyFactory({}) + const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n` + for (var i = 0; i < max; i += 1) { + pretty(input) + } + setImmediate(cb) + }, + + function objectLog (cb) { + const pretty = prettyFactory({}) + const input = { + time: tstampMillis, + pid: 1, + hostname: 'foo', + msg: 'benchmark', + foo: 'foo', + bar: { bar: 'bar' } + } + for (var i = 0; i < max; i += 1) { + pretty(input) + } + setImmediate(cb) + }, + + function coloredLog (cb) { + const pretty = prettyFactory({ colorize: true }) + const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n` + for (var i = 0; i < max; i += 1) { + pretty(input) + } + setImmediate(cb) + }, + + function customPrettifiers (cb) { + const pretty = prettyFactory({ + customPrettifiers: { + time (tstamp) { + return tstamp + }, + pid () { + return '' + } + } + }) + const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n` + for (var i = 0; i < max; i += 1) { + pretty(input) + } + setImmediate(cb) + }, + + function logWithErrorObject (cb) { + const pretty = prettyFactory({}) + const err = Error('boom') + const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"},"err":{"message":"${err.message}","stack":"${err.stack}"}}\n` + for (var i = 0; i < max; i += 1) { + pretty(input) + } + setImmediate(cb) + }, + + function logRemappedMsgErrKeys (cb) { + const pretty = prettyFactory({ + messageKey: 'message', + errorLikeObjectKeys: ['myError'] + }) + const err = Error('boom') + const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","message":"benchmark","foo":"foo","bar":{"bar":"bar"},"myError":{"message":"${err.message}","stack":"${err.stack}"}}\n` + for (var i = 0; i < max; i += 1) { + pretty(input) + } + setImmediate(cb) + }, + + function messageFormatString (cb) { + const pretty = prettyFactory({ + messageFormat: '{levelLabel}{if pid} {pid} - {end}{msg}' + }) + const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n` + for (var i = 0; i < max; i += 1) { + pretty(input) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino-pretty/bin.js b/node_modules/pino-pretty/bin.js new file mode 100644 index 0000000..4a7a5f7 --- /dev/null +++ b/node_modules/pino-pretty/bin.js @@ -0,0 +1,113 @@ +#!/usr/bin/env node + +'use strict' + +const fs = require('node:fs') +const path = require('node:path') +const help = require('help-me')({ + dir: path.join(__dirname, 'help'), + ext: '.txt' +}) +const pump = require('pump') +const sjp = require('secure-json-parse') +const JoyCon = require('joycon') +const { default: stripJsonComments } = require('strip-json-comments') + +const build = require('./') +const CONSTANTS = require('./lib/constants') +const { isObject } = require('./lib/utils') +const minimist = require('minimist') + +const parseJSON = input => { + return sjp.parse(stripJsonComments(input), { protoAction: 'remove' }) +} + +const joycon = new JoyCon({ + parseJSON, + files: [ + 'pino-pretty.config.cjs', + 'pino-pretty.config.js', + '.pino-prettyrc', + '.pino-prettyrc.json' + ], + stopDir: 
path.dirname(process.cwd()) +}) + +const cmd = minimist(process.argv.slice(2)) + +if (cmd.h || cmd.help) { + help.toStdout() +} else { + const DEFAULT_VALUE = '\0default' + + let opts = minimist(process.argv, { + alias: { + colorize: 'c', + crlf: 'f', + errorProps: 'e', + levelFirst: 'l', + minimumLevel: 'L', + customLevels: 'x', + customColors: 'X', + useOnlyCustomProps: 'U', + errorLikeObjectKeys: 'k', + messageKey: 'm', + levelKey: CONSTANTS.LEVEL_KEY, + levelLabel: 'b', + messageFormat: 'o', + timestampKey: 'a', + translateTime: 't', + ignore: 'i', + include: 'I', + hideObject: 'H', + singleLine: 'S' + }, + default: { + messageKey: DEFAULT_VALUE, + minimumLevel: DEFAULT_VALUE, + levelKey: DEFAULT_VALUE, + timestampKey: DEFAULT_VALUE + } + }) + + // Remove default values + opts = filter(opts, value => value !== DEFAULT_VALUE) + const config = loadConfig(opts.config) + // Override config with cli options + opts = Object.assign({}, config, opts) + // set defaults + opts.errorLikeObjectKeys = opts.errorLikeObjectKeys || 'err,error' + opts.errorProps = opts.errorProps || '' + + const res = build(opts) + pump(process.stdin, res) + + // https://github.com/pinojs/pino/pull/358 + /* istanbul ignore next */ + if (!process.stdin.isTTY && !fs.fstatSync(process.stdin.fd).isFile()) { + process.once('SIGINT', function noOp () {}) + } + + function loadConfig (configPath) { + const files = configPath ? [path.resolve(configPath)] : undefined + const result = joycon.loadSync(files) + if (result.path && !isObject(result.data)) { + configPath = configPath || path.basename(result.path) + throw new Error(`Invalid runtime configuration file: ${configPath}`) + } + if (configPath && !result.data) { + throw new Error(`Failed to load runtime configuration file: ${configPath}`) + } + return result.data + } + + function filter (obj, cb) { + return Object.keys(obj).reduce((acc, key) => { + const value = obj[key] + if (cb(value, key)) { + acc[key] = value + } + return acc + }, {}) + } +} diff --git a/node_modules/pino-pretty/coverage-map.js b/node_modules/pino-pretty/coverage-map.js new file mode 100644 index 0000000..ca58935 --- /dev/null +++ b/node_modules/pino-pretty/coverage-map.js @@ -0,0 +1,9 @@ +'use strict' + +module.exports = testFile => { + // Ignore coverage on files that do not have a direct corollary. + if (testFile.startsWith('test/')) return false + + // Indicate the matching name, sans '.test.js', should be checked for coverage. + return testFile.replace(/\.test\.js$/, '.js') +} diff --git a/node_modules/pino-pretty/demo.png b/node_modules/pino-pretty/demo.png new file mode 100644 index 0000000..2575c91 Binary files /dev/null and b/node_modules/pino-pretty/demo.png differ diff --git a/node_modules/pino-pretty/docs/help.md b/node_modules/pino-pretty/docs/help.md new file mode 100644 index 0000000..ad7231f --- /dev/null +++ b/node_modules/pino-pretty/docs/help.md @@ -0,0 +1,24 @@ + +## Systemd example + +If you run your Node.js process via [Systemd](https://www.freedesktop.org/wiki/Software/systemd/) and you examine your logs with [journalctl](https://www.freedesktop.org/software/systemd/man/journalctl.html) some data will be duplicated. You can use a combination of `journalctl` options and `pino-pretty` options to shape the output. 
+
+For example, viewing the prettified logs of a process named `monitor` with `journalctl -u monitor -f | pino-pretty` might output something like this:
+
+```
+Apr 24 07:40:01 nanopi node[6080]: {"level":30,"time":1587706801902,"pid":6080,"hostname":"nanopi","msg":"TT 21","v":1}
+```
+As you can see, the timestamp, hostname, and pid are duplicated.
+If you just want the bare prettified Pino logs, you can strip out the duplicate items from the `journalctl` output with the `-o cat` option of `journalctl`:
+```
+journalctl -u monitor -f -o cat | pino-pretty
+```
+The output now looks something like this:
+```
+[1587706801902] INFO (6080 on nanopi): TT 21
+```
+Make the output even more human readable by using the pino-pretty option `-t` to format the timestamp and `-i pid,hostname` to filter out the pid and hostname, for example `journalctl -u monitor -f -o cat | pino-pretty -t "SYS:standard" -i pid,hostname`:
+```
+[2020-04-24 05:42:24.836 +0000] INFO : TT 21
+```
 diff --git a/node_modules/pino-pretty/eslint.config.js b/node_modules/pino-pretty/eslint.config.js new file mode 100644 index 0000000..5212dc9 --- /dev/null +++ b/node_modules/pino-pretty/eslint.config.js @@ -0,0 +1,3 @@ +'use strict'
+
+module.exports = require('neostandard')({}) diff --git a/node_modules/pino-pretty/help/help.txt b/node_modules/pino-pretty/help/help.txt new file mode 100644 index 0000000..54d6ebb --- /dev/null +++ b/node_modules/pino-pretty/help/help.txt @@ -0,0 +1,68 @@ + Usage: pino-pretty [options] [command]
+
+  Commands:
+    help     Display help
+    version  Display version
+
+  Options:
+    -c, --colorize                Force adding color sequences to the output
+    -C, --config                  Specify a path to a JSON file containing the pino-pretty options
+    -f, --crlf                    Append CRLF instead of LF to formatted lines
+    -X, --customColors            Override default colors using names from https://www.npmjs.com/package/colorette (`-X err:red,info:blue`)
+    -x, --customLevels            Override default levels (`-x err:99,info:1`)
+    -k, --errorLikeObjectKeys     Define which keys contain error objects (`-k err,error`) (defaults to `err,error`)
+    -e, --errorProps              Comma separated list of properties on error objects to show (`*` for all properties) (defaults to ``)
+    -h, --help                    Output usage information
+    -H, --hideObject              Hide objects from output (but not error objects)
+    -i, --ignore                  Ignore one or several keys (`-i time,hostname`)
+    -I, --include                 The opposite of `--ignore`; only include one or several keys (`-I level,time`)
+    -l, --levelFirst              Display the log level as the first output field
+    --levelKey [value]            Detect the log level under the specified key (defaults to "level")
+    -b, --levelLabel [value]      Output the log level using the specified label (defaults to "levelLabel")
+    -o, --messageFormat           Format output of message
+    -m, --messageKey [value]      Highlight the message under the specified key (defaults to "msg")
+    -L, --minimumLevel            Hide messages below the specified log level
+    -S, --singleLine              Print all non-error objects on a single line
+    -a, --timestampKey [value]    Display the timestamp from the specified key (defaults to "time")
+    -t, --translateTime           Display epoch timestamps as UTC ISO format or according to an optional format string (default ISO 8601)
+    -U, --useOnlyCustomProps      Only use custom levels and colors (if provided); don't fall back to default levels and colors (-U false)
+    -v, --version                 Output the version number
+
+  Examples:
+  - To prettify logs, simply pipe a log file through
+    $ cat log | pino-pretty
+
+  - To highlight a string at a key other than 'msg'
+    $ cat log | pino-pretty -m fooMessage
+
+  - To detect the log level at a key other than 'level'
+    $ cat log | pino-pretty --levelKey fooLevel
+
+  - To output the log level label using a key other than 'levelLabel'
+    $ cat log | pino-pretty --levelLabel LVL -o "{LVL}"
+
+  - To display timestamp from a key other than 'time'
+    $ cat log | pino-pretty -a fooTimestamp
+
+  - To convert Epoch timestamps to ISO timestamps use the -t option
+    $ cat log | pino-pretty -t
+
+  - To convert Epoch timestamps to local timezone format use the -t option with a "SYS:" prefixed format string
+    $ cat log | pino-pretty -t "SYS:yyyy-mm-dd HH:MM:ss"
+
+  - To flip level and time/date in standard output use the -l option
+    $ cat log | pino-pretty -l
+
+  - Only print messages with a minimum log level of info
+    $ cat log | pino-pretty -L info
+
+  - Prettify logs but don't print pid and hostname
+    $ cat log | pino-pretty -i pid,hostname
+
+  - Prettify logs but only print time and level
+    $ cat log | pino-pretty -I time,level
+
+  - Load options from a config file
+    $ cat log | pino-pretty --config=/path/to/config.json
+
 diff --git a/node_modules/pino-pretty/index.d.ts b/node_modules/pino-pretty/index.d.ts new file mode 100644 index 0000000..d4aae86 --- /dev/null +++ b/node_modules/pino-pretty/index.d.ts @@ -0,0 +1,234 @@ +// Type definitions for pino-pretty 7.0
+// Project: https://github.com/pinojs/pino-pretty#readme
+// Definitions by: Adam Vigneaux
+//                 tearwyx
+// Minimum TypeScript Version: 3.0
+
+/// <reference types="node" />
+
+import { Transform } from 'node:stream';
+import { OnUnknown } from 'pino-abstract-transport';
+// @ts-ignore fall back to any if pino is not available, i.e. when running pino tests
+import { DestinationStream, Level } from 'pino';
+import * as Colorette from "colorette";
+
+type LogDescriptor = Record<string, unknown>;
+
+declare function PinoPretty(options?: PinoPretty.PrettyOptions): PinoPretty.PrettyStream;
+declare namespace PinoPretty {
+
+  function colorizerFactory(
+    useColors?: boolean,
+    customColors?: [number, string][],
+    useOnlyCustomProps?: boolean,
+  ): {
+    (
+      level?: number | string,
+      opts?: {
+        customLevels?: { [level: number]: string };
+        customLevelNames?: { [name: string]: number };
+      },
+    ): string,
+    message: (input: string | number) => string,
+    greyMessage: (input: string | number) => string,
+  }
+
+  function prettyFactory(options: PrettyOptions): (inputData: any) => string
+
+  interface PrettyOptions {
+    /**
+     * Hide objects from output (but not error objects).
+     * @default false
+     */
+    hideObject?: boolean;
+    /**
+     * Translate the epoch time value into a human readable date and time string. This flag can also set the format
+     * string to apply when translating the date to human readable format. For a list of available pattern letters
+     * see the {@link https://www.npmjs.com/package/dateformat|dateformat documentation}.
+     * - The default format is `yyyy-mm-dd HH:MM:ss.l o` in UTC.
+     * - Requires a `SYS:` prefix to translate time to the local system's timezone. Use the shortcut `SYS:standard`
+     *   to translate time to `yyyy-mm-dd HH:MM:ss.l o` in the system timezone.
+     * @default false
+     */
+    translateTime?: boolean | string;
+    /**
+     * If set to true, it will print the name of the log level as the first field in the log line.
+     * @default false
+     */
+    levelFirst?: boolean;
+    /**
+     * Define the key that contains the level of the log.
+     * @default "level"
+     */
+    levelKey?: string;
+    /**
+     * Output the log level using the specified label.
+     * @default "levelLabel"
+     */
+    levelLabel?: string;
+    /**
+     * The key in the JSON object to use as the highlighted message.
+     * @default "msg"
+     *
+     * Not required when used with pino >= 8.21.0
+     */
+    messageKey?: string;
+    /**
+     * Print each log message on a single line (errors will still be multi-line).
+     * @default false
+     */
+    singleLine?: boolean;
+    /**
+     * The key in the JSON object to use for timestamp display.
+     * @default "time"
+     */
+    timestampKey?: string;
+    /**
+     * The minimum log level to include in the output.
+     * @default "trace"
+     */
+    minimumLevel?: Level;
+    /**
+     * Format output of message, e.g. {level} - {pid} will output message: INFO - 1123
+     * @default false
+     *
+     * @example
+     * ```typescript
+     * {
+     *   messageFormat: (log, messageKey) => {
+     *     const message = log[messageKey];
+     *     if (log.requestId) return `[${log.requestId}] ${message}`;
+     *     return message;
+     *   }
+     * }
+     * ```
+     */
+    messageFormat?: false | string | MessageFormatFunc;
+    /**
+     * If set to true, will add color information to the formatted output message.
+     * @default false
+     */
+    colorize?: boolean;
+    /**
+     * If set to false while `colorize` is `true`, will output JSON objects without color.
+     * @default true
+     */
+    colorizeObjects?: boolean;
+    /**
+     * Appends carriage return and line feed, instead of just a line feed, to the formatted log line.
+     * @default false
+     */
+    crlf?: boolean;
+    /**
+     * Define the log keys that are associated with error like objects.
+     * @default ["err", "error"]
+     *
+     * Not required to handle a custom errorKey when used with pino >= 8.21.0
+     */
+    errorLikeObjectKeys?: string[];
+    /**
+     * When formatting an error object, display this list of properties.
+     * The list should be a comma separated list of properties.
+     * @default ""
+     */
+    errorProps?: string;
+    /**
+     * Ignore one or several keys.
+     * Will be overridden by the `include` option if it is present.
+     * @example "time,hostname"
+     */
+    ignore?: string;
+    /**
+     * Include one or several keys.
+     * @example "time,level"
+     */
+    include?: string;
+    /**
+     * Makes messaging synchronous.
+     * @default false
+     */
+    sync?: boolean;
+    /**
+     * The file, file descriptor, or stream to write to. Defaults to 1 (stdout).
+     * @default 1
+     */
+    destination?: string | number | DestinationStream | NodeJS.WritableStream;
+    /**
+     * Opens the file with the 'a' flag.
+     * @default true
+     */
+    append?: boolean;
+    /**
+     * Ensure directory for destination file exists.
+     * @default false
+     */
+    mkdir?: boolean;
+    /**
+     * Provides the ability to add a custom prettify function for specific log properties.
+     * `customPrettifiers` is an object, where each key is a log property to be prettified
+     * and each value is the corresponding prettify function.
+     * For example, if a log line contains a query property, you can specify a prettifier for it:
+     * @default {}
+     *
+     * @example
+     * ```typescript
+     * {
+     *   customPrettifiers: {
+     *     query: prettifyQuery
+     *   }
+     * }
+     * //...
+     * const prettifyQuery = value => {
+     *   // do some prettify magic
+     * }
+     * ```
+     */
+    customPrettifiers?: Record<string, Prettifier> &
+      {
+        level?: Prettifier
+      };
+    /**
+     * Change the level names and values to a user-defined custom preset.
+     *
+     * Can be a CSV string in 'level_name:level_value' format or an object.
+     *
+     * @example ( CSV ) customLevels: 'info:10,some_level:40'
+     * @example ( Object ) customLevels: { info: 10, some_level: 40 }
+     *
+     * Not required when used with pino >= 8.21.0
+     */
+    customLevels?: string|object;
+    /**
+     * Change the level colors to a user-defined custom preset.
+     *
+     * Can be a CSV string in 'level_name:color_value' format or an object.
+     * Also supports 'default' as level_name for fallback color.
+ * + * @example ( CSV ) customColors: 'info:white,some_level:red' + * @example ( Object ) customColors: { info: 'white', some_level: 'red' } + */ + customColors?: string|object; + /** + * Only use custom levels and colors (if provided); else fallback to default levels and colors. + * + * @default true + */ + useOnlyCustomProps?: boolean; + } + + function build(options: PrettyOptions): PrettyStream; + + type Prettifier = (inputData: string | object, key: string, log: object, extras: PrettifierExtras) => string; + type PrettifierExtras = {colors: Colorette.Colorette, label: string, labelColorized: string}; + type MessageFormatFunc = (log: LogDescriptor, messageKey: string, levelLabel: string, extras: PrettifierExtras) => string; + type PrettyStream = Transform & OnUnknown; + type ColorizerFactory = typeof colorizerFactory; + type PrettyFactory = typeof prettyFactory; + type Build = typeof build; + + // @ts-ignore + export const isColorSupported = Colorette.isColorSupported; + export { build, PinoPretty, PrettyOptions, PrettyStream, colorizerFactory, prettyFactory }; +} + +export = PinoPretty; diff --git a/node_modules/pino-pretty/index.js b/node_modules/pino-pretty/index.js new file mode 100644 index 0000000..fcda6b2 --- /dev/null +++ b/node_modules/pino-pretty/index.js @@ -0,0 +1,189 @@ +'use strict' + +const { isColorSupported } = require('colorette') +const pump = require('pump') +const { Transform } = require('node:stream') +const abstractTransport = require('pino-abstract-transport') +const colors = require('./lib/colors') +const { + ERROR_LIKE_KEYS, + LEVEL_KEY, + LEVEL_LABEL, + MESSAGE_KEY, + TIMESTAMP_KEY +} = require('./lib/constants') +const { + buildSafeSonicBoom, + parseFactoryOptions +} = require('./lib/utils') +const pretty = require('./lib/pretty') + +/** + * @typedef {object} PinoPrettyOptions + * @property {boolean} [colorize] Indicates if colors should be used when + * prettifying. The default will be determined by the terminal capabilities at + * run time. + * @property {boolean} [colorizeObjects=true] Apply coloring to rendered objects + * when coloring is enabled. + * @property {boolean} [crlf=false] End lines with `\r\n` instead of `\n`. + * @property {string|null} [customColors=null] A comma separated list of colors + * to use for specific level labels, e.g. `err:red,info:blue`. + * @property {string|null} [customLevels=null] A comma separated list of user + * defined level names and numbers, e.g. `err:99,info:1`. + * @property {CustomPrettifiers} [customPrettifiers={}] A set of prettifier + * functions to apply to keys defined in this object. + * @property {K_ERROR_LIKE_KEYS} [errorLikeObjectKeys] A list of string property + * names to consider as error objects. + * @property {string} [errorProps=''] A comma separated list of properties on + * error objects to include in the output. + * @property {boolean} [hideObject=false] When `true`, data objects will be + * omitted from the output (except for error objects). + * @property {string} [ignore='hostname'] A comma separated list of log keys + * to omit when outputting the prettified log information. + * @property {undefined|string} [include=undefined] A comma separated list of + * log keys to include in the prettified log information. Only the keys in this + * list will be included in the output. + * @property {boolean} [levelFirst=false] When true, the log level will be the + * first field in the prettified output. 
+ * @property {string} [levelKey='level'] The key name in the log data that + * contains the level value for the log. + * @property {string} [levelLabel='levelLabel'] Token name to use in + * `messageFormat` to represent the name of the logged level. + * @property {null|MessageFormatString|MessageFormatFunction} [messageFormat=null] + * When a string, defines how the prettified line should be formatted according + * to defined tokens. When a function, a synchronous function that returns a + * formatted string. + * @property {string} [messageKey='msg'] Defines the key in incoming logs that + * contains the message of the log, if present. + * @property {undefined|string|number} [minimumLevel=undefined] The minimum + * level for logs that should be processed. Any logs below this level will + * be omitted. + * @property {object} [outputStream=process.stdout] The stream to write + * prettified log lines to. + * @property {boolean} [singleLine=false] When `true` any objects, except error + * objects, in the log data will be printed as a single line instead as multiple + * lines. + * @property {string} [timestampKey='time'] Defines the key in incoming logs + * that contains the timestamp of the log, if present. + * @property {boolean|string} [translateTime=true] When true, will translate a + * JavaScript date integer into a human-readable string. If set to a string, + * it must be a format string. + * @property {boolean} [useOnlyCustomProps=true] When true, only custom levels + * and colors will be used if they have been provided. + */ + +/** + * The default options that will be used when prettifying log lines. + * + * @type {PinoPrettyOptions} + */ +const defaultOptions = { + colorize: isColorSupported, + colorizeObjects: true, + crlf: false, + customColors: null, + customLevels: null, + customPrettifiers: {}, + errorLikeObjectKeys: ERROR_LIKE_KEYS, + errorProps: '', + hideObject: false, + ignore: 'hostname', + include: undefined, + levelFirst: false, + levelKey: LEVEL_KEY, + levelLabel: LEVEL_LABEL, + messageFormat: null, + messageKey: MESSAGE_KEY, + minimumLevel: undefined, + outputStream: process.stdout, + singleLine: false, + timestampKey: TIMESTAMP_KEY, + translateTime: true, + useOnlyCustomProps: true +} + +/** + * Processes the supplied options and returns a function that accepts log data + * and produces a prettified log string. + * + * @param {PinoPrettyOptions} options Configuration for the prettifier. + * @returns {LogPrettifierFunc} + */ +function prettyFactory (options) { + const context = parseFactoryOptions(Object.assign({}, defaultOptions, options)) + return pretty.bind({ ...context, context }) +} + +/** + * @typedef {PinoPrettyOptions} BuildStreamOpts + * @property {object|number|string} [destination] A destination stream, file + * descriptor, or target path to a file. + * @property {boolean} [append] + * @property {boolean} [mkdir] + * @property {boolean} [sync=false] + */ + +/** + * Constructs a {@link LogPrettifierFunc} and a stream to which the produced + * prettified log data will be written. 
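+ *
+ * @example
+ * // illustrative sketch (not from the source): prettify pino output to stdout
+ * // const stream = build({ colorize: true })
+ * // require('pino')(stream).info('hello')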
+ * + * @param {BuildStreamOpts} opts + * @returns {Transform | (Transform & OnUnknown)} + */ +function build (opts = {}) { + let pretty = prettyFactory(opts) + let destination + return abstractTransport(function (source) { + source.on('message', function pinoConfigListener (message) { + if (!message || message.code !== 'PINO_CONFIG') return + Object.assign(opts, { + messageKey: message.config.messageKey, + errorLikeObjectKeys: Array.from(new Set([...(opts.errorLikeObjectKeys || ERROR_LIKE_KEYS), message.config.errorKey])), + customLevels: message.config.levels.values + }) + pretty = prettyFactory(opts) + source.off('message', pinoConfigListener) + }) + const stream = new Transform({ + objectMode: true, + autoDestroy: true, + transform (chunk, enc, cb) { + const line = pretty(chunk) + cb(null, line) + } + }) + + if (typeof opts.destination === 'object' && typeof opts.destination.write === 'function') { + destination = opts.destination + } else { + destination = buildSafeSonicBoom({ + dest: opts.destination || 1, + append: opts.append, + mkdir: opts.mkdir, + sync: opts.sync // by default sonic will be async + }) + } + + source.on('unknown', function (line) { + destination.write(line + '\n') + }) + + pump(source, stream, destination) + return stream + }, { + parse: 'lines', + close (err, cb) { + destination.on('close', () => { + cb(err) + }) + } + }) +} + +module.exports = build +module.exports.build = build +module.exports.PinoPretty = build +module.exports.prettyFactory = prettyFactory +module.exports.colorizerFactory = colors +module.exports.isColorSupported = isColorSupported +module.exports.default = build diff --git a/node_modules/pino-pretty/lib/colors.js b/node_modules/pino-pretty/lib/colors.js new file mode 100644 index 0000000..194a3dc --- /dev/null +++ b/node_modules/pino-pretty/lib/colors.js @@ -0,0 +1,134 @@ +'use strict' + +const nocolor = input => input +const plain = { + default: nocolor, + 60: nocolor, + 50: nocolor, + 40: nocolor, + 30: nocolor, + 20: nocolor, + 10: nocolor, + message: nocolor, + greyMessage: nocolor, + property: nocolor +} + +const { createColors } = require('colorette') +const getLevelLabelData = require('./utils/get-level-label-data') +const availableColors = createColors({ useColor: true }) +const { white, bgRed, red, yellow, green, blue, gray, cyan, magenta } = availableColors + +const colored = { + default: white, + 60: bgRed, + 50: red, + 40: yellow, + 30: green, + 20: blue, + 10: gray, + message: cyan, + greyMessage: gray, + property: magenta +} + +function resolveCustomColoredColorizer (customColors) { + return customColors.reduce( + function (agg, [level, color]) { + agg[level] = typeof availableColors[color] === 'function' ? availableColors[color] : white + + return agg + }, + { default: white, message: cyan, greyMessage: gray, property: magenta } + ) +} + +function colorizeLevel (useOnlyCustomProps) { + return function (level, colorizer, { customLevels, customLevelNames } = {}) { + const [levelStr, levelNum] = getLevelLabelData(useOnlyCustomProps, customLevels, customLevelNames)(level) + + return Object.prototype.hasOwnProperty.call(colorizer, levelNum) ? 
colorizer[levelNum](levelStr) : colorizer.default(levelStr) + } +} + +function plainColorizer (useOnlyCustomProps) { + const newPlainColorizer = colorizeLevel(useOnlyCustomProps) + const customColoredColorizer = function (level, opts) { + return newPlainColorizer(level, plain, opts) + } + customColoredColorizer.message = plain.message + customColoredColorizer.greyMessage = plain.greyMessage + customColoredColorizer.property = plain.property + customColoredColorizer.colors = createColors({ useColor: false }) + return customColoredColorizer +} + +function coloredColorizer (useOnlyCustomProps) { + const newColoredColorizer = colorizeLevel(useOnlyCustomProps) + const customColoredColorizer = function (level, opts) { + return newColoredColorizer(level, colored, opts) + } + customColoredColorizer.message = colored.message + customColoredColorizer.property = colored.property + customColoredColorizer.greyMessage = colored.greyMessage + customColoredColorizer.colors = availableColors + return customColoredColorizer +} + +function customColoredColorizerFactory (customColors, useOnlyCustomProps) { + const onlyCustomColored = resolveCustomColoredColorizer(customColors) + const customColored = useOnlyCustomProps ? onlyCustomColored : Object.assign({}, colored, onlyCustomColored) + const colorizeLevelCustom = colorizeLevel(useOnlyCustomProps) + + const customColoredColorizer = function (level, opts) { + return colorizeLevelCustom(level, customColored, opts) + } + customColoredColorizer.colors = availableColors + customColoredColorizer.message = customColoredColorizer.message || customColored.message + customColoredColorizer.property = customColoredColorizer.property || customColored.property + customColoredColorizer.greyMessage = customColoredColorizer.greyMessage || customColored.greyMessage + + return customColoredColorizer +} + +/** + * Applies colorization, if possible, to a string representing the passed in + * `level`. For example, the default colorizer will return a "green" colored + * string for the "info" level. + * + * @typedef {function} ColorizerFunc + * @param {string|number} level In either case, the input will map to a color + * for the specified level or to the color for `USERLVL` if the level is not + * recognized. + * @property {function} message Accepts one string parameter that will be + * colorized to a predefined color. + * @property {Colorette.Colorette} colors Available color functions based on `useColor` (or `colorize`) context + */ + +/** + * Factory function get a function to colorized levels. The returned function + * also includes a `.message(str)` method to colorize strings. + * + * @param {boolean} [useColors=false] When `true` a function that applies standard + * terminal colors is returned. + * @param {array[]} [customColors] Tuple where first item of each array is the + * level index and the second item is the color + * @param {boolean} [useOnlyCustomProps] When `true`, only use the provided + * custom colors provided and not fallback to default + * + * @returns {ColorizerFunc} `function (level) {}` has a `.message(str)` method to + * apply colorization to a string. The core function accepts either an integer + * `level` or a `string` level. The integer level will map to a known level + * string or to `USERLVL` if not known. The string `level` will map to the same + * colors as the integer `level` and will also default to `USERLVL` if the given + * string is not a recognized level name. 
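+ *
+ * @example
+ * // illustrative, mirroring the unit tests in colors.test.js:
+ * // getColorizer()(30) === 'INFO'
+ * // getColorizer(true)(30) === '\u001B[32mINFO\u001B[39m' (green)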
+ */ +module.exports = function getColorizer (useColors = false, customColors, useOnlyCustomProps) { + if (useColors && customColors !== undefined) { + return customColoredColorizerFactory(customColors, useOnlyCustomProps) + } else if (useColors) { + return coloredColorizer(useOnlyCustomProps) + } + + return plainColorizer(useOnlyCustomProps) +} diff --git a/node_modules/pino-pretty/lib/colors.test.js b/node_modules/pino-pretty/lib/colors.test.js new file mode 100644 index 0000000..ff41690 --- /dev/null +++ b/node_modules/pino-pretty/lib/colors.test.js @@ -0,0 +1,132 @@ +'use strict' + +const { test } = require('node:test') +const getColorizer = require('./colors') + +const testDefaultColorizer = getColorizer => t => { + const colorizer = getColorizer() + let colorized = colorizer(10) + t.assert.strictEqual(colorized, 'TRACE') + + colorized = colorizer(20) + t.assert.strictEqual(colorized, 'DEBUG') + + colorized = colorizer(30) + t.assert.strictEqual(colorized, 'INFO') + + colorized = colorizer(40) + t.assert.strictEqual(colorized, 'WARN') + + colorized = colorizer(50) + t.assert.strictEqual(colorized, 'ERROR') + + colorized = colorizer(60) + t.assert.strictEqual(colorized, 'FATAL') + + colorized = colorizer(900) + t.assert.strictEqual(colorized, 'USERLVL') + + colorized = colorizer('info') + t.assert.strictEqual(colorized, 'INFO') + + colorized = colorizer('use-default') + t.assert.strictEqual(colorized, 'USERLVL') + + colorized = colorizer.message('foo') + t.assert.strictEqual(colorized, 'foo') + + colorized = colorizer.greyMessage('foo') + t.assert.strictEqual(colorized, 'foo') +} + +const testColoringColorizer = getColorizer => t => { + const colorizer = getColorizer(true) + let colorized = colorizer(10) + t.assert.strictEqual(colorized, '\u001B[90mTRACE\u001B[39m') + + colorized = colorizer(20) + t.assert.strictEqual(colorized, '\u001B[34mDEBUG\u001B[39m') + + colorized = colorizer(30) + t.assert.strictEqual(colorized, '\u001B[32mINFO\u001B[39m') + + colorized = colorizer(40) + t.assert.strictEqual(colorized, '\u001B[33mWARN\u001B[39m') + + colorized = colorizer(50) + t.assert.strictEqual(colorized, '\u001B[31mERROR\u001B[39m') + + colorized = colorizer(60) + t.assert.strictEqual(colorized, '\u001B[41mFATAL\u001B[49m') + + colorized = colorizer(900) + t.assert.strictEqual(colorized, '\u001B[37mUSERLVL\u001B[39m') + + colorized = colorizer('info') + t.assert.strictEqual(colorized, '\u001B[32mINFO\u001B[39m') + + colorized = colorizer('use-default') + t.assert.strictEqual(colorized, '\u001B[37mUSERLVL\u001B[39m') + + colorized = colorizer.message('foo') + t.assert.strictEqual(colorized, '\u001B[36mfoo\u001B[39m') + + colorized = colorizer.greyMessage('foo') + t.assert.strictEqual(colorized, '\u001B[90mfoo\u001B[39m') +} + +const testCustomColoringColorizer = getColorizer => t => { + const customLevels = { + 0: 'INFO', + 1: 'ERR', + default: 'USERLVL' + } + const customLevelNames = { + info: 0, + err: 1 + } + const customColors = [ + [0, 'not-a-color'], + [1, 'red'] + ] + const opts = { + customLevels, + customLevelNames + } + + const colorizer = getColorizer(true, customColors) + const colorizerWithCustomPropUse = getColorizer(true, customColors, true) + let colorized = colorizer(1, opts) + t.assert.strictEqual(colorized, '\u001B[31mERR\u001B[39m') + + colorized = colorizer(0, opts) + t.assert.strictEqual(colorized, '\u001B[37mINFO\u001B[39m') + + colorized = colorizer(900) + t.assert.strictEqual(colorized, '\u001B[37mUSERLVL\u001B[39m') + + colorized = colorizer('err', opts) + 
t.assert.strictEqual(colorized, '\u001B[31mERR\u001B[39m') + + colorized = colorizer('info', opts) + t.assert.strictEqual(colorized, '\u001B[37mINFO\u001B[39m') + + colorized = colorizer('use-default') + t.assert.strictEqual(colorized, '\u001B[37mUSERLVL\u001B[39m') + + colorized = colorizer(40, opts) + t.assert.strictEqual(colorized, '\u001B[33mWARN\u001B[39m') + + colorized = colorizerWithCustomPropUse(50, opts) + t.assert.strictEqual(colorized, '\u001B[37mUSERLVL\u001B[39m') +} + +test('returns default colorizer - private export', testDefaultColorizer(getColorizer)) +test('returns colorizing colorizer - private export', testColoringColorizer(getColorizer)) +test('returns custom colorizing colorizer - private export', testCustomColoringColorizer(getColorizer)) + +test('custom props defaults to standard levels', t => { + const colorizer = getColorizer(true, [], true) + const colorized = colorizer('info') + t.assert.strictEqual(colorized, '\u001B[37mINFO\u001B[39m') +}) diff --git a/node_modules/pino-pretty/lib/constants.js b/node_modules/pino-pretty/lib/constants.js new file mode 100644 index 0000000..4322166 --- /dev/null +++ b/node_modules/pino-pretty/lib/constants.js @@ -0,0 +1,55 @@ +'use strict' + +/** + * A set of property names that indicate the value represents an error object. + * + * @typedef {string[]} K_ERROR_LIKE_KEYS + */ + +module.exports = { + DATE_FORMAT: 'yyyy-mm-dd HH:MM:ss.l o', + DATE_FORMAT_SIMPLE: 'HH:MM:ss.l', + + /** + * @type {K_ERROR_LIKE_KEYS} + */ + ERROR_LIKE_KEYS: ['err', 'error'], + + MESSAGE_KEY: 'msg', + + LEVEL_KEY: 'level', + + LEVEL_LABEL: 'levelLabel', + + TIMESTAMP_KEY: 'time', + + LEVELS: { + default: 'USERLVL', + 60: 'FATAL', + 50: 'ERROR', + 40: 'WARN', + 30: 'INFO', + 20: 'DEBUG', + 10: 'TRACE' + }, + + LEVEL_NAMES: { + fatal: 60, + error: 50, + warn: 40, + info: 30, + debug: 20, + trace: 10 + }, + + // Object keys that probably came from a logger like Pino or Bunyan. + LOGGER_KEYS: [ + 'pid', + 'hostname', + 'name', + 'level', + 'time', + 'timestamp', + 'caller' + ] +} diff --git a/node_modules/pino-pretty/lib/pretty.js b/node_modules/pino-pretty/lib/pretty.js new file mode 100644 index 0000000..a2b6813 --- /dev/null +++ b/node_modules/pino-pretty/lib/pretty.js @@ -0,0 +1,171 @@ +'use strict' + +module.exports = pretty + +const sjs = require('secure-json-parse') + +const isObject = require('./utils/is-object') +const prettifyErrorLog = require('./utils/prettify-error-log') +const prettifyLevel = require('./utils/prettify-level') +const prettifyMessage = require('./utils/prettify-message') +const prettifyMetadata = require('./utils/prettify-metadata') +const prettifyObject = require('./utils/prettify-object') +const prettifyTime = require('./utils/prettify-time') +const filterLog = require('./utils/filter-log') + +const { + LEVELS, + LEVEL_KEY, + LEVEL_NAMES +} = require('./constants') + +const jsonParser = input => { + try { + return { value: sjs.parse(input, { protoAction: 'remove' }) } + } catch (err) { + return { err } + } +} + +/** + * Orchestrates processing the received log data according to the provided + * configuration and returns a prettified log string. + * + * @typedef {function} LogPrettifierFunc + * @param {string|object} inputData A log string or a log-like object. + * @returns {string} A string that represents the prettified log data. 
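+ *
+ * @example
+ * // illustrative (assuming a prettifier built via `prettyFactory({})`, colors
+ * // disabled, and a UTC system timezone):
+ * // pretty('{"level":30,"time":1522431328992,"msg":"hello"}\n')
+ * // // => '[17:35:28.992] INFO: hello\n'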
+ */ +function pretty (inputData) { + let log + if (!isObject(inputData)) { + const parsed = jsonParser(inputData) + if (parsed.err || !isObject(parsed.value)) { + // pass through + return inputData + this.EOL + } + log = parsed.value + } else { + log = inputData + } + + if (this.minimumLevel) { + // We need to figure out if the custom levels has the desired minimum + // level & use that one if found. If not, determine if the level exists + // in the standard levels. In both cases, make sure we have the level + // number instead of the level name. + let condition + if (this.useOnlyCustomProps) { + condition = this.customLevels + } else { + condition = this.customLevelNames[this.minimumLevel] !== undefined + } + let minimum + if (condition) { + minimum = this.customLevelNames[this.minimumLevel] + } else { + minimum = LEVEL_NAMES[this.minimumLevel] + } + if (!minimum) { + minimum = typeof this.minimumLevel === 'string' + ? LEVEL_NAMES[this.minimumLevel] + : LEVEL_NAMES[LEVELS[this.minimumLevel].toLowerCase()] + } + + const level = log[this.levelKey === undefined ? LEVEL_KEY : this.levelKey] + if (level < minimum) return + } + + const prettifiedMessage = prettifyMessage({ log, context: this.context }) + + if (this.ignoreKeys || this.includeKeys) { + log = filterLog({ log, context: this.context }) + } + + const prettifiedLevel = prettifyLevel({ + log, + context: { + ...this.context, + // This is odd. The colorizer ends up relying on the value of + // `customProperties` instead of the original `customLevels` and + // `customLevelNames`. + ...this.context.customProperties + } + }) + const prettifiedMetadata = prettifyMetadata({ log, context: this.context }) + const prettifiedTime = prettifyTime({ log, context: this.context }) + + let line = '' + if (this.levelFirst && prettifiedLevel) { + line = `${prettifiedLevel}` + } + + if (prettifiedTime && line === '') { + line = `${prettifiedTime}` + } else if (prettifiedTime) { + line = `${line} ${prettifiedTime}` + } + + if (!this.levelFirst && prettifiedLevel) { + if (line.length > 0) { + line = `${line} ${prettifiedLevel}` + } else { + line = prettifiedLevel + } + } + + if (prettifiedMetadata) { + if (line.length > 0) { + line = `${line} ${prettifiedMetadata}:` + } else { + line = prettifiedMetadata + } + } + + if (line.endsWith(':') === false && line !== '') { + line += ':' + } + + if (prettifiedMessage !== undefined) { + if (line.length > 0) { + line = `${line} ${prettifiedMessage}` + } else { + line = prettifiedMessage + } + } + + if (line.length > 0 && !this.singleLine) { + line += this.EOL + } + + // pino@7+ does not log this anymore + if (log.type === 'Error' && typeof log.stack === 'string') { + const prettifiedErrorLog = prettifyErrorLog({ log, context: this.context }) + if (this.singleLine) line += this.EOL + line += prettifiedErrorLog + } else if (this.hideObject === false) { + const skipKeys = [ + this.messageKey, + this.levelKey, + this.timestampKey + ] + .map((key) => key.replaceAll(/\\/g, '')) + .filter(key => { + return typeof log[key] === 'string' || + typeof log[key] === 'number' || + typeof log[key] === 'boolean' + }) + const prettifiedObject = prettifyObject({ + log, + skipKeys, + context: this.context + }) + + // In single line mode, include a space only if prettified version isn't empty + if (this.singleLine && !/^\s$/.test(prettifiedObject)) { + line += ' ' + } + line += prettifiedObject + } + + return line +} diff --git a/node_modules/pino-pretty/lib/utils/build-safe-sonic-boom.js 
b/node_modules/pino-pretty/lib/utils/build-safe-sonic-boom.js new file mode 100644 index 0000000..3c3b626 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/build-safe-sonic-boom.js @@ -0,0 +1,69 @@ +'use strict' + +module.exports = buildSafeSonicBoom + +const { isMainThread } = require('node:worker_threads') +const SonicBoom = require('sonic-boom') +const noop = require('./noop') + +/** + * Creates a safe SonicBoom instance + * + * @param {object} opts Options for SonicBoom + * + * @returns {object} A new SonicBoom stream + */ +function buildSafeSonicBoom (opts) { + const stream = new SonicBoom(opts) + stream.on('error', filterBrokenPipe) + // if we are sync: false, we must flush on exit + if (!opts.sync && isMainThread) { + setupOnExit(stream) + } + return stream + + function filterBrokenPipe (err) { + if (err.code === 'EPIPE') { + stream.write = noop + stream.end = noop + stream.flushSync = noop + stream.destroy = noop + return + } + stream.removeListener('error', filterBrokenPipe) + } +} + +function setupOnExit (stream) { + /* istanbul ignore next */ + if (global.WeakRef && global.WeakMap && global.FinalizationRegistry) { + // This is leak free, it does not leave event handlers + const onExit = require('on-exit-leak-free') + + onExit.register(stream, autoEnd) + + stream.on('close', function () { + onExit.unregister(stream) + }) + } +} + +/* istanbul ignore next */ +function autoEnd (stream, eventName) { + // This check is needed only on some platforms + + if (stream.destroyed) { + return + } + + if (eventName === 'beforeExit') { + // We still have an event loop, let's use it + stream.flush() + stream.on('drain', function () { + stream.end() + }) + } else { + // We do not have an event loop, so flush synchronously + stream.flushSync() + } +} diff --git a/node_modules/pino-pretty/lib/utils/build-safe-sonic-boom.test.js b/node_modules/pino-pretty/lib/utils/build-safe-sonic-boom.test.js new file mode 100644 index 0000000..4aeb067 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/build-safe-sonic-boom.test.js @@ -0,0 +1,84 @@ +'use strict' + +const { test } = require('node:test') +const { rimraf } = require('rimraf') +const fs = require('node:fs') +const { join } = require('node:path') + +const buildSafeSonicBoom = require('./build-safe-sonic-boom') + +const file = () => { + const dest = join(__dirname, `${process.pid}-${process.hrtime().toString()}`) + const fd = fs.openSync(dest, 'w') + return { dest, fd } +} + +test('should not write when error emitted and code is "EPIPE"', t => { + t.plan(1) + + const { fd, dest } = file() + const stream = buildSafeSonicBoom({ sync: true, fd, mkdir: true }) + t.after(() => rimraf(dest)) + + stream.emit('error', { code: 'EPIPE' }) + stream.write('will not work') + + const dataFile = fs.readFileSync(dest) + t.assert.strictEqual(dataFile.length, 0) +}) + +test('should stream.write works when error code is not "EPIPE"', t => { + t.plan(3) + const { fd, dest } = file() + const stream = buildSafeSonicBoom({ sync: true, fd, mkdir: true }) + + t.after(() => rimraf(dest)) + + stream.on('error', () => t.assert.ok('error emitted')) + + stream.emit('error', 'fake error description') + + t.assert.ok(stream.write('will work')) + + const dataFile = fs.readFileSync(dest) + t.assert.strictEqual(dataFile.toString(), 'will work') +}) + +test('cover setupOnExit', async t => { + t.plan(3) + const { fd, dest } = file() + const stream = buildSafeSonicBoom({ sync: false, fd, mkdir: true }) + + t.after(() => rimraf(dest)) + + stream.on('error', () => t.assert.ok('error 
emitted')) + stream.emit('error', 'fake error description') + + t.assert.ok(stream.write('will work')) + + await watchFileCreated(dest) + + const dataFile = fs.readFileSync(dest) + t.assert.strictEqual(dataFile.toString(), 'will work') +}) + +function watchFileCreated (filename) { + return new Promise((resolve, reject) => { + const TIMEOUT = 2000 + const INTERVAL = 100 + const threshold = TIMEOUT / INTERVAL + let counter = 0 + const interval = setInterval(() => { + // On some CI runs file is created but not filled + if (fs.existsSync(filename) && fs.statSync(filename).size !== 0) { + clearInterval(interval) + resolve() + } else if (counter <= threshold) { + counter++ + } else { + clearInterval(interval) + reject(new Error(`${filename} was not created.`)) + } + }, INTERVAL) + }) +} diff --git a/node_modules/pino-pretty/lib/utils/create-date.js b/node_modules/pino-pretty/lib/utils/create-date.js new file mode 100644 index 0000000..dc84991 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/create-date.js @@ -0,0 +1,26 @@ +'use strict' + +module.exports = createDate + +const isValidDate = require('./is-valid-date') + +/** + * Constructs a JS Date from a number or string. Accepts any single number + * or single string argument that is valid for the Date() constructor, + * or an epoch as a string. + * + * @param {string|number} epoch The representation of the Date. + * + * @returns {Date} The constructed Date. + */ +function createDate (epoch) { + // If epoch is already a valid argument, return the valid Date + let date = new Date(epoch) + if (isValidDate(date)) { + return date + } + + // Convert to a number to permit epoch as a string + date = new Date(+epoch) + return date +} diff --git a/node_modules/pino-pretty/lib/utils/create-date.test.js b/node_modules/pino-pretty/lib/utils/create-date.test.js new file mode 100644 index 0000000..2d9df2b --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/create-date.test.js @@ -0,0 +1,20 @@ +'use strict' + +const { test } = require('node:test') +const createDate = require('./create-date') + +const wanted = 1624450038567 + +test('accepts arguments the Date constructor would accept', t => { + t.plan(2) + t.assert.strictEqual(createDate(1624450038567).getTime(), wanted) + t.assert.strictEqual(createDate('2021-06-23T12:07:18.567Z').getTime(), wanted) +}) + +test('accepts epoch as a string', t => { + // If Date() accepts this argument, the createDate function is not needed + // and can be replaced with Date() + t.plan(2) + t.assert.notEqual(new Date('16244500385-67').getTime(), wanted) + t.assert.strictEqual(createDate('1624450038567').getTime(), wanted) +}) diff --git a/node_modules/pino-pretty/lib/utils/delete-log-property.js b/node_modules/pino-pretty/lib/utils/delete-log-property.js new file mode 100644 index 0000000..502fcee --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/delete-log-property.js @@ -0,0 +1,28 @@ +'use strict' + +module.exports = deleteLogProperty + +const getPropertyValue = require('./get-property-value') +const splitPropertyKey = require('./split-property-key') + +/** + * Deletes a specified property from a log object if it exists. + * This function mutates the passed in `log` object. + * + * @param {object} log The log object to be modified. + * @param {string} property A string identifying the property to be deleted from + * the log object. Accepts nested properties delimited by a `.` + * Delimiter can be escaped to preserve property names that contain the delimiter. + * e.g. 
`'prop1.prop2'` or `'prop2\.domain\.corp.prop2'`
+ */
+function deleteLogProperty (log, property) {
+  const props = splitPropertyKey(property)
+  const propToDelete = props.pop()
+
+  log = getPropertyValue(log, props)
+
+  /* istanbul ignore else */
+  if (log !== null && typeof log === 'object' && Object.prototype.hasOwnProperty.call(log, propToDelete)) {
+    delete log[propToDelete]
+  }
+} diff --git a/node_modules/pino-pretty/lib/utils/delete-log-property.test.js b/node_modules/pino-pretty/lib/utils/delete-log-property.test.js new file mode 100644 index 0000000..d8a4e3a --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/delete-log-property.test.js @@ -0,0 +1,31 @@ +'use strict'
+
+const { test } = require('node:test')
+const { createCopier } = require('fast-copy')
+const fastCopy = createCopier({})
+const deleteLogProperty = require('./delete-log-property')
+
+const logData = {
+  level: 30,
+  data1: {
+    data2: { 'data-3': 'bar' }
+  }
+}
+
+test('deleteLogProperty deletes property of depth 1', t => {
+  const log = fastCopy(logData)
+  deleteLogProperty(log, 'data1')
+  t.assert.deepStrictEqual(log, { level: 30 })
+})
+
+test('deleteLogProperty deletes property of depth 2', t => {
+  const log = fastCopy(logData)
+  deleteLogProperty(log, 'data1.data2')
+  t.assert.deepStrictEqual(log, { level: 30, data1: { } })
+})
+
+test('deleteLogProperty deletes property of depth 3', t => {
+  const log = fastCopy(logData)
+  deleteLogProperty(log, 'data1.data2.data-3')
+  t.assert.deepStrictEqual(log, { level: 30, data1: { data2: { } } })
+}) diff --git a/node_modules/pino-pretty/lib/utils/filter-log.js b/node_modules/pino-pretty/lib/utils/filter-log.js new file mode 100644 index 0000000..66c1b35 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/filter-log.js @@ -0,0 +1,45 @@ +'use strict'
+
+module.exports = filterLog
+
+const { createCopier } = require('fast-copy')
+const fastCopy = createCopier({})
+
+const deleteLogProperty = require('./delete-log-property')
+
+/**
+ * @typedef {object} FilterLogParams
+ * @property {object} log The log object to be modified.
+ * @property {PrettyContext} context The context object built from parsing
+ * the options.
+ */
+
+/**
+ * Filter a log object by removing or including keys accordingly.
+ * When `includeKeys` is passed, `ignoreKeys` will be ignored.
+ * One of `ignoreKeys` or `includeKeys` must be passed in.
+ *
+ * @param {FilterLogParams} input
+ *
+ * @returns {object} A new `log` object instance that
+ * either only includes the keys in `includeKeys`
+ * or omits those in `ignoreKeys`.
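+ *
+ * @example
+ * // illustrative, mirroring filter-log.test.js:
+ * // filterLog({ log: { level: 30, time: 1522431328992 }, context: { includeKeys: ['time'] } })
+ * // // => { time: 1522431328992 }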
+ */ +function filterLog ({ log, context }) { + const { ignoreKeys, includeKeys } = context + const logCopy = fastCopy(log) + + if (includeKeys) { + const logIncluded = {} + + includeKeys.forEach((key) => { + logIncluded[key] = logCopy[key] + }) + return logIncluded + } + + ignoreKeys.forEach((ignoreKey) => { + deleteLogProperty(logCopy, ignoreKey) + }) + return logCopy +} diff --git a/node_modules/pino-pretty/lib/utils/filter-log.test.js b/node_modules/pino-pretty/lib/utils/filter-log.test.js new file mode 100644 index 0000000..7594873 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/filter-log.test.js @@ -0,0 +1,183 @@ +'use strict' + +const { describe, test } = require('node:test') +const filterLog = require('./filter-log') + +const context = { + includeKeys: undefined, + ignoreKeys: undefined +} +const logData = { + level: 30, + time: 1522431328992, + data1: { + data2: { 'data-3': 'bar' }, + error: new Error('test') + } +} +const logData2 = Object.assign({ + 'logging.domain.corp/operation': { + id: 'foo', + producer: 'bar' + } +}, logData) + +describe('#filterLog with an ignoreKeys option', () => { + test('filterLog removes single entry', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + ignoreKeys: ['data1.data2.data-3'] + } + }) + t.assert.deepStrictEqual(result, { level: 30, time: 1522431328992, data1: { data2: { }, error: new Error('test') } }) + }) + + test('filterLog removes multiple entries', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + ignoreKeys: ['time', 'data1'] + } + }) + t.assert.deepStrictEqual(result, { level: 30 }) + }) + + test('filterLog keeps error instance', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + ignoreKeys: [] + } + }) + t.assert.strictEqual(logData.data1.error, result.data1.error) + }) + + test('filterLog removes entry with escape sequence', t => { + const result = filterLog({ + log: logData2, + context: { + ...context, + ignoreKeys: ['data1', 'logging\\.domain\\.corp/operation'] + } + }) + t.assert.deepStrictEqual(result, { level: 30, time: 1522431328992 }) + }) + + test('filterLog removes entry with escape sequence nested', t => { + const result = filterLog({ + log: logData2, + context: { + ...context, + ignoreKeys: ['data1', 'logging\\.domain\\.corp/operation.producer'] + } + }) + t.assert.deepStrictEqual(result, { level: 30, time: 1522431328992, 'logging.domain.corp/operation': { id: 'foo' } }) + }) +}) + +for (const ignoreKeys of [ + undefined, + ['level'], + ['level', 'data1.data2.data-3'] +]) { + describe(`#filterLog with an includeKeys option when the ignoreKeys being ${ignoreKeys}`, () => { + test('filterLog include nothing', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + ignoreKeys, + includeKeys: [] + } + }) + t.assert.deepStrictEqual(result, {}) + }) + + test('filterLog include single entry', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + ignoreKeys, + includeKeys: ['time'] + } + }) + t.assert.deepStrictEqual(result, { time: 1522431328992 }) + }) + + test('filterLog include multiple entries', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + ignoreKeys, + includeKeys: ['time', 'data1'] + } + }) + t.assert.deepStrictEqual(result, { + time: 1522431328992, + data1: { + data2: { 'data-3': 'bar' }, + error: new Error('test') + } + }) + }) + }) +} + +describe('#filterLog with circular references', () => { + const logData = { + level: 30, + time: 
1522431328992, + data1: 'test' + } + logData.circular = logData + + test('filterLog removes single entry', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + ignoreKeys: ['data1'] + } + }) + + t.assert.deepStrictEqual(result.circular.level, result.level) + t.assert.deepStrictEqual(result.circular.time, result.time) + + delete result.circular + t.assert.deepStrictEqual(result, { level: 30, time: 1522431328992 }) + }) + + test('filterLog includes single entry', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + includeKeys: ['data1'] + } + }) + + t.assert.deepStrictEqual(result, { data1: 'test' }) + }) + + test('filterLog includes circular keys', t => { + const result = filterLog({ + log: logData, + context: { + ...context, + includeKeys: ['level', 'circular'] + } + }) + + t.assert.deepStrictEqual(result.circular.level, logData.level) + t.assert.deepStrictEqual(result.circular.time, logData.time) + + delete result.circular + t.assert.deepStrictEqual(result, { level: 30 }) + }) +}) diff --git a/node_modules/pino-pretty/lib/utils/format-time.js b/node_modules/pino-pretty/lib/utils/format-time.js new file mode 100644 index 0000000..96d7e4d --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/format-time.js @@ -0,0 +1,66 @@ +'use strict' + +module.exports = formatTime + +const { + DATE_FORMAT, + DATE_FORMAT_SIMPLE +} = require('../constants') + +const dateformat = require('dateformat') +const createDate = require('./create-date') +const isValidDate = require('./is-valid-date') + +/** + * Converts a given `epoch` to a desired display format. + * + * @param {number|string} epoch The time to convert. May be any value that is + * valid for `new Date()`. + * @param {boolean|string} [translateTime=false] When `false`, the given `epoch` + * will simply be returned. When `true`, the given `epoch` will be converted + * to a string at UTC using the `DATE_FORMAT_SIMPLE` constant. If `translateTime` is + * a string, the following rules are available: + * + * - ``: The string is a literal format string. This format + * string will be used to interpret the `epoch` and return a display string + * at UTC. + * - `SYS:STANDARD`: The returned display string will follow the `DATE_FORMAT` + * constant at the system's local timezone. + * - `SYS:`: The returned display string will follow the given + * `` at the system's local timezone. + * - `UTC:`: The returned display string will follow the given + * `` at UTC. + * + * @returns {number|string} The formatted time. 
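+ *
+ * @example
+ * // illustrative, assuming a UTC system timezone (as in format-time.test.js):
+ * // formatTime(1522431328992, true) // => '17:35:28.992'
+ * // formatTime(1522431328992, 'UTC:yyyy-mm-dd') // => '2018-03-30'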
+ */ +function formatTime (epoch, translateTime = false) { + if (translateTime === false) { + return epoch + } + + const instant = createDate(epoch) + + // If the Date is invalid, do not attempt to format + if (!isValidDate(instant)) { + return epoch + } + + if (translateTime === true) { + return dateformat(instant, DATE_FORMAT_SIMPLE) + } + + const upperFormat = translateTime.toUpperCase() + if (upperFormat === 'SYS:STANDARD') { + return dateformat(instant, DATE_FORMAT) + } + + const prefix = upperFormat.substr(0, 4) + if (prefix === 'SYS:' || prefix === 'UTC:') { + if (prefix === 'UTC:') { + return dateformat(instant, translateTime) + } + return dateformat(instant, translateTime.slice(4)) + } + + return dateformat(instant, `UTC:${translateTime}`) +} diff --git a/node_modules/pino-pretty/lib/utils/format-time.test.js b/node_modules/pino-pretty/lib/utils/format-time.test.js new file mode 100644 index 0000000..61ccc86 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/format-time.test.js @@ -0,0 +1,71 @@ +'use strict' + +process.env.TZ = 'UTC' + +const { test } = require('node:test') +const formatTime = require('./format-time') + +const dateStr = '2019-04-06T13:30:00.000-04:00' +const epoch = new Date(dateStr) +const epochMS = epoch.getTime() + +test('passes through epoch if `translateTime` is `false`', t => { + const formattedTime = formatTime(epochMS) + t.assert.strictEqual(formattedTime, epochMS) +}) + +test('passes through epoch if date is invalid', t => { + const input = 'this is not a date' + const formattedTime = formatTime(input, true) + t.assert.strictEqual(formattedTime, input) +}) + +test('translates epoch milliseconds if `translateTime` is `true`', t => { + const formattedTime = formatTime(epochMS, true) + t.assert.strictEqual(formattedTime, '17:30:00.000') +}) + +test('translates epoch milliseconds to UTC string given format', t => { + const formattedTime = formatTime(epochMS, 'd mmm yyyy H:MM') + t.assert.strictEqual(formattedTime, '6 Apr 2019 17:30') +}) + +test('translates epoch milliseconds to SYS:STANDARD', t => { + const formattedTime = formatTime(epochMS, 'SYS:STANDARD') + t.assert.match(formattedTime, /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [-+]?\d{4}/) +}) + +test('translates epoch milliseconds to SYS:', t => { + const formattedTime = formatTime(epochMS, 'SYS:d mmm yyyy H:MM') + t.assert.match(formattedTime, /\d{1} \w{3} \d{4} \d{1,2}:\d{2}/) +}) + +test('passes through date string if `translateTime` is `false`', t => { + const formattedTime = formatTime(dateStr) + t.assert.strictEqual(formattedTime, dateStr) +}) + +test('translates date string if `translateTime` is `true`', t => { + const formattedTime = formatTime(dateStr, true) + t.assert.strictEqual(formattedTime, '17:30:00.000') +}) + +test('translates date string to UTC string given format', t => { + const formattedTime = formatTime(dateStr, 'd mmm yyyy H:MM') + t.assert.strictEqual(formattedTime, '6 Apr 2019 17:30') +}) + +test('translates date string to SYS:STANDARD', t => { + const formattedTime = formatTime(dateStr, 'SYS:STANDARD') + t.assert.match(formattedTime, /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [-+]?\d{4}/) +}) + +test('translates date string to UTC:', t => { + const formattedTime = formatTime(dateStr, 'UTC:d mmm yyyy H:MM') + t.assert.strictEqual(formattedTime, '6 Apr 2019 17:30') +}) + +test('translates date string to SYS:', t => { + const formattedTime = formatTime(dateStr, 'SYS:d mmm yyyy H:MM') + t.assert.match(formattedTime, /\d{1} \w{3} \d{4} \d{1,2}:\d{2}/) +}) diff --git 
a/node_modules/pino-pretty/lib/utils/get-level-label-data.js b/node_modules/pino-pretty/lib/utils/get-level-label-data.js new file mode 100644 index 0000000..5b27cab --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/get-level-label-data.js @@ -0,0 +1,29 @@ +'use strict' + +module.exports = getLevelLabelData +const { LEVELS, LEVEL_NAMES } = require('../constants') + +/** + * Given initial settings for custom levels/names and whether to use only + * custom props, get the level label that corresponds with a given level number. + * + * @param {boolean} useOnlyCustomProps + * @param {object} customLevels + * @param {object} customLevelNames + * + * @returns {function} A function that takes a level number and returns the level's label string + */ +function getLevelLabelData (useOnlyCustomProps, customLevels, customLevelNames) { + const levels = useOnlyCustomProps ? customLevels || LEVELS : Object.assign({}, LEVELS, customLevels) + const levelNames = useOnlyCustomProps ? customLevelNames || LEVEL_NAMES : Object.assign({}, LEVEL_NAMES, customLevelNames) + return function (level) { + let levelNum = 'default' + if (Number.isInteger(+level)) { + levelNum = Object.prototype.hasOwnProperty.call(levels, level) ? level : levelNum + } else { + levelNum = Object.prototype.hasOwnProperty.call(levelNames, level.toLowerCase()) ? levelNames[level.toLowerCase()] : levelNum + } + + return [levels[levelNum], levelNum] + } +} diff --git a/node_modules/pino-pretty/lib/utils/get-property-value.js b/node_modules/pino-pretty/lib/utils/get-property-value.js new file mode 100644 index 0000000..8305a9f --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/get-property-value.js @@ -0,0 +1,30 @@ +'use strict' + +module.exports = getPropertyValue + +const splitPropertyKey = require('./split-property-key') + +/** + * Gets a specified property from an object if it exists. + * + * @param {object} obj The object to be searched. + * @param {string|string[]} property A string, or an array of strings, identifying + * the property to be retrieved from the object. + * Accepts nested properties delimited by a `.`. + * Delimiter can be escaped to preserve property names that contain the delimiter. + * e.g. `'prop1.prop2'` or `'prop2\.domain\.corp.prop2'`. + * + * @returns {*} + */ +function getPropertyValue (obj, property) { + const props = Array.isArray(property) ?
property : splitPropertyKey(property) + + for (const prop of props) { + if (!Object.prototype.hasOwnProperty.call(obj, prop)) { + return + } + obj = obj[prop] + } + + return obj +} diff --git a/node_modules/pino-pretty/lib/utils/get-property-value.test.js b/node_modules/pino-pretty/lib/utils/get-property-value.test.js new file mode 100644 index 0000000..fcce7be --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/get-property-value.test.js @@ -0,0 +1,31 @@ +'use strict' + +const { test } = require('node:test') +const getPropertyValue = require('./get-property-value') + +test('getPropertyValue returns the value of the property', t => { + const result = getPropertyValue({ + foo: 'bar' + }, 'foo') + t.assert.strictEqual(result, 'bar') +}) + +test('getPropertyValue returns the value of the nested property', t => { + const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, 'extra.foo.value') + t.assert.strictEqual(result, 'bar') +}) + +test('getPropertyValue returns the value of the nested property using the array of nested property keys', t => { + const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, ['extra', 'foo', 'value']) + t.assert.strictEqual(result, 'bar') +}) + +test('getPropertyValue returns undefined for non-existing properties', t => { + const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, 'extra.foo.value-2') + t.assert.strictEqual(result, undefined) +}) + +test('getPropertyValue returns undefined for non-existing properties using the array of nested property keys', t => { + const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, ['extra', 'foo', 'value-2']) + t.assert.strictEqual(result, undefined) +}) diff --git a/node_modules/pino-pretty/lib/utils/handle-custom-levels-names-opts.js b/node_modules/pino-pretty/lib/utils/handle-custom-levels-names-opts.js new file mode 100644 index 0000000..0c0257e --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/handle-custom-levels-names-opts.js @@ -0,0 +1,38 @@ +'use strict' + +module.exports = handleCustomLevelsNamesOpts + +/** + * Parse a CSV string or options object that maps level + * labels to level values. + * + * @param {string|object} cLevels An object mapping level + * names to level values, e.g. `{ info: 30, debug: 65 }`, or a + * CSV string in the format `level_name:level_value`, e.g. + * `info:30,debug:65`. + * + * @returns {object} An object mapping level names to level values, + * e.g. `{ info: 30, debug: 65 }`.
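+ * + * @example + * // Illustrative usage added in this review, not part of the upstream file; + * // it mirrors the object-parameter case in the test file below. + * handleCustomLevelsNamesOpts({ OK: 10, warn: 20 }) + * // => { ok: 10, warn: 20 } (level names are lower-cased)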
+ */ +function handleCustomLevelsNamesOpts (cLevels) { + if (!cLevels) return {} + + if (typeof cLevels === 'string') { + return cLevels + .split(',') + .reduce((agg, value, idx) => { + const [levelName, levelNum = idx] = value.split(':') + agg[levelName.toLowerCase()] = levelNum + return agg + }, {}) + } else if (Object.prototype.toString.call(cLevels) === '[object Object]') { + return Object + .keys(cLevels) + .reduce((agg, levelName) => { + agg[levelName.toLowerCase()] = cLevels[levelName] + return agg + }, {}) + } else { + return {} + } +} diff --git a/node_modules/pino-pretty/lib/utils/handle-custom-levels-names-opts.test.js b/node_modules/pino-pretty/lib/utils/handle-custom-levels-names-opts.test.js new file mode 100644 index 0000000..6ffbe4b --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/handle-custom-levels-names-opts.test.js @@ -0,0 +1,45 @@ +'use strict' + +const { test } = require('node:test') +const match = require('@jsumners/assert-match') +const handleCustomLevelsNamesOpts = require('./handle-custom-levels-names-opts') + +test('returns an empty object `{}` for undefined parameter', t => { + const handledCustomLevelNames = handleCustomLevelsNamesOpts() + t.assert.deepStrictEqual(handledCustomLevelNames, {}) +}) + +test('returns an empty object `{}` for unknown parameter', t => { + const handledCustomLevelNames = handleCustomLevelsNamesOpts(123) + t.assert.deepStrictEqual(handledCustomLevelNames, {}) +}) + +test('returns a filled object for string parameter', t => { + const handledCustomLevelNames = handleCustomLevelsNamesOpts('ok:10,warn:20,error:35') + match(handledCustomLevelNames, { + ok: 10, + warn: 20, + error: 35 + }, t) +}) + +test('returns a filled object for object parameter', t => { + const handledCustomLevelNames = handleCustomLevelsNamesOpts({ + ok: 10, + warn: 20, + error: 35 + }) + match(handledCustomLevelNames, { + ok: 10, + warn: 20, + error: 35 + }, t) +}) + +test('defaults missing level num to first index', t => { + const result = handleCustomLevelsNamesOpts('ok:10,info') + match(result, { + ok: 10, + info: 1 + }, t) +}) diff --git a/node_modules/pino-pretty/lib/utils/handle-custom-levels-opts.js b/node_modules/pino-pretty/lib/utils/handle-custom-levels-opts.js new file mode 100644 index 0000000..33931da --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/handle-custom-levels-opts.js @@ -0,0 +1,39 @@ +'use strict' + +module.exports = handleCustomLevelsOpts + +/** + * Parse a CSV string or options object that specifies + * configuration for custom levels. + * + * @param {string|object} cLevels An object mapping level + * names to values, e.g. `{ info: 30, debug: 65 }`, or a + * CSV string in the format `level_name:level_value`, e.g. + * `info:30,debug:65`. + * + * @returns {object} An object mapping levels to labels that + * appear in logs, e.g. `{ '30': 'INFO', '65': 'DEBUG' }`.
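+ * + * @example + * // Illustrative usage added in this review, not part of the upstream file; + * // it mirrors the string-parameter case in the test file below. + * handleCustomLevelsOpts('ok:10,warn:20,error:35') + * // => { 10: 'OK', 20: 'WARN', 35: 'ERROR', default: 'USERLVL' }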
+ */ +function handleCustomLevelsOpts (cLevels) { + if (!cLevels) return {} + + if (typeof cLevels === 'string') { + return cLevels + .split(',') + .reduce((agg, value, idx) => { + const [levelName, levelNum = idx] = value.split(':') + agg[levelNum] = levelName.toUpperCase() + return agg + }, + { default: 'USERLVL' }) + } else if (Object.prototype.toString.call(cLevels) === '[object Object]') { + return Object + .keys(cLevels) + .reduce((agg, levelName) => { + agg[cLevels[levelName]] = levelName.toUpperCase() + return agg + }, { default: 'USERLVL' }) + } else { + return {} + } +} diff --git a/node_modules/pino-pretty/lib/utils/handle-custom-levels-opts.test.js b/node_modules/pino-pretty/lib/utils/handle-custom-levels-opts.test.js new file mode 100644 index 0000000..cf0066f --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/handle-custom-levels-opts.test.js @@ -0,0 +1,47 @@ +'use strict' + +const { test } = require('node:test') +const handleCustomLevelsOpts = require('./handle-custom-levels-opts') + +test('returns an empty object `{}` for undefined parameter', t => { + const handledCustomLevel = handleCustomLevelsOpts() + t.assert.deepStrictEqual(handledCustomLevel, {}) +}) + +test('returns an empty object `{}` for unknown parameter', t => { + const handledCustomLevel = handleCustomLevelsOpts(123) + t.assert.deepStrictEqual(handledCustomLevel, {}) +}) + +test('returns a filled object for string parameter', t => { + const handledCustomLevel = handleCustomLevelsOpts('ok:10,warn:20,error:35') + t.assert.deepStrictEqual(handledCustomLevel, { + 10: 'OK', + 20: 'WARN', + 35: 'ERROR', + default: 'USERLVL' + }) +}) + +test('returns a filled object for object parameter', t => { + const handledCustomLevel = handleCustomLevelsOpts({ + ok: 10, + warn: 20, + error: 35 + }) + t.assert.deepStrictEqual(handledCustomLevel, { + 10: 'OK', + 20: 'WARN', + 35: 'ERROR', + default: 'USERLVL' + }) +}) + +test('defaults missing level num to first index', t => { + const result = handleCustomLevelsOpts('ok:10,info') + t.assert.deepStrictEqual(result, { + 10: 'OK', + 1: 'INFO', + default: 'USERLVL' + }) +}) diff --git a/node_modules/pino-pretty/lib/utils/index.js b/node_modules/pino-pretty/lib/utils/index.js new file mode 100644 index 0000000..e083d73 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/index.js @@ -0,0 +1,107 @@ +'use strict' + +module.exports = { + buildSafeSonicBoom: require('./build-safe-sonic-boom.js'), + createDate: require('./create-date.js'), + deleteLogProperty: require('./delete-log-property.js'), + filterLog: require('./filter-log.js'), + formatTime: require('./format-time.js'), + getPropertyValue: require('./get-property-value.js'), + handleCustomLevelsNamesOpts: require('./handle-custom-levels-names-opts.js'), + handleCustomLevelsOpts: require('./handle-custom-levels-opts.js'), + interpretConditionals: require('./interpret-conditionals.js'), + isObject: require('./is-object.js'), + isValidDate: require('./is-valid-date.js'), + joinLinesWithIndentation: require('./join-lines-with-indentation.js'), + noop: require('./noop.js'), + parseFactoryOptions: require('./parse-factory-options.js'), + prettifyErrorLog: require('./prettify-error-log.js'), + prettifyError: require('./prettify-error.js'), + prettifyLevel: require('./prettify-level.js'), + prettifyMessage: require('./prettify-message.js'), + prettifyMetadata: require('./prettify-metadata.js'), + prettifyObject: require('./prettify-object.js'), + prettifyTime: require('./prettify-time.js'), + splitPropertyKey:
require('./split-property-key.js'), + getLevelLabelData: require('./get-level-label-data') +} + +// The remainder of this file consists of jsdoc blocks that are difficult to +// determine a more appropriate "home" for. As an example, the blocks associated +// with custom prettifiers could live in either the `prettify-level`, +// `prettify-metadata`, or `prettify-time` files since they are the primary +// files where such code is used. But we want a central place to define common +// doc blocks, so we are picking this file as the answer. + +/** + * A hash of log property names mapped to prettifier functions. When the + * incoming log data is being processed for prettification, any key on the log + * that matches a key in a custom prettifiers hash will be prettified using + * that matching custom prettifier. The value passed to the custom prettifier + * will be the value associated with the corresponding log key. + * + * The hash may contain any arbitrary keys for arbitrary log properties, but it + * may also contain a set of predefined key names that map to well-known log + * properties. These keys are: + * + * + `time` (for the timestamp field) + * + `level` (for the level label field; value may be a level number instead + * of a level label) + * + `hostname` + * + `pid` + * + `name` + * + `caller` + * + * @typedef {Object.<string, CustomPrettifierFunc>} CustomPrettifiers + */ + +/** + * A synchronous function to be used for prettifying a log property. It must + * return a string. + * + * @typedef {function} CustomPrettifierFunc + * @param {any} value The value to be prettified for the key associated with + * the prettifier. + * @returns {string} + */ + +/** + * A tokenized string that indicates how the prettified log line should be + * formatted. Tokens are either log properties enclosed in curly braces, e.g. + * `{levelLabel}`, `{pid}`, or `{req.url}`, or conditional directives in curly + * braces. The only conditional directives supported are `if` and `end`, e.g. + * `{if pid}{pid}{end}`; every `if` must have a matching `end`. Nested + * conditions are not supported. + * + * @typedef {string} MessageFormatString + * + * @example + * `{levelLabel} - {if pid}{pid} - {end}url:{req.url}` + */ + +/** + * @typedef {object} PrettifyMessageExtras + * @property {object} colors Available color functions based on the `useColor` + * (or `colorize`) setting in the options. + */ + +/** + * A function that accepts a log object, name of the message key, and name of + * the level label key and returns a formatted log line. + * + * Note: this function must be synchronous. + * + * @typedef {function} MessageFormatFunction + * @param {object} log The log object to be processed. + * @param {string} messageKey The name of the key in the `log` object that + * contains the log message. + * @param {string} levelLabel The name of the key in the `log` object that + * contains the log level name.
+ * @param {PrettifyMessageExtras} extras Additional data available for message context + * @returns {string} + * + * @example + * function (log, messageKey, levelLabel) { + * return `${log[levelLabel]} - ${log[messageKey]}` + * } + */ diff --git a/node_modules/pino-pretty/lib/utils/index.test.js b/node_modules/pino-pretty/lib/utils/index.test.js new file mode 100644 index 0000000..8db5d2a --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/index.test.js @@ -0,0 +1,37 @@ +'use strict' + +const { test } = require('node:test') +const index = require('./index.js') +const { readdirSync } = require('node:fs') +const { basename } = require('node:path') + +test( + 'index exports exactly all non-test files excluding itself', + t => { + // Read all files in the `utils` directory + const files = readdirSync(__dirname) + + for (const file of files) { + const kebabName = basename(file, '.js') + const snakeName = kebabName.split('-').map((part, idx) => { + if (idx === 0) return part + return part[0].toUpperCase() + part.slice(1) + }).join('') + + if (file.endsWith('.test.js') === false && file !== 'index.js') { + // We expect all files to be exported except… + t.assert.ok(index[snakeName], `exports ${snakeName}`) + } else { + // …test files and the index file itself – those must not be exported + t.assert.ok(!index[snakeName], `does not export ${snakeName}`) + } + + // Remove the exported file from the index object + delete index[snakeName] + } + + // Now the index is expected to be empty, as nothing else should be + // exported from it + t.assert.deepStrictEqual(index, {}, 'does not export anything else') + } +) diff --git a/node_modules/pino-pretty/lib/utils/interpret-conditionals.js b/node_modules/pino-pretty/lib/utils/interpret-conditionals.js new file mode 100644 index 0000000..cc30de8 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/interpret-conditionals.js @@ -0,0 +1,37 @@ +'use strict' + +module.exports = interpretConditionals + +const getPropertyValue = require('./get-property-value') + +/** + * Translates all conditional blocks from within the messageFormat. Translates + * any matching {if key}{key}{end} statements and returns everything between + * the `{if}` and `{end}` directives if the key provided was found in the log. + * + * @param {MessageFormatString|MessageFormatFunction} messageFormat A format + * string or function that defines how the logged message should be + * conditionally formatted. + * @param {object} log The log object whose properties are evaluated. + * + * @returns {string} The parsed messageFormat.
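+ * + * @example + * // Illustrative usage added in this review, not part of the upstream file; + * // it mirrors the first case in the test file below. + * interpretConditionals('{level} - {if data1.data2}{data1.data2}{end}', { level: 30, data1: { data2: 'bar' } }) + * // => '{level} - bar' (remaining tokens are substituted later by prettifyMessage)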
+ */ +function interpretConditionals (messageFormat, log) { + messageFormat = messageFormat.replace(/{if (.*?)}(.*?){end}/g, replacer) + + // Remove non-terminated if blocks + messageFormat = messageFormat.replace(/{if (.*?)}/g, '') + // Remove floating end blocks + messageFormat = messageFormat.replace(/{end}/g, '') + + return messageFormat.replace(/\s+/g, ' ').trim() + + function replacer (_, key, value) { + const propertyValue = getPropertyValue(log, key) + if (propertyValue && value.includes(key)) { + return value.replace(new RegExp('{' + key + '}', 'g'), propertyValue) + } else { + return '' + } + } +} diff --git a/node_modules/pino-pretty/lib/utils/interpret-conditionals.test.js b/node_modules/pino-pretty/lib/utils/interpret-conditionals.test.js new file mode 100644 index 0000000..b88a4f3 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/interpret-conditionals.test.js @@ -0,0 +1,69 @@ +'use strict' + +const { test } = require('node:test') +const { createCopier } = require('fast-copy') +const fastCopy = createCopier({}) +const interpretConditionals = require('./interpret-conditionals') + +const logData = { + level: 30, + data1: { + data2: 'bar' + }, + msg: 'foo' +} + +test('interpretConditionals translates if / end statement to found property value', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level} - {if data1.data2}{data1.data2}{end}', log), '{level} - bar') +}) + +test('interpretConditionals translates if / end statement to found property value and leaves unmatched property key untouched', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level} - {if data1.data2}{data1.data2} ({msg}){end}', log), '{level} - bar ({msg})') +}) + +test('interpretConditionals removes non-terminated if statements', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level} - {if data1.data2}{data1.data2}', log), '{level} - {data1.data2}') +}) + +test('interpretConditionals removes floating end statements', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level} - {data1.data2}{end}', log), '{level} - {data1.data2}') +}) + +test('interpretConditionals removes floating end statements within translated if / end statements', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level} - {if msg}({msg}){end}{end}', log), '{level} - (foo)') +}) + +test('interpretConditionals removes if / end blocks if existent condition key does not match existent property key', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level}{if msg}{data1.data2}{end}', log), '{level}') +}) + +test('interpretConditionals removes if / end blocks if non-existent condition key does not match existent property key', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level}{if foo}{msg}{end}', log), '{level}') +}) + +test('interpretConditionals removes if / end blocks if existent condition key does not match non-existent property key', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level}{if msg}{foo}{end}', log), '{level}') +}) + +test('interpretConditionals removes if / end blocks if non-existent condition key does not match non-existent property key', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level}{if foo}{bar}{end}', log), '{level}') +}) + +test('interpretConditionals removes if / end blocks if
nested condition key does not match property key', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{level}{if data1.msg}{data1.data2}{end}', log), '{level}') +}) + +test('interpretConditionals removes nested if / end statement blocks', t => { + const log = fastCopy(logData) + t.assert.strictEqual(interpretConditionals('{if msg}{if data1.data2}{msg}{data1.data2}{end}{end}', log), 'foo{data1.data2}') +}) diff --git a/node_modules/pino-pretty/lib/utils/is-object.js b/node_modules/pino-pretty/lib/utils/is-object.js new file mode 100644 index 0000000..8dd822b --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/is-object.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = isObject + +function isObject (input) { + return Object.prototype.toString.apply(input) === '[object Object]' +} diff --git a/node_modules/pino-pretty/lib/utils/is-object.test.js b/node_modules/pino-pretty/lib/utils/is-object.test.js new file mode 100644 index 0000000..86ffb42 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/is-object.test.js @@ -0,0 +1,10 @@ +'use strict' + +const { test } = require('node:test') +const isObject = require('./is-object') + +test('returns correct answer', t => { + t.assert.strictEqual(isObject({}), true) + t.assert.strictEqual(isObject([]), false) + t.assert.strictEqual(isObject(42), false) +}) diff --git a/node_modules/pino-pretty/lib/utils/is-valid-date.js b/node_modules/pino-pretty/lib/utils/is-valid-date.js new file mode 100644 index 0000000..1fc821b --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/is-valid-date.js @@ -0,0 +1,14 @@ +'use strict' + +module.exports = isValidDate + +/** + * Checks if the argument is a JS Date and not 'Invalid Date'. + * + * @param {Date} date The date to check. + * + * @returns {boolean} true if the argument is a JS Date and not 'Invalid Date'. + */ +function isValidDate (date) { + return date instanceof Date && !Number.isNaN(date.getTime()) +} diff --git a/node_modules/pino-pretty/lib/utils/is-valid-date.test.js b/node_modules/pino-pretty/lib/utils/is-valid-date.test.js new file mode 100644 index 0000000..1ed1a6f --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/is-valid-date.test.js @@ -0,0 +1,16 @@ +'use strict' + +process.env.TZ = 'UTC' + +const { test } = require('node:test') +const isValidDate = require('./is-valid-date') + +test('returns true for valid dates', t => { + t.assert.strictEqual(isValidDate(new Date()), true) +}) + +test('returns false for non-dates and invalid dates', t => { + t.plan(2) + t.assert.strictEqual(isValidDate('20210621'), false) + t.assert.strictEqual(isValidDate(new Date('2021-41-99')), false) +}) diff --git a/node_modules/pino-pretty/lib/utils/join-lines-with-indentation.js b/node_modules/pino-pretty/lib/utils/join-lines-with-indentation.js new file mode 100644 index 0000000..7761ae6 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/join-lines-with-indentation.js @@ -0,0 +1,29 @@ +'use strict' + +module.exports = joinLinesWithIndentation + +/** + * @typedef {object} JoinLinesWithIndentationParams + * @property {string} input The string to split and reformat. + * @property {string} [ident] The indentation string. Default: `'    '` (4 spaces). + * @property {string} [eol] The end of line sequence to use when rejoining + * the lines. Default: `'\n'`. + */ + +/** + * Given a string with line separators, either `\r\n` or `\n`, add indentation + * to all lines subsequent to the first line and rejoin the lines using an + * end of line sequence.
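+ * + * @example + * // Illustrative usage added in this review, not part of the upstream file; + * // it mirrors the default-indentation case in the test file below. + * joinLinesWithIndentation({ input: 'foo\nbar\nbaz' }) + * // => 'foo\n    bar\n    baz'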
+ * + * @param {JoinLinesWithIndentationParams} input + * + * @returns {string} A string with lines subsequent to the first indented + * with the given indentation sequence. + */ +function joinLinesWithIndentation ({ input, ident = '    ', eol = '\n' }) { + const lines = input.split(/\r?\n/) + for (let i = 1; i < lines.length; i += 1) { + lines[i] = ident + lines[i] + } + return lines.join(eol) +} diff --git a/node_modules/pino-pretty/lib/utils/join-lines-with-indentation.test.js b/node_modules/pino-pretty/lib/utils/join-lines-with-indentation.test.js new file mode 100644 index 0000000..dbfc02b --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/join-lines-with-indentation.test.js @@ -0,0 +1,16 @@ +'use strict' + +const { test } = require('node:test') +const joinLinesWithIndentation = require('./join-lines-with-indentation') + +test('joinLinesWithIndentation adds indentation to beginning of subsequent lines', t => { + const input = 'foo\nbar\nbaz' + const result = joinLinesWithIndentation({ input }) + t.assert.strictEqual(result, 'foo\n    bar\n    baz') +}) + +test('joinLinesWithIndentation accepts custom indentation, line breaks, and eol', t => { + const input = 'foo\nbar\r\nbaz' + const result = joinLinesWithIndentation({ input, ident: '  ', eol: '^' }) + t.assert.strictEqual(result, 'foo^  bar^  baz') +}) diff --git a/node_modules/pino-pretty/lib/utils/noop.js b/node_modules/pino-pretty/lib/utils/noop.js new file mode 100644 index 0000000..a77986c --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/noop.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = function noop () {} diff --git a/node_modules/pino-pretty/lib/utils/noop.test.js b/node_modules/pino-pretty/lib/utils/noop.test.js new file mode 100644 index 0000000..bbc1f88 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/noop.test.js @@ -0,0 +1,12 @@ +'use strict' + +const { test } = require('node:test') +const noop = require('./noop') + +test('is a function', t => { + t.assert.strictEqual(typeof noop, 'function') +}) + +test('does nothing', t => { + t.assert.strictEqual(noop('stuff'), undefined) +}) diff --git a/node_modules/pino-pretty/lib/utils/parse-factory-options.js b/node_modules/pino-pretty/lib/utils/parse-factory-options.js new file mode 100644 index 0000000..3b051f0 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/parse-factory-options.js @@ -0,0 +1,173 @@ +'use strict' + +module.exports = parseFactoryOptions + +const { + LEVEL_NAMES +} = require('../constants') +const colors = require('../colors') +const handleCustomLevelsOpts = require('./handle-custom-levels-opts') +const handleCustomLevelsNamesOpts = require('./handle-custom-levels-names-opts') +const handleLevelLabelData = require('./get-level-label-data') + +/** + * A `PrettyContext` is an object to be used by the various functions that + * process log data. It is derived from the provided {@link PinoPrettyOptions}. + * It may be used as a `this` context. + * + * @typedef {object} PrettyContext + * @property {string} EOL The escape sequence chosen as the line terminator. + * @property {string} IDENT The string to use as the indentation sequence. + * @property {ColorizerFunc} colorizer A configured colorizer function. + * @property {Array[Array]} customColors A set of custom color + * names associated with level numbers. + * @property {object} customLevelNames A hash of level names to level numbers, + * e.g. `{ info: 30 }`. + * @property {object} customLevels A hash of level numbers to level labels, + * e.g. `{ 30: 'INFO' }`.
+ * @property {CustomPrettifiers} customPrettifiers A hash of custom prettifier + * functions. + * @property {object} customProperties Comprised of `customLevels` and + * `customLevelNames` if such options are provided. + * @property {string[]} errorLikeObjectKeys The key names in the log data that + * should be considered as holding error objects. + * @property {string[]} errorProps A list of error object keys that should be + * included in the output. + * @property {function} getLevelLabelData Pass a numeric level to return [levelLabelString,levelNum] + * @property {boolean} hideObject Indicates the prettifier should omit objects + * in the output. + * @property {string[]} ignoreKeys Set of log data keys to omit. + * @property {string[]} includeKeys Opposite of `ignoreKeys`. + * @property {boolean} levelFirst Indicates the level should be printed first. + * @property {string} levelKey Name of the key in the log data that contains + * the level. + * @property {string} levelLabel Format token to represent the position of the + * level name in the output string. + * @property {MessageFormatString|MessageFormatFunction} messageFormat + * @property {string} messageKey Name of the key in the log data that contains + * the message. + * @property {string|number} minimumLevel The minimum log level to process + * and output. + * @property {ColorizerFunc} objectColorizer + * @property {boolean} singleLine Indicates objects should be printed on a + * single output line. + * @property {string} timestampKey The name of the key in the log data that + * contains the log timestamp. + * @property {boolean|string} translateTime Indicates if timestamps should be + * translated to a human-readable string. + * @property {boolean} useOnlyCustomProps + */ + +/** + * @param {PinoPrettyOptions} options The user supplied object of options. + * + * @returns {PrettyContext} + */ +function parseFactoryOptions (options) { + const EOL = options.crlf ? '\r\n' : '\n' + const IDENT = '    ' + const { + customPrettifiers, + errorLikeObjectKeys, + hideObject, + levelFirst, + levelKey, + levelLabel, + messageFormat, + messageKey, + minimumLevel, + singleLine, + timestampKey, + translateTime + } = options + const errorProps = options.errorProps.split(',') + const useOnlyCustomProps = typeof options.useOnlyCustomProps === 'boolean' + ? options.useOnlyCustomProps + : (options.useOnlyCustomProps === 'true') + const customLevels = handleCustomLevelsOpts(options.customLevels) + const customLevelNames = handleCustomLevelsNamesOpts(options.customLevels) + const getLevelLabelData = handleLevelLabelData(useOnlyCustomProps, customLevels, customLevelNames) + + let customColors + if (options.customColors) { + if (typeof options.customColors === 'string') { + customColors = options.customColors.split(',').reduce((agg, value) => { + const [level, color] = value.split(':') + const condition = useOnlyCustomProps + ? options.customLevels + : customLevelNames[level] !== undefined + const levelNum = condition + ? customLevelNames[level] + : LEVEL_NAMES[level] + const colorIdx = levelNum !== undefined + ? levelNum + : level + agg.push([colorIdx, color]) + return agg + }, []) + } else if (typeof options.customColors === 'object') { + customColors = Object.keys(options.customColors).reduce((agg, value) => { + const [level, color] = [value, options.customColors[value]] + const condition = useOnlyCustomProps + ? options.customLevels + : customLevelNames[level] !== undefined + const levelNum = condition + ?
customLevelNames[level] + : LEVEL_NAMES[level] + const colorIdx = levelNum !== undefined + ? levelNum + : level + agg.push([colorIdx, color]) + return agg + }, []) + } else { + throw new Error('options.customColors must be of type string or object.') + } + } + + const customProperties = { customLevels, customLevelNames } + if (useOnlyCustomProps === true && !options.customLevels) { + customProperties.customLevels = undefined + customProperties.customLevelNames = undefined + } + + const includeKeys = options.include !== undefined + ? new Set(options.include.split(',')) + : undefined + const ignoreKeys = (!includeKeys && options.ignore) + ? new Set(options.ignore.split(',')) + : undefined + + const colorizer = colors(options.colorize, customColors, useOnlyCustomProps) + const objectColorizer = options.colorizeObjects + ? colorizer + : colors(false, [], false) + + return { + EOL, + IDENT, + colorizer, + customColors, + customLevelNames, + customLevels, + customPrettifiers, + customProperties, + errorLikeObjectKeys, + errorProps, + getLevelLabelData, + hideObject, + ignoreKeys, + includeKeys, + levelFirst, + levelKey, + levelLabel, + messageFormat, + messageKey, + minimumLevel, + objectColorizer, + singleLine, + timestampKey, + translateTime, + useOnlyCustomProps + } +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-error-log.js b/node_modules/pino-pretty/lib/utils/prettify-error-log.js new file mode 100644 index 0000000..2410c65 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-error-log.js @@ -0,0 +1,73 @@ +'use strict' + +module.exports = prettifyErrorLog + +const { + LOGGER_KEYS +} = require('../constants') + +const isObject = require('./is-object') +const joinLinesWithIndentation = require('./join-lines-with-indentation') +const prettifyObject = require('./prettify-object') + +/** + * @typedef {object} PrettifyErrorLogParams + * @property {object} log The error log to prettify. + * @property {PrettyContext} context The context object built from parsing + * the options. + */ + +/** + * Given a log object that has a `type: 'Error'` key, prettify the object and + * return the result. In other words, the log is assumed to represent an + * error: its stack is rendered first, followed by any error properties + * requested via `errorProps`. + * + * @param {PrettifyErrorLogParams} input + * + * @returns {string} A string that represents the prettified error log. + */ +function prettifyErrorLog ({ log, context }) { + const { + EOL: eol, + IDENT: ident, + errorProps: errorProperties, + messageKey + } = context + const stack = log.stack + const joinedLines = joinLinesWithIndentation({ input: stack, ident, eol }) + let result = `${ident}${joinedLines}${eol}` + + if (errorProperties.length > 0) { + const excludeProperties = LOGGER_KEYS.concat(messageKey, 'type', 'stack') + let propertiesToPrint + if (errorProperties[0] === '*') { + // Print all sibling properties except for the standard exclusions. + propertiesToPrint = Object.keys(log).filter(k => excludeProperties.includes(k) === false) + } else { + // Print only specified properties unless the property is a standard exclusion. + propertiesToPrint = errorProperties.filter(k => excludeProperties.includes(k) === false) + } + + for (let i = 0; i < propertiesToPrint.length; i += 1) { + const key = propertiesToPrint[i] + if (key in log === false) continue + if (isObject(log[key])) { + // The nested object may have "logger" type keys but since they are not + // at the root level of the object being processed, we want to print them. + // Thus, we invoke with `excludeLoggerKeys: false`.
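+ // (Clarifying note added in this review: the nested context below doubles + // `IDENT` so the nested object's lines render one indentation level deeper + // than the error's own properties.)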
+ const prettifiedObject = prettifyObject({ + log: log[key], + excludeLoggerKeys: false, + context: { + ...context, + IDENT: ident + ident + } + }) + result = `${result}${ident}${key}: {${eol}${prettifiedObject}${ident}}${eol}` + continue + } + result = `${result}${ident}${key}: ${log[key]}${eol}` + } + } + + return result +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-error-log.test.js b/node_modules/pino-pretty/lib/utils/prettify-error-log.test.js new file mode 100644 index 0000000..bdc3d5f --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-error-log.test.js @@ -0,0 +1,110 @@ +'use strict' + +const { test, describe } = require('node:test') +const prettifyErrorLog = require('./prettify-error-log') +const colors = require('../colors') +const { + ERROR_LIKE_KEYS, + MESSAGE_KEY +} = require('../constants') + +const context = { + EOL: '\n', + IDENT: ' ', + customPrettifiers: {}, + errorLikeObjectKeys: ERROR_LIKE_KEYS, + errorProps: [], + messageKey: MESSAGE_KEY, + objectColorizer: colors() +} + +test('returns string with default settings', t => { + const err = Error('Something went wrong') + const str = prettifyErrorLog({ log: err, context }) + t.assert.ok(str.startsWith(' Error: Something went wrong')) +}) + +test('returns string with custom ident', t => { + const err = Error('Something went wrong') + const str = prettifyErrorLog({ + log: err, + context: { + ...context, + IDENT: ' ' + } + }) + t.assert.ok(str.startsWith(' Error: Something went wrong')) +}) + +test('returns string with custom eol', t => { + const err = Error('Something went wrong') + const str = prettifyErrorLog({ + log: err, + context: { + ...context, + EOL: '\r\n' + } + }) + t.assert.ok(str.startsWith(' Error: Something went wrong\r\n')) +}) + +describe('errorProperties', () => { + test('excludes all for wildcard', t => { + const err = Error('boom') + err.foo = 'foo' + const str = prettifyErrorLog({ + log: err, + context: { + ...context, + errorProps: ['*'] + } + }) + t.assert.ok(str.startsWith(' Error: boom')) + t.assert.strictEqual(str.includes('foo: "foo"'), false) + }) + + test('excludes only selected properties', t => { + const err = Error('boom') + err.foo = 'foo' + const str = prettifyErrorLog({ + log: err, + context: { + ...context, + errorProps: ['foo'] + } + }) + t.assert.ok(str.startsWith(' Error: boom')) + t.assert.strictEqual(str.includes('foo: foo'), true) + }) + + test('ignores specified properties if not present', t => { + const err = Error('boom') + err.foo = 'foo' + const str = prettifyErrorLog({ + log: err, + context: { + ...context, + errorProps: ['foo', 'bar'] + } + }) + t.assert.ok(str.startsWith(' Error: boom')) + t.assert.strictEqual(str.includes('foo: foo'), true) + t.assert.strictEqual(str.includes('bar'), false) + }) + + test('processes nested objects', t => { + const err = Error('boom') + err.foo = { bar: 'bar', message: 'included' } + const str = prettifyErrorLog({ + log: err, + context: { + ...context, + errorProps: ['foo'] + } + }) + t.assert.ok(str.startsWith(' Error: boom')) + t.assert.strictEqual(str.includes('foo: {'), true) + t.assert.strictEqual(str.includes('bar: "bar"'), true) + t.assert.strictEqual(str.includes('message: "included"'), true) + }) +}) diff --git a/node_modules/pino-pretty/lib/utils/prettify-error.js b/node_modules/pino-pretty/lib/utils/prettify-error.js new file mode 100644 index 0000000..ac99826 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-error.js @@ -0,0 +1,49 @@ +'use strict' + +module.exports = prettifyError + +const 
joinLinesWithIndentation = require('./join-lines-with-indentation') + +/** + * @typedef {object} PrettifyErrorParams + * @property {string} keyName The key assigned to this error in the log object. + * @property {string} lines The STRINGIFIED error. If the error field has a + * custom prettifier, that should be pre-applied as well. + * @property {string} ident The indentation sequence to use. + * @property {string} eol The EOL sequence to use. + */ + +/** + * Prettifies an error string into a multi-line format. + * + * @param {PrettifyErrorParams} input + * + * @returns {string} + */ +function prettifyError ({ keyName, lines, eol, ident }) { + let result = '' + const joinedLines = joinLinesWithIndentation({ input: lines, ident, eol }) + const splitLines = `${ident}${keyName}: ${joinedLines}${eol}`.split(eol) + + for (let j = 0; j < splitLines.length; j += 1) { + if (j !== 0) result += eol + + const line = splitLines[j] + if (/^\s*"stack"/.test(line)) { + const matches = /^(\s*"stack":)\s*(".*"),?$/.exec(line) + /* istanbul ignore else */ + if (matches && matches.length === 3) { + const indentSize = /^\s*/.exec(line)[0].length + 4 + const indentation = ' '.repeat(indentSize) + const stackMessage = matches[2] + result += matches[1] + eol + indentation + JSON.parse(stackMessage).replace(/\n/g, eol + indentation) + } else { + result += line + } + } else { + result += line + } + } + + return result +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-error.test.js b/node_modules/pino-pretty/lib/utils/prettify-error.test.js new file mode 100644 index 0000000..23cc4de --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-error.test.js @@ -0,0 +1,13 @@ +'use strict' + +const { test } = require('node:test') +const stringifySafe = require('fast-safe-stringify') +const prettifyError = require('./prettify-error') + +test('prettifies error', t => { + const error = Error('Bad error!') + const lines = stringifySafe(error, Object.getOwnPropertyNames(error), 2) + + const prettyError = prettifyError({ keyName: 'errorKey', lines, ident: ' ', eol: '\n' }) + t.assert.match(prettyError, /\s*errorKey: {\n\s*"stack":[\s\S]*"message": "Bad error!"/) +}) diff --git a/node_modules/pino-pretty/lib/utils/prettify-level.js b/node_modules/pino-pretty/lib/utils/prettify-level.js new file mode 100644 index 0000000..2e4dce8 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-level.js @@ -0,0 +1,41 @@ +'use strict' + +module.exports = prettifyLevel + +const getPropertyValue = require('./get-property-value') + +/** + * @typedef {object} PrettifyLevelParams + * @property {object} log The log object. + * @property {PrettyContext} context The context object built from parsing + * the options. + */ + +/** + * Checks if the passed in log has a `level` value and returns a prettified + * string for that level if so. + * + * @param {PrettifyLevelParams} input + * + * @returns {undefined|string} If `log` does not have a `level` property then + * `undefined` will be returned. Otherwise, a string from the specified + * `colorizer` is returned. 
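+ * + * @example + * // Illustrative usage added in this review, not part of the upstream file; + * // it mirrors the test file below, given a default context. + * prettifyLevel({ log: { level: 30 }, context }) + * // => 'INFO' (or '\u001B[32mINFO\u001B[39m' with a color colorizer)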
+ */ +function prettifyLevel ({ log, context }) { + const { + colorizer, + customLevels, + customLevelNames, + levelKey, + getLevelLabelData + } = context + const prettifier = context.customPrettifiers?.level + const output = getPropertyValue(log, levelKey) + if (output === undefined) return undefined + const labelColorized = colorizer(output, { customLevels, customLevelNames }) + if (prettifier) { + const [label] = getLevelLabelData(output) + return prettifier(output, levelKey, log, { label, labelColorized, colors: colorizer.colors }) + } + return labelColorized +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-level.test.js b/node_modules/pino-pretty/lib/utils/prettify-level.test.js new file mode 100644 index 0000000..02e1e26 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-level.test.js @@ -0,0 +1,70 @@ +'use strict' + +const { test } = require('node:test') +const prettifyLevel = require('./prettify-level') +const getColorizer = require('../colors') +const getLevelLabelData = require('./get-level-label-data') +const { + LEVEL_KEY +} = require('../constants') + +const context = { + colorizer: getColorizer(), + customLevelNames: undefined, + customLevels: undefined, + levelKey: LEVEL_KEY, + customPrettifiers: undefined, + getLevelLabelData: getLevelLabelData(false, {}, {}) +} + +test('returns `undefined` for unknown level', t => { + const colorized = prettifyLevel({ + log: {}, + context: { + ...context + } + }) + t.assert.strictEqual(colorized, undefined) +}) + +test('returns non-colorized value for default colorizer', t => { + const log = { + level: 30 + } + const colorized = prettifyLevel({ + log, + context: { + ...context + } + }) + t.assert.strictEqual(colorized, 'INFO') +}) + +test('returns colorized value for color colorizer', t => { + const log = { + level: 30 + } + const colorizer = getColorizer(true) + const colorized = prettifyLevel({ + log, + context: { + ...context, + colorizer + } + }) + t.assert.strictEqual(colorized, '\u001B[32mINFO\u001B[39m') +}) + +test('passes output through provided prettifier', t => { + const log = { + level: 30 + } + const colorized = prettifyLevel({ + log, + context: { + ...context, + customPrettifiers: { level () { return 'modified' } } + } + }) + t.assert.strictEqual(colorized, 'modified') +}) diff --git a/node_modules/pino-pretty/lib/utils/prettify-message.js b/node_modules/pino-pretty/lib/utils/prettify-message.js new file mode 100644 index 0000000..623b703 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-message.js @@ -0,0 +1,64 @@ +'use strict' + +module.exports = prettifyMessage + +const { + LEVELS +} = require('../constants') + +const getPropertyValue = require('./get-property-value') +const interpretConditionals = require('./interpret-conditionals') + +/** + * @typedef {object} PrettifyMessageParams + * @property {object} log The log object with the message to colorize. + * @property {PrettyContext} context The context object built from parsing + * the options. + */ + +/** + * Prettifies a message string if the given `log` has a message property. + * + * @param {PrettifyMessageParams} input + * + * @returns {undefined|string} If the message key is not found, or the message + * key is not a string, number, or boolean, then `undefined` will be returned. + * Otherwise, a string that is the prettified message.
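+ * + * @example + * // Illustrative usage added in this review, not part of the upstream file; + * // it mirrors the `messageFormat` cases in the test file below. + * prettifyMessage({ log: { msg: 'foo', context: 'appModule' }, context: { ...context, messageFormat: '{context} - {msg}' } }) + * // => 'appModule - foo'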
+ */ +function prettifyMessage ({ log, context }) { + const { + colorizer, + customLevels, + levelKey, + levelLabel, + messageFormat, + messageKey, + useOnlyCustomProps + } = context + if (messageFormat && typeof messageFormat === 'string') { + const parsedMessageFormat = interpretConditionals(messageFormat, log) + + const message = String(parsedMessageFormat).replace( + /{([^{}]+)}/g, + function (match, p1) { + // return log level as string instead of int + let level + if (p1 === levelLabel && (level = getPropertyValue(log, levelKey)) !== undefined) { + const condition = useOnlyCustomProps ? customLevels === undefined : customLevels[level] === undefined + return condition ? LEVELS[level] : customLevels[level] + } + + // Parse nested key access, e.g. `{keyA.subKeyB}`. + const value = getPropertyValue(log, p1) + return value !== undefined ? value : '' + }) + return colorizer.message(message) + } + if (messageFormat && typeof messageFormat === 'function') { + const msg = messageFormat(log, messageKey, levelLabel, { colors: colorizer.colors }) + return colorizer.message(msg) + } + if (messageKey in log === false) return undefined + if (typeof log[messageKey] !== 'string' && typeof log[messageKey] !== 'number' && typeof log[messageKey] !== 'boolean') return undefined + return colorizer.message(log[messageKey]) +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-message.test.js b/node_modules/pino-pretty/lib/utils/prettify-message.test.js new file mode 100644 index 0000000..5c0100f --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-message.test.js @@ -0,0 +1,270 @@ +'use strict' + +const { test } = require('node:test') +const prettifyMessage = require('./prettify-message') +const getColorizer = require('../colors') +const { + LEVEL_KEY, + LEVEL_LABEL +} = require('../constants') +const context = { + colorizer: getColorizer(), + levelKey: LEVEL_KEY, + levelLabel: LEVEL_LABEL, + messageKey: 'msg' +} + +test('returns `undefined` if `messageKey` not found', t => { + const str = prettifyMessage({ log: {}, context }) + t.assert.strictEqual(str, undefined) +}) + +test('returns `undefined` if `messageKey` not string', t => { + const str = prettifyMessage({ log: { msg: {} }, context }) + t.assert.strictEqual(str, undefined) +}) + +test('returns non-colorized value for default colorizer', t => { + const colorizer = getColorizer() + const str = prettifyMessage({ + log: { msg: 'foo' }, + context: { ...context, colorizer } + }) + t.assert.strictEqual(str, 'foo') +}) + +test('returns non-colorized value for alternate `messageKey`', t => { + const str = prettifyMessage({ + log: { message: 'foo' }, + context: { ...context, messageKey: 'message' } + }) + t.assert.strictEqual(str, 'foo') +}) + +test('returns colorized value for color colorizer', t => { + const colorizer = getColorizer(true) + const str = prettifyMessage({ + log: { msg: 'foo' }, + context: { ...context, colorizer } + }) + t.assert.strictEqual(str, '\u001B[36mfoo\u001B[39m') +}) + +test('returns colorized value for color colorizer for alternate `messageKey`', t => { + const colorizer = getColorizer(true) + const str = prettifyMessage({ + log: { message: 'foo' }, + context: { ...context, messageKey: 'message', colorizer } + }) + t.assert.strictEqual(str, '\u001B[36mfoo\u001B[39m') +}) + +test('returns message formatted by `messageFormat` option', t => { + const str = prettifyMessage({ + log: { msg: 'foo', context: 'appModule' }, + context: { ...context, messageFormat: '{context} - {msg}' } + }) + t.assert.strictEqual(str, 
'appModule - foo') +}) + +test('returns message formatted by `messageFormat` option - missing prop', t => { + const str = prettifyMessage({ + log: { context: 'appModule' }, + context: { ...context, messageFormat: '{context} - {msg}' } + }) + t.assert.strictEqual(str, 'appModule - ') +}) + +test('returns message formatted by `messageFormat` option - levelLabel & useOnlyCustomProps false', t => { + const str = prettifyMessage({ + log: { msg: 'foo', context: 'appModule', level: 30 }, + context: { + ...context, + messageFormat: '[{level}] {levelLabel} {context} - {msg}', + customLevels: {} + } + }) + t.assert.strictEqual(str, '[30] INFO appModule - foo') +}) + +test('returns message formatted by `messageFormat` option - levelLabel & useOnlyCustomProps true', t => { + const str = prettifyMessage({ + log: { msg: 'foo', context: 'appModule', level: 30 }, + context: { + ...context, + messageFormat: '[{level}] {levelLabel} {context} - {msg}', + customLevels: { 30: 'CHECK' }, + useOnlyCustomProps: true + } + }) + t.assert.strictEqual(str, '[30] CHECK appModule - foo') +}) + +test('returns message formatted by `messageFormat` option - levelLabel & customLevels', t => { + const str = prettifyMessage({ + log: { msg: 'foo', context: 'appModule', level: 123 }, + context: { + ...context, + messageFormat: '[{level}] {levelLabel} {context} - {msg}', + customLevels: { 123: 'CUSTOM' } + } + }) + t.assert.strictEqual(str, '[123] CUSTOM appModule - foo') +}) + +test('returns message formatted by `messageFormat` option - levelLabel, customLevels & useOnlyCustomProps', t => { + const str = prettifyMessage({ + log: { msg: 'foo', context: 'appModule', level: 123 }, + context: { + ...context, + messageFormat: '[{level}] {levelLabel} {context} - {msg}', + customLevels: { 123: 'CUSTOM' }, + useOnlyCustomProps: true + } + }) + t.assert.strictEqual(str, '[123] CUSTOM appModule - foo') +}) + +test('returns message formatted by `messageFormat` option - levelLabel, customLevels & useOnlyCustomProps false', t => { + const str = prettifyMessage({ + log: { msg: 'foo', context: 'appModule', level: 40 }, + context: { + ...context, + messageFormat: '[{level}] {levelLabel} {context} - {msg}', + customLevels: { 123: 'CUSTOM' }, + useOnlyCustomProps: false + } + }) + t.assert.strictEqual(str, '[40] WARN appModule - foo') +}) + +test('returns message formatted by `messageFormat` option - value 0', t => { + const str = prettifyMessage({ + log: { value: 0 }, + context: { ...context, messageFormat: '{value}' }, + }) + t.assert.strictEqual(str, '0') +}) + +test('returns message formatted by `messageFormat` option - value false', t => { + const str = prettifyMessage({ + log: { value: false }, + context: { ...context, messageFormat: '{value}' }, + }) + t.assert.strictEqual(str, 'false') +}) + +test('returns message formatted by `messageFormat` option - value undefined', t => { + const str = prettifyMessage({ + log: { value: undefined }, + context: { ...context, messageFormat: '{value}' }, + }) + t.assert.strictEqual(str, '') +}) + +test('returns message formatted by `messageFormat` option - value null', t => { + const str = prettifyMessage({ + log: { value: null }, + context: { ...context, messageFormat: '{value}' }, + }) + t.assert.strictEqual(str, 'null') +}) + +test('`messageFormat` supports nested curly brackets', t => { + const str = prettifyMessage({ + log: { level: 30 }, + context: { + ...context, + messageFormat: '{{level}}-{level}-{{level}-{level}}' + } + }) + t.assert.strictEqual(str, '{30}-30-{30-30}') +}) + 
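+ +// Note added in this review (not part of the upstream suite): the token regex +// /{([^{}]+)}/g in prettify-message.js only matches innermost brace pairs, +// which is why the nested curly-bracket test above resolves `{{level}}` to +// `{30}` instead of failing on the outer braces.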
+test('`messageFormat` supports nested object', t => { + const str = prettifyMessage({ + log: { level: 30, request: { url: 'localhost/test' }, msg: 'foo' }, + context: { + ...context, + messageFormat: '{request.url} - param: {request.params.process} - {msg}' + } + }) + t.assert.strictEqual(str, 'localhost/test - param:  - foo') +}) + +test('`messageFormat` supports conditional blocks', t => { + const str = prettifyMessage({ + log: { level: 30, req: { id: 'foo' } }, + context: { + ...context, + messageFormat: '{level} | {if req.id}({req.id}){end}{if msg}{msg}{end}' + } + }) + t.assert.strictEqual(str, '30 | (foo)') +}) + +test('`messageFormat` supports function definition', t => { + const str = prettifyMessage({ + log: { level: 30, request: { url: 'localhost/test' }, msg: 'incoming request' }, + context: { + ...context, + messageFormat: (log, messageKey, levelLabel) => { + let msg = log[messageKey] + if (msg === 'incoming request') msg = `--> ${log.request.url}` + return msg + } + } + }) + t.assert.strictEqual(str, '--> localhost/test') +}) + +test('`messageFormat` supports function definition with colorizer object', t => { + const colorizer = getColorizer(true) + const str = prettifyMessage({ + log: { level: 30, request: { url: 'localhost/test' }, msg: 'incoming request' }, + context: { + ...context, + colorizer, + messageFormat: (log, messageKey, levelLabel, { colors }) => { + let msg = log[messageKey] + if (msg === 'incoming request') msg = `--> ${colors.red(log.request.url)}` + return msg + } + } + }) + t.assert.strictEqual(str, '\u001B[36m--> \u001B[31mlocalhost/test\u001B[36m\u001B[39m') +}) + +test('`messageFormat` supports function definition with colorizer object when using custom colors', t => { + const colorizer = getColorizer(true, [[30, 'brightGreen']], false) + const str = prettifyMessage({ + log: { level: 30, request: { url: 'localhost/test' }, msg: 'incoming request' }, + context: { + ...context, + colorizer, + messageFormat: (log, messageKey, levelLabel, { colors }) => { + let msg = log[messageKey] + if (msg === 'incoming request') msg = `--> ${colors.red(log.request.url)}` + return msg + } + } + }) + t.assert.strictEqual(str, '\u001B[36m--> \u001B[31mlocalhost/test\u001B[36m\u001B[39m') +}) + +test('`messageFormat` supports function definition with colorizer object when no color is supported', t => { + const colorizer = getColorizer(false) + const str = prettifyMessage({ + log: { level: 30, request: { url: 'localhost/test' }, msg: 'incoming request' }, + context: { + ...context, + colorizer, + messageFormat: (log, messageKey, levelLabel, { colors }) => { + let msg = log[messageKey] + if (msg === 'incoming request') msg = `--> ${colors.red(log.request.url)}` + return msg + } + } + }) + t.assert.strictEqual(str, '--> localhost/test') +}) diff --git a/node_modules/pino-pretty/lib/utils/prettify-metadata.js b/node_modules/pino-pretty/lib/utils/prettify-metadata.js new file mode 100644 index 0000000..72483b1 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-metadata.js @@ -0,0 +1,73 @@ +'use strict' + +module.exports = prettifyMetadata + +/** + * @typedef {object} PrettifyMetadataParams + * @property {object} log The log that may or may not contain metadata to + * be prettified. + * @property {PrettyContext} context The context object built from parsing + * the options. + */ + +/** + * Prettifies metadata that is usually present in a Pino log line.
It looks for + * fields `name`, `pid`, `hostname`, and `caller` and returns a formatted string using + * the fields it finds. + * + * @param {PrettifyMetadataParams} input + * + * @returns {undefined|string} If no metadata is found then `undefined` is + * returned. Otherwise, a string of prettified metadata is returned. + */ +function prettifyMetadata ({ log, context }) { + const { customPrettifiers: prettifiers, colorizer } = context + let line = '' + + if (log.name || log.pid || log.hostname) { + line += '(' + + if (log.name) { + line += prettifiers.name + ? prettifiers.name(log.name, 'name', log, { colors: colorizer.colors }) + : log.name + } + + if (log.pid) { + const prettyPid = prettifiers.pid + ? prettifiers.pid(log.pid, 'pid', log, { colors: colorizer.colors }) + : log.pid + if (log.name && log.pid) { + line += '/' + prettyPid + } else { + line += prettyPid + } + } + + if (log.hostname) { + // If `pid` and `name` were in the ignore keys list then we don't need + // the leading space. + const prettyHostname = prettifiers.hostname + ? prettifiers.hostname(log.hostname, 'hostname', log, { colors: colorizer.colors }) + : log.hostname + + line += `${line === '(' ? 'on' : ' on'} ${prettyHostname}` + } + + line += ')' + } + + if (log.caller) { + const prettyCaller = prettifiers.caller + ? prettifiers.caller(log.caller, 'caller', log, { colors: colorizer.colors }) + : log.caller + + line += `${line === '' ? '' : ' '}<${prettyCaller}>` + } + + if (line === '') { + return undefined + } else { + return line + } +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-metadata.test.js b/node_modules/pino-pretty/lib/utils/prettify-metadata.test.js new file mode 100644 index 0000000..c1c5023 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-metadata.test.js @@ -0,0 +1,149 @@ +'use strict' + +const { test } = require('node:test') +const prettifyMetadata = require('./prettify-metadata') +const getColorizer = require('../colors') +const context = { + customPrettifiers: {}, + colorizer: { + colors: {} + } +} + +test('returns `undefined` if no metadata present', t => { + const str = prettifyMetadata({ log: {}, context }) + t.assert.strictEqual(str, undefined) +}) + +test('works with only `name` present', t => { + const str = prettifyMetadata({ log: { name: 'foo' }, context }) + t.assert.strictEqual(str, '(foo)') +}) + +test('works with only `pid` present', t => { + const str = prettifyMetadata({ log: { pid: '1234' }, context }) + t.assert.strictEqual(str, '(1234)') +}) + +test('works with only `hostname` present', t => { + const str = prettifyMetadata({ log: { hostname: 'bar' }, context }) + t.assert.strictEqual(str, '(on bar)') +}) + +test('works with only `name` & `pid` present', t => { + const str = prettifyMetadata({ log: { name: 'foo', pid: '1234' }, context }) + t.assert.strictEqual(str, '(foo/1234)') +}) + +test('works with only `name` & `hostname` present', t => { + const str = prettifyMetadata({ log: { name: 'foo', hostname: 'bar' }, context }) + t.assert.strictEqual(str, '(foo on bar)') +}) + +test('works with only `pid` & `hostname` present', t => { + const str = prettifyMetadata({ log: { pid: '1234', hostname: 'bar' }, context }) + t.assert.strictEqual(str, '(1234 on bar)') +}) + +test('works with only `name`, `pid`, & `hostname` present', t => { + const str = prettifyMetadata({ log: { name: 'foo', pid: '1234', hostname: 'bar' }, context }) + t.assert.strictEqual(str, '(foo/1234 on bar)') +}) + +test('works with only `name` & `caller` present', t => { + const str = 
prettifyMetadata({ log: { name: 'foo', caller: 'baz' }, context })
+  t.assert.strictEqual(str, '(foo) <baz>')
+})
+
+test('works with only `pid` & `caller` present', t => {
+  const str = prettifyMetadata({ log: { pid: '1234', caller: 'baz' }, context })
+  t.assert.strictEqual(str, '(1234) <baz>')
+})
+
+test('works with only `hostname` & `caller` present', t => {
+  const str = prettifyMetadata({ log: { hostname: 'bar', caller: 'baz' }, context })
+  t.assert.strictEqual(str, '(on bar) <baz>')
+})
+
+test('works with only `name`, `pid`, & `caller` present', t => {
+  const str = prettifyMetadata({ log: { name: 'foo', pid: '1234', caller: 'baz' }, context })
+  t.assert.strictEqual(str, '(foo/1234) <baz>')
+})
+
+test('works with only `name`, `hostname`, & `caller` present', t => {
+  const str = prettifyMetadata({ log: { name: 'foo', hostname: 'bar', caller: 'baz' }, context })
+  t.assert.strictEqual(str, '(foo on bar) <baz>')
+})
+
+test('works with only `caller` present', t => {
+  const str = prettifyMetadata({ log: { caller: 'baz' }, context })
+  t.assert.strictEqual(str, '<baz>')
+})
+
+test('works with only `pid`, `hostname`, & `caller` present', t => {
+  const str = prettifyMetadata({ log: { pid: '1234', hostname: 'bar', caller: 'baz' }, context })
+  t.assert.strictEqual(str, '(1234 on bar) <baz>')
+})
+
+test('works with all four present', t => {
+  const str = prettifyMetadata({ log: { name: 'foo', pid: '1234', hostname: 'bar', caller: 'baz' }, context })
+  t.assert.strictEqual(str, '(foo/1234 on bar) <baz>')
+})
+
+test('uses prettifiers from passed prettifiers object', t => {
+  const prettifiers = {
+    name (input) {
+      return input.toUpperCase()
+    },
+    pid (input) {
+      return input + '__'
+    },
+    hostname (input) {
+      return input.toUpperCase()
+    },
+    caller (input) {
+      return input.toUpperCase()
+    }
+  }
+  const str = prettifyMetadata({
+    log: { pid: '1234', hostname: 'bar', caller: 'baz', name: 'joe' },
+    context: {
+      customPrettifiers: prettifiers,
+      colorizer: { colors: {} }
+    }
+  })
+  t.assert.strictEqual(str, '(JOE/1234__ on BAR) <BAZ>')
+})
+
+test('uses colorizer from passed context to colorize metadata', t => {
+  const prettifiers = {
+    name (input, _key, _log, { colors }) {
+      return colors.blue(input)
+    },
+    pid (input, _key, _log, { colors }) {
+      return colors.red(input)
+    },
+    hostname (input, _key, _log, { colors }) {
+      return colors.green(input)
+    },
+    caller (input, _key, _log, { colors }) {
+      return colors.cyan(input)
+    }
+  }
+  const log = { name: 'foo', pid: '1234', hostname: 'bar', caller: 'baz' }
+  const colorizer = getColorizer(true)
+  const context = {
+    customPrettifiers: prettifiers,
+    colorizer
+  }
+
+  const result = prettifyMetadata({ log, context })
+
+  const colorizedName = colorizer.colors.blue(log.name)
+  const colorizedPid = colorizer.colors.red(log.pid)
+  const colorizedHostname = colorizer.colors.green(log.hostname)
+  const colorizedCaller = colorizer.colors.cyan(log.caller)
+  const expected = `(${colorizedName}/${colorizedPid} on ${colorizedHostname}) <${colorizedCaller}>`
+
+  t.assert.strictEqual(result, expected)
+})
diff --git a/node_modules/pino-pretty/lib/utils/prettify-object.js b/node_modules/pino-pretty/lib/utils/prettify-object.js
new file mode 100644
index 0000000..b6ee0fc
--- /dev/null
+++ b/node_modules/pino-pretty/lib/utils/prettify-object.js
@@ -0,0 +1,112 @@
+'use strict'
+
+module.exports = prettifyObject
+
+const {
+  LOGGER_KEYS
+} = require('../constants')
+
+const stringifySafe = require('fast-safe-stringify')
+const joinLinesWithIndentation = require('./join-lines-with-indentation')
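+// Note: prettifyObject (below) splits the log's entries into plain keys and
+// error-like keys (per `errorLikeObjectKeys`), applies any matching entry in
+// `customPrettifiers` before stringification, and renders plain keys inline,
+// or as one JSON line when `singleLine` is set, while error-like keys are
+// routed through `prettifyError`.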
+const prettifyError = require('./prettify-error') + +/** + * @typedef {object} PrettifyObjectParams + * @property {object} log The object to prettify. + * @property {boolean} [excludeLoggerKeys] Indicates if known logger specific + * keys should be excluded from prettification. Default: `true`. + * @property {string[]} [skipKeys] A set of object keys to exclude from the + * * prettified result. Default: `[]`. + * @property {PrettyContext} context The context object built from parsing + * the options. + */ + +/** + * Prettifies a standard object. Special care is taken when processing the object + * to handle child objects that are attached to keys known to contain error + * objects. + * + * @param {PrettifyObjectParams} input + * + * @returns {string} The prettified string. This can be as little as `''` if + * there was nothing to prettify. + */ +function prettifyObject ({ + log, + excludeLoggerKeys = true, + skipKeys = [], + context +}) { + const { + EOL: eol, + IDENT: ident, + customPrettifiers, + errorLikeObjectKeys: errorLikeKeys, + objectColorizer, + singleLine, + colorizer + } = context + const keysToIgnore = [].concat(skipKeys) + + /* istanbul ignore else */ + if (excludeLoggerKeys === true) Array.prototype.push.apply(keysToIgnore, LOGGER_KEYS) + + let result = '' + + // Split object keys into two categories: error and non-error + const { plain, errors } = Object.entries(log).reduce(({ plain, errors }, [k, v]) => { + if (keysToIgnore.includes(k) === false) { + // Pre-apply custom prettifiers, because all 3 cases below will need this + const pretty = typeof customPrettifiers[k] === 'function' + ? customPrettifiers[k](v, k, log, { colors: colorizer.colors }) + : v + if (errorLikeKeys.includes(k)) { + errors[k] = pretty + } else { + plain[k] = pretty + } + } + return { plain, errors } + }, { plain: {}, errors: {} }) + + if (singleLine) { + // Stringify the entire object as a single JSON line + /* istanbul ignore else */ + if (Object.keys(plain).length > 0) { + result += objectColorizer.greyMessage(stringifySafe(plain)) + } + result += eol + // Avoid printing the escape character on escaped backslashes. + result = result.replace(/\\\\/gi, '\\') + } else { + // Put each object entry on its own line + Object.entries(plain).forEach(([keyName, keyValue]) => { + // custom prettifiers are already applied above, so we can skip it now + let lines = typeof customPrettifiers[keyName] === 'function' + ? keyValue + : stringifySafe(keyValue, null, 2) + + if (lines === undefined) return + + // Avoid printing the escape character on escaped backslashes. + lines = lines.replace(/\\\\/gi, '\\') + + const joinedLines = joinLinesWithIndentation({ input: lines, ident, eol }) + result += `${ident}${objectColorizer.property(keyName)}:${joinedLines.startsWith(eol) ? '' : ' '}${joinedLines}${eol}` + }) + } + + // Errors + Object.entries(errors).forEach(([keyName, keyValue]) => { + // custom prettifiers are already applied above, so we can skip it now + const lines = typeof customPrettifiers[keyName] === 'function' + ? 
keyValue + : stringifySafe(keyValue, null, 2) + + if (lines === undefined) return + + result += prettifyError({ keyName, lines, eol, ident }) + }) + + return result +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-object.test.js b/node_modules/pino-pretty/lib/utils/prettify-object.test.js new file mode 100644 index 0000000..673d915 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-object.test.js @@ -0,0 +1,193 @@ +'use strict' + +const { test } = require('node:test') +const colors = require('../colors') +const prettifyObject = require('./prettify-object') +const { + ERROR_LIKE_KEYS +} = require('../constants') + +const context = { + EOL: '\n', + IDENT: ' ', + customPrettifiers: {}, + errorLikeObjectKeys: ERROR_LIKE_KEYS, + objectColorizer: colors(), + singleLine: false, + colorizer: colors() +} + +test('returns empty string if no properties present', t => { + const str = prettifyObject({ log: {}, context }) + t.assert.strictEqual(str, '') +}) + +test('works with single level properties', t => { + const str = prettifyObject({ log: { foo: 'bar' }, context }) + t.assert.strictEqual(str, ' foo: "bar"\n') +}) + +test('works with multiple level properties', t => { + const str = prettifyObject({ log: { foo: { bar: 'baz' } }, context }) + t.assert.strictEqual(str, ' foo: {\n "bar": "baz"\n }\n') +}) + +test('skips specified keys', t => { + const str = prettifyObject({ + log: { foo: 'bar', hello: 'world' }, + skipKeys: ['foo'], + context + }) + t.assert.strictEqual(str, ' hello: "world"\n') +}) + +test('ignores predefined keys', t => { + const str = prettifyObject({ log: { foo: 'bar', pid: 12345 }, context }) + t.assert.strictEqual(str, ' foo: "bar"\n') +}) + +test('ignores escaped backslashes in string values', t => { + const str = prettifyObject({ log: { foo_regexp: '\\[^\\w\\s]\\' }, context }) + t.assert.strictEqual(str, ' foo_regexp: "\\[^\\w\\s]\\"\n') +}) + +test('ignores escaped backslashes in string values (singleLine option)', t => { + const str = prettifyObject({ + log: { foo_regexp: '\\[^\\w\\s]\\' }, + context: { + ...context, + singleLine: true + } + }) + t.assert.strictEqual(str, '{"foo_regexp":"\\[^\\w\\s]\\"}\n') +}) + +test('works with error props', t => { + const err = Error('Something went wrong') + const serializedError = { + message: err.message, + stack: err.stack + } + const str = prettifyObject({ log: { error: serializedError }, context }) + t.assert.ok(str.startsWith(' error:')) + t.assert.ok(str.includes(' "message": "Something went wrong",')) + t.assert.ok(str.includes(' Error: Something went wrong')) +}) + +test('customPrettifiers gets applied', t => { + const customPrettifiers = { + foo: v => v.toUpperCase() + } + const str = prettifyObject({ + log: { foo: 'foo' }, + context: { + ...context, + customPrettifiers + } + }) + t.assert.strictEqual(str.startsWith(' foo: FOO'), true) +}) + +test('skips lines omitted by customPrettifiers', t => { + const customPrettifiers = { + foo: () => { return undefined } + } + const str = prettifyObject({ + log: { foo: 'foo', bar: 'bar' }, + context: { + ...context, + customPrettifiers + } + }) + t.assert.strictEqual(str.includes('bar: "bar"'), true) + t.assert.strictEqual(str.includes('foo: "foo"'), false) +}) + +test('joined lines omits starting eol', t => { + const str = prettifyObject({ + log: { msg: 'doing work', calls: ['step 1', 'step 2', 'step 3'], level: 30 }, + context: { + ...context, + IDENT: '', + customPrettifiers: { + calls: val => '\n' + val.map(it => ' ' + it).join('\n') + } + } + }) + 
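+  // The prettified `calls` value begins with an EOL, so prettify-object omits
+  // the space it would otherwise insert after the key name, and the joined
+  // lines start immediately on the next line.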
t.assert.strictEqual(str, [ + 'msg: "doing work"', + 'calls:', + ' step 1', + ' step 2', + ' step 3', + '' + ].join('\n')) +}) + +test('errors skips prettifiers', t => { + const customPrettifiers = { + err: () => { return 'is_err' } + } + const str = prettifyObject({ + log: { err: Error('boom') }, + context: { + ...context, + customPrettifiers + } + }) + t.assert.strictEqual(str.includes('err: is_err'), true) +}) + +test('errors skips prettifying if no lines are present', t => { + const customPrettifiers = { + err: () => { return undefined } + } + const str = prettifyObject({ + log: { err: Error('boom') }, + context: { + ...context, + customPrettifiers + } + }) + t.assert.strictEqual(str, '') +}) + +test('works with single level properties', t => { + const colorizer = colors(true) + const str = prettifyObject({ + log: { foo: 'bar' }, + context: { + ...context, + objectColorizer: colorizer, + colorizer + } + }) + t.assert.strictEqual(str, ` ${colorizer.colors.magenta('foo')}: "bar"\n`) +}) + +test('works with customColors', t => { + const colorizer = colors(true, []) + t.assert.doesNotThrow(() => { + prettifyObject({ + log: { foo: 'bar' }, + context: { + ...context, + objectColorizer: colorizer, + colorizer + } + }) + }) +}) + +test('customColors gets applied', t => { + const colorizer = colors(true, [['property', 'green']]) + const str = prettifyObject({ + log: { foo: 'bar' }, + context: { + ...context, + objectColorizer: colorizer, + colorizer + } + }) + t.assert.strictEqual(str, ` ${colorizer.colors.green('foo')}: "bar"\n`) +}) diff --git a/node_modules/pino-pretty/lib/utils/prettify-time.js b/node_modules/pino-pretty/lib/utils/prettify-time.js new file mode 100644 index 0000000..e876b35 --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-time.js @@ -0,0 +1,42 @@ +'use strict' + +module.exports = prettifyTime + +const formatTime = require('./format-time') + +/** + * @typedef {object} PrettifyTimeParams + * @property {object} log The log object with the timestamp to be prettified. + * @property {PrettyContext} context The context object built from parsing + * the options. + */ + +/** + * Prettifies a timestamp if the given `log` has either `time`, `timestamp` or custom specified timestamp + * property. + * + * @param {PrettifyTimeParams} input + * + * @returns {undefined|string} If a timestamp property cannot be found then + * `undefined` is returned. Otherwise, the prettified time is returned as a + * string. + */ +function prettifyTime ({ log, context }) { + const { + timestampKey, + translateTime: translateFormat + } = context + const prettifier = context.customPrettifiers?.time + let time = null + + if (timestampKey in log) { + time = log[timestampKey] + } else if ('timestamp' in log) { + time = log.timestamp + } + + if (time === null) return undefined + const output = translateFormat ? formatTime(time, translateFormat) : time + + return prettifier ? 
prettifier(output) : `[${output}]` +} diff --git a/node_modules/pino-pretty/lib/utils/prettify-time.test.js b/node_modules/pino-pretty/lib/utils/prettify-time.test.js new file mode 100644 index 0000000..071aadf --- /dev/null +++ b/node_modules/pino-pretty/lib/utils/prettify-time.test.js @@ -0,0 +1,227 @@ +'use strict' + +process.env.TZ = 'UTC' + +const { test } = require('node:test') +const prettifyTime = require('./prettify-time') +const { + TIMESTAMP_KEY +} = require('../constants') +const context = { + timestampKey: TIMESTAMP_KEY, + translateTime: true, + customPrettifiers: {} +} + +test('returns `undefined` if `time` or `timestamp` not in log', t => { + const str = prettifyTime({ log: {}, context }) + t.assert.strictEqual(str, undefined) +}) + +test('returns prettified formatted time from custom field', t => { + const log = { customtime: 1554642900000 } + let str = prettifyTime({ + log, + context: { + ...context, + timestampKey: 'customtime' + } + }) + t.assert.strictEqual(str, '[13:15:00.000]') + + str = prettifyTime({ + log, + context: { + ...context, + translateTime: false, + timestampKey: 'customtime' + } + }) + t.assert.strictEqual(str, '[1554642900000]') +}) + +test('returns prettified formatted time', t => { + let log = { time: 1554642900000 } + let str = prettifyTime({ + log, + context: { + ...context + } + }) + t.assert.strictEqual(str, '[13:15:00.000]') + + log = { timestamp: 1554642900000 } + str = prettifyTime({ + log, + context: { + ...context + } + }) + t.assert.strictEqual(str, '[13:15:00.000]') + + log = { time: '2019-04-07T09:15:00.000-04:00' } + str = prettifyTime({ + log, + context: { + ...context + } + }) + t.assert.strictEqual(str, '[13:15:00.000]') + + log = { timestamp: '2019-04-07T09:15:00.000-04:00' } + str = prettifyTime({ + log, + context: { + ...context + } + }) + t.assert.strictEqual(str, '[13:15:00.000]') + + log = { time: 1554642900000 } + str = prettifyTime({ + log, + context: { + ...context, + translateTime: 'd mmm yyyy H:MM' + } + }) + t.assert.strictEqual(str, '[7 Apr 2019 13:15]') + + log = { timestamp: 1554642900000 } + str = prettifyTime({ + log, + context: { + ...context, + translateTime: 'd mmm yyyy H:MM' + } + }) + t.assert.strictEqual(str, '[7 Apr 2019 13:15]') + + log = { time: '2019-04-07T09:15:00.000-04:00' } + str = prettifyTime({ + log, + context: { + ...context, + translateTime: 'd mmm yyyy H:MM' + } + }) + t.assert.strictEqual(str, '[7 Apr 2019 13:15]') + + log = { timestamp: '2019-04-07T09:15:00.000-04:00' } + str = prettifyTime({ + log, + context: { + ...context, + translateTime: 'd mmm yyyy H:MM' + } + }) + t.assert.strictEqual(str, '[7 Apr 2019 13:15]') +}) + +test('passes through value', t => { + let log = { time: 1554642900000 } + let str = prettifyTime({ + log, + context: { + ...context, + translateTime: undefined + } + }) + t.assert.strictEqual(str, '[1554642900000]') + + log = { timestamp: 1554642900000 } + str = prettifyTime({ + log, + context: { + ...context, + translateTime: undefined + } + }) + t.assert.strictEqual(str, '[1554642900000]') + + log = { time: '2019-04-07T09:15:00.000-04:00' } + str = prettifyTime({ + log, + context: { + ...context, + translateTime: undefined + } + }) + t.assert.strictEqual(str, '[2019-04-07T09:15:00.000-04:00]') + + log = { timestamp: '2019-04-07T09:15:00.000-04:00' } + str = prettifyTime({ + log, + context: { + ...context, + translateTime: undefined + } + }) + t.assert.strictEqual(str, '[2019-04-07T09:15:00.000-04:00]') +}) + +test('handles the 0 timestamp', t => { + let log = { time: 0 } + 
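+  // `time: 0` is falsy but still a valid epoch value; prettify-time uses an
+  // `in` check plus a strict `=== null` test rather than truthiness, so the
+  // zero timestamp is passed through instead of being treated as missing.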
let str = prettifyTime({
+    log,
+    context: {
+      ...context,
+      translateTime: undefined
+    }
+  })
+  t.assert.strictEqual(str, '[0]')
+
+  log = { timestamp: 0 }
+  str = prettifyTime({
+    log,
+    context: {
+      ...context,
+      translateTime: undefined
+    }
+  })
+  t.assert.strictEqual(str, '[0]')
+})
+
+test('works with epoch as a number or string', (t) => {
+  t.plan(3)
+  const epoch = 1522431328992
+  const asNumber = prettifyTime({
+    log: { time: epoch, msg: 'foo' },
+    context: {
+      ...context,
+      translateTime: true
+    }
+  })
+  const asString = prettifyTime({
+    log: { time: `${epoch}`, msg: 'foo' },
+    context: {
+      ...context,
+      translateTime: true
+    }
+  })
+  const invalid = prettifyTime({
+    log: { time: '2 days ago', msg: 'foo' },
+    context: {
+      ...context,
+      translateTime: true
+    }
+  })
+  t.assert.strictEqual(asString, '[17:35:28.992]')
+  t.assert.strictEqual(asNumber, '[17:35:28.992]')
+  t.assert.strictEqual(invalid, '[2 days ago]')
+})
+
+test('uses custom prettifier', t => {
+  const str = prettifyTime({
+    log: { time: 0 },
+    context: {
+      ...context,
+      customPrettifiers: {
+        time () {
+          return 'done'
+        }
+      }
+    }
+  })
+  t.assert.strictEqual(str, 'done')
+})
diff --git a/node_modules/pino-pretty/lib/utils/split-property-key.js b/node_modules/pino-pretty/lib/utils/split-property-key.js
new file mode 100644
index 0000000..2cba034
--- /dev/null
+++ b/node_modules/pino-pretty/lib/utils/split-property-key.js
@@ -0,0 +1,49 @@
+'use strict'
+
+module.exports = splitPropertyKey
+
+/**
+ * Splits the property key delimited by a dot character but not when it is preceded
+ * by a backslash.
+ *
+ * @param {string} key A string identifying the property.
+ *
+ * @returns {string[]} Returns a list of strings containing each delimited property.
+ * e.g. `'prop2\.domain\.corp.prop2'` should return [ 'prop2.domain.corp', 'prop2' ]
+ */
+function splitPropertyKey (key) {
+  const result = []
+  let backslash = false
+  let segment = ''
+
+  for (let i = 0; i < key.length; i++) {
+    const c = key.charAt(i)
+
+    if (c === '\\') {
+      backslash = true
+      continue
+    }
+
+    if (backslash) {
+      backslash = false
+      segment += c
+      continue
+    }
+
+    /* Non-escaped dot, push to result */
+    if (c === '.') {
+      result.push(segment)
+      segment = ''
+      continue
+    }
+
+    segment += c
+  }
+
+  /* Push last entry to result */
+  if (segment.length) {
+    result.push(segment)
+  }
+
+  return result
+}
diff --git a/node_modules/pino-pretty/lib/utils/split-property-key.test.js b/node_modules/pino-pretty/lib/utils/split-property-key.test.js
new file mode 100644
index 0000000..4def1ef
--- /dev/null
+++ b/node_modules/pino-pretty/lib/utils/split-property-key.test.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const { test } = require('node:test')
+const splitPropertyKey = require('./split-property-key')
+
+test('splitPropertyKey does not change key', t => {
+  const result = splitPropertyKey('data1')
+  t.assert.deepStrictEqual(result, ['data1'])
+})
+
+test('splitPropertyKey splits nested key', t => {
+  const result = splitPropertyKey('data1.data2.data-3')
+  t.assert.deepStrictEqual(result, ['data1', 'data2', 'data-3'])
+})
+
+test('splitPropertyKey splits nested keys ending with a dot', t => {
+  const result = splitPropertyKey('data1.data2.data-3.')
+  t.assert.deepStrictEqual(result, ['data1', 'data2', 'data-3'])
+})
+
+test('splitPropertyKey splits nested escaped key', t => {
+  const result = splitPropertyKey('logging\\.domain\\.corp/operation.foo.bar-2')
+  t.assert.deepStrictEqual(result, ['logging.domain.corp/operation', 'foo', 'bar-2'])
+})
+
+test('splitPropertyKey 
splits nested escaped key with special characters', t => { + const result = splitPropertyKey('logging\\.domain\\.corp/operation.!\t@#$%^&*()_+=-<>.bar\\.2') + t.assert.deepStrictEqual(result, ['logging.domain.corp/operation', '!\t@#$%^&*()_+=-<>', 'bar.2']) +}) diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/.github/dependabot.yml b/node_modules/pino-pretty/node_modules/pino-abstract-transport/.github/dependabot.yml new file mode 100644 index 0000000..dfa7fa6 --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 10 diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/.github/workflows/ci.yml b/node_modules/pino-pretty/node_modules/pino-abstract-transport/.github/workflows/ci.yml new file mode 100644 index 0000000..b125c8d --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/.github/workflows/ci.yml @@ -0,0 +1,80 @@ +name: CI + +on: + push: + paths-ignore: + - 'docs/**' + - '*.md' + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + +# This allows a subsequently queued workflow run to interrupt previous runs +concurrency: + group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}" + cancel-in-progress: true + +jobs: + dependency-review: + name: Dependency Review + if: github.event_name == 'pull_request' + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v5 + with: + persist-credentials: false + + - name: Dependency review + uses: actions/dependency-review-action@v3 + + test: + name: Test + runs-on: ${{ matrix.os }} + permissions: + contents: read + strategy: + matrix: + node-version: [20, 22, 24] + os: [macos-latest, ubuntu-latest, windows-latest] + + steps: + - name: Check out repo + uses: actions/checkout@v5 + with: + persist-credentials: false + + - name: Setup Node ${{ matrix.node-version }} + uses: actions/setup-node@v5 + with: + node-version: ${{ matrix.node-version }} + + - name: Restore cached dependencies + uses: actions/cache@v4 + with: + path: node_modules + key: node-modules-${{ hashFiles('package.json') }} + + - name: Install dependencies + run: npm i --ignore-scripts + + - name: Run Tests + run: npm run test-ci + + automerge: + name: Automerge Dependabot PRs + if: > + github.event_name == 'pull_request' && + github.event.pull_request.user.login == 'dependabot[bot]' + needs: test + permissions: + pull-requests: write + contents: write + runs-on: ubuntu-latest + steps: + - uses: fastify/github-action-merge-dependabot@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/.husky/pre-commit b/node_modules/pino-pretty/node_modules/pino-abstract-transport/.husky/pre-commit new file mode 100644 index 0000000..610c2a5 --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/usr/bin/env sh +. 
"$(dirname -- "$0")/_/husky.sh" + +npm test diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/LICENSE b/node_modules/pino-pretty/node_modules/pino-abstract-transport/LICENSE new file mode 100644 index 0000000..9dbf149 --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 pino + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/README.md b/node_modules/pino-pretty/node_modules/pino-abstract-transport/README.md new file mode 100644 index 0000000..74ff3b5 --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/README.md @@ -0,0 +1,171 @@ +# pino-abstract-transport +[![npm version](https://img.shields.io/npm/v/pino-abstract-transport)](https://www.npmjs.com/package/pino-abstract-transport) +[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino-abstract-transport/ci.yml?branch=main)](https://github.com/pinojs/pino-abstract-transport/actions) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) + +Write Pino transports easily. + +## Install + +```sh +npm i pino-abstract-transport +``` + +## Usage + +```js +import build from 'pino-abstract-transport' + +export default async function (opts) { + return build(async function (source) { + for await (let obj of source) { + console.log(obj) + } + }) +} +``` + +or in CommonJS and streams: + +```js +'use strict' + +const build = require('pino-abstract-transport') + +module.exports = function (opts) { + return build(function (source) { + source.on('data', function (obj) { + console.log(obj) + }) + }) +} +``` + +## Typescript usage + +Install the type definitions for node. Make sure the major version of the type definitions matches the node version you are using. + +#### Node 16 + +```sh +npm i -D @types/node@16 +``` + +## API + +### build(fn, opts) => Stream + +Create a [`split2`](http://npm.im/split2) instance and returns it. +This same instance is also passed to the given function, which is called +synchronously. + +If `opts.transform` is `true`, `pino-abstract-transform` will +wrap the split2 instance and the returned stream using [`duplexify`](https://www.npmjs.com/package/duplexify), +so they can be concatenated into multiple transports. 
+
+#### Events emitted
+
+In addition to all events emitted by a [`Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable)
+stream, it emits the following events:
+
+* `unknown` emitted when an unparsable line is found; both the line and the optional error are emitted.
+
+#### Options
+
+* `parse` an option to change the data format passed to the build function. When this option is set to `lines`,
+  the data is passed as a string, otherwise the data is passed as an object. Default: `undefined`.
+
+* `close(err, cb)` a function that is called to shut down the transport. It's called both on error and non-error shutdowns.
+  It can also return a promise. In this case discard the `cb` argument.
+
+* `parseLine(line)` a function that is used to parse a line received from `pino`.
+
+* `expectPinoConfig` a boolean that indicates if the transport expects Pino to add some of its configuration to the stream. Default: `false`.
+
+## Example
+
+### custom parseLine
+
+You can allow custom `parseLine` from users while providing a simple and safe default parseLine.
+
+```js
+'use strict'
+
+const build = require('pino-abstract-transport')
+
+function defaultParseLine (line) {
+  const obj = JSON.parse(line)
+  // property foo will be added on each line
+  obj.foo = 'bar'
+  return obj
+}
+
+module.exports = function (opts) {
+  const parseLine = typeof opts.parseLine === 'function' ? opts.parseLine : defaultParseLine
+  return build(function (source) {
+    source.on('data', function (obj) {
+      console.log(obj)
+    })
+  }, {
+    parseLine: parseLine
+  })
+}
+```
+
+### Stream concatenation / pipeline
+
+You can pipeline multiple transports:
+
+```js
+const build = require('pino-abstract-transport')
+const { Transform, pipeline } = require('stream')
+
+function buildTransform () {
+  return build(function (source) {
+    return new Transform({
+      objectMode: true,
+      autoDestroy: true,
+      transform (line, enc, cb) {
+        line.service = 'bob'
+        cb(null, JSON.stringify(line))
+      }
+    })
+  }, { enablePipelining: true })
+}
+
+function buildDestination () {
+  return build(function (source) {
+    source.on('data', function (obj) {
+      console.log(obj)
+    })
+  })
+}
+
+pipeline(process.stdin, buildTransform(), buildDestination(), function (err) {
+  console.log('pipeline completed!', err)
+})
+```
+
+### Using pino config
+
+Setting `expectPinoConfig` to `true` will make the transport wait for pino to send its configuration before starting to process logs. It will add `levels`, `messageKey` and `errorKey` to the stream.
+
+When used with an incompatible version of pino, the stream will immediately error.
+
+```js
+import build from 'pino-abstract-transport'
+
+export default function (opts) {
+  return build(async function (source) {
+    for await (const obj of source) {
+      console.log(`[${source.levels.labels[obj.level]}]: ${obj[source.messageKey]}`)
+    }
+  }, {
+    expectPinoConfig: true
+  })
+}
+```
+
+## License
+
+MIT
diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/index.d.ts b/node_modules/pino-pretty/node_modules/pino-abstract-transport/index.d.ts
new file mode 100644
index 0000000..1ac49a5
--- /dev/null
+++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/index.d.ts
@@ -0,0 +1,122 @@
+// Type definitions for pino-abstract-transport 0.4.0
+// Project: https://github.com/pinojs/pino-abstract-transport#readme
+// Definitions by: Diyar Oktay
+
+/// <reference types="node" />
+
+import { Transform } from "stream";
+
+type BuildOptions = {
+  /**
+   * `parseLine(line)` a function that is used to parse a line received from pino.
+   * @default JSON.parse
+   */
+  parseLine?: (line: string) => unknown;
+
+  /**
+   * `parse` an option to change the data format passed to the build function.
+   * @default undefined
+   *
+   */
+  parse?: "lines";
+
+  /**
+   * `close(err, cb)` a function that is called to shut down the transport.
+   * It's called both on error and non-error shutdowns. It can also return
+   * a promise. In this case discard the cb argument.
+   *
+   * @example
+   * ```typescript
+   * {
+   *   close: function (err, cb) {
+   *     process.nextTick(cb, err)
+   *   }
+   * }
+   * ```
+   * */
+  close?: (err: Error, cb: Function) => void | Promise<void>;
+
+  /**
+   * `metadata` If set to false, do not add metadata properties to the returned stream
+   */
+  metadata?: false;
+
+  /**
+   * `expectPinoConfig` If set to true, the transport will wait for pino to send its
+   * configuration before starting to process logs.
+   */
+  expectPinoConfig?: boolean;
+};
+
+/**
+ * Pass these options to wrap the split2 stream and
+ * the returned stream into a Duplex
+ */
+type EnablePipelining = BuildOptions & {
+  enablePipelining: true;
+};
+
+/**
+ * Creates a split2 instance and returns it. This same instance is also passed
+ * to the given function, which is called after pino has sent its configuration.
+ *
+ * @returns {Promise<Transform & build.OnUnknown>} the split2 instance
+ */
+declare function build(
+  fn: (transform: Transform & build.OnUnknown) => void | Promise<void>,
+  opts: BuildOptions & { expectPinoConfig: true }
+): Promise<Transform & build.OnUnknown>;
+
+/**
+ * Creates a split2 instance and returns it. This same instance is also passed
+ * to the given function, which is called synchronously.
+ *
+ * @returns {Transform} the split2 instance
+ */
+declare function build(
+  fn: (transform: Transform & build.OnUnknown) => void | Promise<void>,
+  opts?: BuildOptions
+): Transform & build.OnUnknown;
+
+/**
+ * Creates a split2 instance and passes it to the given function, which is called
+ * after pino has sent its configuration. Then wraps the split2 instance and
+ * the returned stream into a Duplex, so they can be concatenated into multiple
+ * transports.
+ *
+ * @returns {Promise<Transform>} the wrapped split2 instance
+ */
+declare function build(
+  fn: (transform: Transform & build.OnUnknown) => Transform & build.OnUnknown,
+  opts: EnablePipelining & { expectPinoConfig: true }
+): Promise<Transform>;
+
+/**
+ * Creates a split2 instance and passes it to the given function, which is called
+ * synchronously. Then wraps the split2 instance and the returned stream into a
+ * Duplex, so they can be concatenated into multiple transports.
+ * + * @returns {Transform} the wrapped split2 instance + */ +declare function build( + fn: (transform: Transform & build.OnUnknown) => Transform & build.OnUnknown, + opts: EnablePipelining +): Transform; + +declare namespace build { + export interface OnUnknown { + /** + * `unknown` is the event emitted where an unparsable line is found + * + * @param event 'unknown' + * @param line the unparsable line + * @param error the error that was thrown when parsing the line + */ + on( + event: "unknown", + listener: (line: string, error: unknown) => void + ): void; + } +} + +export = build; diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/index.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/index.js new file mode 100644 index 0000000..009d1f2 --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/index.js @@ -0,0 +1,128 @@ +'use strict' + +const metadata = Symbol.for('pino.metadata') +const split = require('split2') +const { Duplex } = require('stream') +const { parentPort, workerData } = require('worker_threads') + +function createDeferred () { + let resolve + let reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + promise.resolve = resolve + promise.reject = reject + return promise +} + +module.exports = function build (fn, opts = {}) { + const waitForConfig = opts.expectPinoConfig === true && workerData?.workerData?.pinoWillSendConfig === true + const parseLines = opts.parse === 'lines' + const parseLine = typeof opts.parseLine === 'function' ? opts.parseLine : JSON.parse + const close = opts.close || defaultClose + const stream = split(function (line) { + let value + + try { + value = parseLine(line) + } catch (error) { + this.emit('unknown', line, error) + return + } + + if (value === null) { + this.emit('unknown', line, 'Null value ignored') + return + } + + if (typeof value !== 'object') { + value = { + data: value, + time: Date.now() + } + } + + if (stream[metadata]) { + stream.lastTime = value.time + stream.lastLevel = value.level + stream.lastObj = value + } + + if (parseLines) { + return line + } + + return value + }, { autoDestroy: true }) + + stream._destroy = function (err, cb) { + const promise = close(err, cb) + if (promise && typeof promise.then === 'function') { + promise.then(cb, cb) + } + } + + if (opts.expectPinoConfig === true && workerData?.workerData?.pinoWillSendConfig !== true) { + setImmediate(() => { + stream.emit('error', new Error('This transport is not compatible with the current version of pino. 
Please upgrade pino to the latest version.')) + }) + } + + if (opts.metadata !== false) { + stream[metadata] = true + stream.lastTime = 0 + stream.lastLevel = 0 + stream.lastObj = null + } + + if (waitForConfig) { + let pinoConfig = {} + const configReceived = createDeferred() + parentPort.on('message', function handleMessage (message) { + if (message.code === 'PINO_CONFIG') { + pinoConfig = message.config + configReceived.resolve() + parentPort.off('message', handleMessage) + } + }) + + Object.defineProperties(stream, { + levels: { + get () { return pinoConfig.levels } + }, + messageKey: { + get () { return pinoConfig.messageKey } + }, + errorKey: { + get () { return pinoConfig.errorKey } + } + }) + + return configReceived.then(finish) + } + + return finish() + + function finish () { + let res = fn(stream) + + if (res && typeof res.catch === 'function') { + res.catch((err) => { + stream.destroy(err) + }) + + // set it to null to not retain a reference to the promise + res = null + } else if (opts.enablePipelining && res) { + return Duplex.from({ writable: stream, readable: res }) + } + + return stream + } +} + +function defaultClose (err, cb) { + process.nextTick(cb, err) +} diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/package.json b/node_modules/pino-pretty/node_modules/pino-abstract-transport/package.json new file mode 100644 index 0000000..d93f8ea --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/package.json @@ -0,0 +1,41 @@ +{ + "name": "pino-abstract-transport", + "version": "3.0.0", + "description": "Write Pino transports easily", + "main": "index.js", + "scripts": { + "prepare": "husky install", + "test": "standard | snazzy && borp --check-coverage 'test/*.test.js' && tsd", + "test-ci": "npm test" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/pinojs/pino-abstract-transport.git" + }, + "keywords": [ + "pino", + "transport" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/pinojs/pino-abstract-transport/issues" + }, + "homepage": "https://github.com/pinojs/pino-abstract-transport#readme", + "dependencies": { + "split2": "^4.0.0" + }, + "devDependencies": { + "@matteo.collina/tspl": "^0.2.0", + "@types/node": "^20.1.0", + "borp": "^0.20.2", + "husky": "^9.0.6", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "thread-stream": "^3.1.0", + "tsd": "^0.31.0" + }, + "tsd": { + "directory": "./test/types" + } +} diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/base.test.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/base.test.js new file mode 100644 index 0000000..45aa72a --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/base.test.js @@ -0,0 +1,473 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { once } = require('node:events') +const { Transform, pipeline } = require('node:stream') +const tspl = require('@matteo.collina/tspl') + +const match = require('./match') +const build = require('../') + +test('parse newlined delimited JSON', async (t) => { + const plan = tspl(t, { plan: 2 }) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) 
{ + match(expected.shift(), line, { assert: plan }) + }) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('parse newline delimited JSON', async (t) => { + const plan = tspl(t, { plan: 2 }) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + match(expected.shift(), line, { assert: plan }) + }) + }, { parse: 'json' }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() +}) + +test('null support', async (t) => { + const plan = tspl(t, { plan: 1 }) + const stream = build(function (source) { + source.on('unknown', function (line) { + match('null', line, { assert: plan }) + }) + }) + + stream.write('null\n') + stream.end() + + await plan +}) + +test('broken json', async (t) => { + const plan = tspl(t, { plan: 2 }) + const expected = '{ "truncated' + const stream = build(function (source) { + source.on('unknown', function (line, error) { + match(expected, line, { assert: plan }) + const regex = /^(Unexpected end of JSON input|Unterminated string in JSON at position 12)( \(line 1 column 13\))?$/ + plan.match(error.message, regex) + }) + }) + + stream.write(expected + '\n') + stream.end() + + await plan +}) + +test('pure values', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = build(function (source) { + source.on('data', function (line) { + plan.equal(line.data, 42) + plan.ok(line.time) + plan.equal(new Date(line.time).getTime(), line.time) + }) + }) + + stream.write('42\n') + stream.end() + + await plan +}) + +test('support async iteration', async (t) => { + const plan = tspl(t, { plan: 2 }) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(async function (source) { + for await (const line of source) { + match(expected.shift(), line, { assert: plan }) + } + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('rejecting errors the stream', async () => { + const stream = build(async function (source) { + throw new Error('kaboom') + }) + + const [err] = await once(stream, 'error') + assert.equal(err.message, 'kaboom') +}) + +test('emits an error if the transport expects pino to send the config, but pino is not going to', async function () { + const stream = build(() => {}, { expectPinoConfig: true }) + const [err] = await once(stream, 'error') + assert.equal(err.message, 'This transport is not compatible with the current version of pino. 
Please upgrade pino to the latest version.') +}) + +test('set metadata', async (t) => { + const plan = tspl(t, { plan: 9 }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected.shift() + plan.equal(this.lastLevel, obj.level) + plan.equal(this.lastTime, obj.time) + match(this.lastObj, obj, { assert: plan }) + match(obj, line, { assert: plan }) + }) + }, { metadata: true }) + + plan.equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('parse lines', async (t) => { + const plan = tspl(t, { plan: 9 }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected.shift() + plan.equal(this.lastLevel, obj.level) + plan.equal(this.lastTime, obj.time) + match(this.lastObj, obj, { assert: plan }) + match(JSON.stringify(obj), line, { assert: plan }) + }) + }, { metadata: true, parse: 'lines' }) + + plan.equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('custom parse line function', async (t) => { + const plan = tspl(t, { plan: 11 }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + let num = 0 + + function parseLine (str) { + const obj = JSON.parse(str) + match(expected[num], obj, { assert: plan }) + return obj + } + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected[num] + plan.equal(this.lastLevel, obj.level) + plan.equal(this.lastTime, obj.time) + match(this.lastObj, obj, { assert: plan }) + match(obj, line, { assert: plan }) + num++ + }) + }, { metadata: true, parseLine }) + + plan.equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('set metadata (default)', async (t) => { + const plan = tspl(t, { plan: 9 }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected.shift() + plan.equal(this.lastLevel, obj.level) + plan.equal(this.lastTime, obj.time) + match(this.lastObj, obj, { assert: plan }) + match(obj, line, { assert: plan }) + }) + }) + + plan.equal(stream[Symbol.for('pino.metadata')], true) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('do not set metadata', async (t) => { + const plan = tspl(t, { plan: 9 }) + + 
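+    // `metadata: true` mirrors the default behavior: the stream exposes
+    // Symbol.for('pino.metadata') and records lastTime, lastLevel, and
+    // lastObj for every parsed line (see index.js above).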
const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + const obj = expected.shift() + plan.equal(this.lastLevel, undefined) + plan.equal(this.lastTime, undefined) + plan.equal(this.lastObj, undefined) + match(obj, line, { assert: plan }) + }) + }, { metadata: false }) + + plan.equal(stream[Symbol.for('pino.metadata')], undefined) + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('close logic', async (t) => { + const plan = tspl(t, { plan: 3 }) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + match(expected.shift(), line, { assert: plan }) + }) + }, { + close (err, cb) { + plan.ok('close called') + process.nextTick(cb, err) + } + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('close with promises', async (t) => { + const plan = tspl(t, { plan: 3 }) + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const stream = build(function (source) { + source.on('data', function (line) { + match(expected.shift(), line, { assert: plan }) + }) + }, { + async close () { + plan.ok('close called') + } + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan +}) + +test('support Transform streams', async (t) => { + const plan = tspl(t, { plan: 7 }) + + const expected1 = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const expected2 = [] + + const stream1 = build(function (source) { + const transform = new Transform({ + objectMode: true, + autoDestroy: true, + transform (chunk, enc, cb) { + match(expected1.shift(), chunk, { assert: plan }) + chunk.service = 'from transform' + expected2.push(chunk) + cb(null, JSON.stringify(chunk) + '\n') + } + }) + + pipeline(source, transform, () => {}) + + return transform + }, { enablePipelining: true }) + + const stream2 = build(function (source) { + source.on('data', function (line) { + match(expected2.shift(), line, { assert: plan }) + }) + }) + + pipeline(stream1, stream2, function (err) { + plan.equal(err, undefined) + plan.deepStrictEqual(expected1, []) + plan.deepStrictEqual(expected2, []) + }) + + const lines = expected1.map(JSON.stringify).join('\n') + stream1.write(lines) + stream1.end() + + await plan +}) diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-async-iteration.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-async-iteration.js new file mode 100644 index 0000000..ddcdaf3 --- /dev/null +++ 
b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-async-iteration.js @@ -0,0 +1,22 @@ +'use strict' + +const build = require('../..') + +module.exports = async function (threadStreamOpts) { + const { port, opts = {} } = threadStreamOpts + return build( + async function (source) { + for await (const obj of source) { + port.postMessage({ + data: obj, + pinoConfig: { + levels: source.levels, + messageKey: source.messageKey, + errorKey: source.errorKey + } + }) + } + }, + opts + ) +} diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-on-data.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-on-data.js new file mode 100644 index 0000000..58143fa --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-on-data.js @@ -0,0 +1,22 @@ +'use strict' + +const build = require('../..') + +module.exports = async function (threadStreamOpts) { + const { port, opts = {} } = threadStreamOpts + return build( + function (source) { + source.on('data', function (line) { + port.postMessage({ + data: line, + pinoConfig: { + levels: source.levels, + messageKey: source.messageKey, + errorKey: source.errorKey + } + }) + }) + }, + opts + ) +} diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-transform.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-transform.js new file mode 100644 index 0000000..66c84dc --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/transport-transform.js @@ -0,0 +1,24 @@ +'use strict' + +const { Transform, pipeline } = require('stream') +const build = require('../..') + +module.exports = function (threadStreamOpts) { + const { opts = {} } = threadStreamOpts + return build(function (source) { + const transform = new Transform({ + objectMode: true, + autoDestroy: true, + transform (chunk, enc, cb) { + chunk.service = 'from transform' + chunk.level = `${source.levels.labels[chunk.level]}(${chunk.level})` + chunk[source.messageKey] = chunk[source.messageKey].toUpperCase() + cb(null, JSON.stringify(chunk) + '\n') + } + }) + + pipeline(source, transform, () => {}) + + return transform + }, { ...opts, enablePipelining: true }) +} diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/worker-pipeline.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/worker-pipeline.js new file mode 100644 index 0000000..38af252 --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/fixtures/worker-pipeline.js @@ -0,0 +1,15 @@ +'use strict' + +const { pipeline, PassThrough } = require('stream') + +module.exports = async function ({ targets }) { + const streams = await Promise.all(targets.map(async (t) => { + const fn = require(t.target) + const stream = await fn(t.options) + return stream + })) + + const stream = new PassThrough() + pipeline(stream, ...streams, () => {}) + return stream +} diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/match.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/match.js new file mode 100644 index 0000000..1cb792b --- /dev/null +++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/match.js @@ -0,0 +1,24 @@ +'use strict' + +module.exports = match + +/** + * match is a bare-bones object shape matcher. 
We should be able to replace
+ * this with `assert.partialDeepStrictEqual` when v22 is our minimum.
+ *
+ * @param {object} found
+ * @param {object} expected
+ */
+function match (found, expected, { assert = require('node:assert') } = {}) {
+  for (const [key, value] of Object.entries(expected)) {
+    if (Object.prototype.toString.call(value) === '[object Object]') {
+      match(found[key], value)
+      continue
+    }
+    if (value !== found[key]) {
+      throw Error(`expected "${value}" but found "${found[key]}"`)
+    }
+  }
+
+  assert.ok('passed')
+}
diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/types/index.test-d.ts b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/types/index.test-d.ts
new file mode 100644
index 0000000..b5f6a85
--- /dev/null
+++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/types/index.test-d.ts
@@ -0,0 +1,31 @@
+import build, { OnUnknown } from "../../index";
+import { expectType } from "tsd";
+import { Transform } from "stream";
+
+/**
+ * If enablePipelining is set to true, the function passed as an argument
+ * must return a transform. The unknown event should be listened to on the
+ * stream passed in the first argument.
+ */
+expectType<Transform>(build((source) => source, { enablePipelining: true }));
+
+/**
+ * If expectPinoConfig is set with enablePipelining, build returns a promise
+ */
+expectType<(Promise<Transform>)>(build((source) => source, { enablePipelining: true, expectPinoConfig: true }));
+
+/**
+ * If enablePipelining is not set the unknown event can be listened to on
+ * the returned stream.
+ */
+expectType<Transform & OnUnknown>(build((source) => {}));
+
+/**
+ * If expectPinoConfig is set, build returns a promise
+ */
+expectType<(Promise<Transform & OnUnknown>)>(build((source) => {}, { expectPinoConfig: true }));
+
+/**
+ * build also accepts an async function
+ */
+expectType<Transform & OnUnknown>(build(async (source) => {}));
diff --git a/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/worker.test.js b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/worker.test.js
new file mode 100644
index 0000000..9bed933
--- /dev/null
+++ b/node_modules/pino-pretty/node_modules/pino-abstract-transport/test/worker.test.js
@@ -0,0 +1,372 @@
+'use strict'
+
+const test = require('node:test')
+const assert = require('node:assert')
+const { once } = require('node:events')
+const { join } = require('node:path')
+const { MessageChannel } = require('node:worker_threads')
+const ThreadStream = require('thread-stream')
+const tspl = require('@matteo.collina/tspl')
+
+const match = require('./match')
+
+workerTest('transport-on-data.js')
+workerTest('transport-async-iteration.js', ' when using async iteration')
+
+function workerTest (filename, description = '') {
+  test(`does not wait for pino to send config by default${description}`, async function (t) {
+    const plan = tspl(t, { plan: 4 })
+    const { port1, port2 } = new MessageChannel()
+    const stream = new ThreadStream({
+      filename: join(__dirname, 'fixtures', filename),
+      workerData: { port: port1 },
+      workerOpts: {
+        transferList: [port1]
+      }
+    })
+
+    const expected = [{
+      level: 30,
+      time: 1617955768092,
+      pid: 2942,
+      hostname: 'MacBook-Pro.local',
+      msg: 'hello world'
+    }, {
+      level: 30,
+      time: 1617955768092,
+      pid: 2942,
+      hostname: 'MacBook-Pro.local',
+      msg: 'another message',
+      prop: 42
+    }]
+
+    const emptyPinoConfig = {
+      levels: undefined,
+      messageKey: undefined,
+      errorKey: undefined
+    }
+
+    port2.on('message', function (message) {
+      match(expected.shift(), message.data, { assert: plan })
match(emptyPinoConfig, message.pinoConfig, { assert: plan }) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan + }) + + test(`does not wait for pino to send config if transport is not expecting it${description}`, async function (t) { + const plan = tspl(t, { plan: 4 }) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + port: port1, + pinoWillSendConfig: true + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const emptyPinoConfig = { + levels: undefined, + messageKey: undefined, + errorKey: undefined + } + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + + port2.on('message', function (message) { + match(expected.shift(), message.data, { assert: plan }) + match(emptyPinoConfig, message.pinoConfig, { assert: plan }) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.end() + + await plan + }) + + test(`waits for the pino config when pino intends to send it and the transport requests it${description}`, async function (t) { + const plan = tspl(t, { plan: 4 }) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + port: port1, + pinoWillSendConfig: true, + opts: { + expectPinoConfig: true + } + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + match(expected.shift(), message.data, { assert: plan }) + match(pinoConfig, message.pinoConfig, { assert: plan }) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.write(lines) + stream.end() + + await plan + }) + + test(`continues to listen if it receives a message that is not PINO_CONFIG${description}`, async function (t) { + const plan = tspl(t, { plan: 4 }) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', 'transport-on-data.js'), + workerData: { + port: port1, + pinoWillSendConfig: true, + opts: { + expectPinoConfig: true + } + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + match(expected.shift(), 
message.data, { assert: plan }) + match(pinoConfig, message.pinoConfig, { assert: plan }) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.emit('message', 'not a PINO_CONFIG') + stream.emit('message', { code: 'NOT_PINO_CONFIG', config: { levels: 'foo', messageKey: 'bar', errorKey: 'baz' } }) + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.write(lines) + stream.end() + + await plan + }) + + test(`waits for the pino config even if it is sent after write${description}`, async function (t) { + const plan = tspl(t, { plan: 4 }) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + port: port1, + pinoWillSendConfig: true, + opts: { + expectPinoConfig: true + } + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }] + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + match(expected.shift(), message.data, { assert: plan }) + match(pinoConfig, message.pinoConfig, { assert: plan }) + }) + + const lines = expected.map(JSON.stringify).join('\n') + stream.write(lines) + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.end() + + await plan + }) + + test(`emits an error if the transport expects pino to send the config, but pino is not going to${description}`, async function () { + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', filename), + workerData: { + opts: { + expectPinoConfig: true + } + } + }) + const [err] = await once(stream, 'error') + assert.equal(err.message, 'This transport is not compatible with the current version of pino. 
Please upgrade pino to the latest version.') + assert.ok(stream.destroyed) + }) +} + +test('waits for the pino config when pipelining', async function (t) { + const plan = tspl(t, { plan: 2 }) + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'fixtures', 'worker-pipeline.js'), + workerData: { + pinoWillSendConfig: true, + targets: [{ + target: './transport-transform.js', + options: { + opts: { expectPinoConfig: true } + } + }, { + target: './transport-on-data.js', + options: { + port: port1 + } + }] + }, + workerOpts: { + transferList: [port1] + } + }) + + const expected = [{ + level: 'info(30)', + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'HELLO WORLD', + service: 'from transform' + }, { + level: 'info(30)', + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'ANOTHER MESSAGE', + prop: 42, + service: 'from transform' + }] + + const lines = [{ + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'hello world' + }, { + level: 30, + time: 1617955768092, + pid: 2942, + hostname: 'MacBook-Pro.local', + msg: 'another message', + prop: 42 + }].map(JSON.stringify).join('\n') + + const pinoConfig = { + levels: { + labels: { 30: 'info' }, + values: { info: 30 } + }, + messageKey: 'msg', + errorKey: 'err' + } + + port2.on('message', function (message) { + match(expected.shift(), message.data, { assert: plan }) + }) + + stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig }) + stream.write(lines) + stream.end() + + await plan +}) diff --git a/node_modules/pino-pretty/package.json b/node_modules/pino-pretty/package.json new file mode 100644 index 0000000..ac35839 --- /dev/null +++ b/node_modules/pino-pretty/package.json @@ -0,0 +1,67 @@ +{ + "name": "pino-pretty", + "version": "13.1.3", + "description": "Prettifier for Pino log lines", + "type": "commonjs", + "main": "index.js", + "types": "index.d.ts", + "bin": { + "pino-pretty": "./bin.js" + }, + "scripts": { + "ci": "eslint && borp --check-coverage && npm run test-types", + "lint": "eslint", + "lint:fix": "eslint --fix", + "test": "borp", + "test-types": "tsc && tsd && attw --pack .", + "test:watch": "borp -w --reporter gh", + "test:report": "c8 --reporter html borp" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/pinojs/pino-pretty.git" + }, + "keywords": [ + "pino" + ], + "author": "James Sumners ", + "license": "MIT", + "bugs": { + "url": "https://github.com/pinojs/pino-pretty/issues" + }, + "homepage": "https://github.com/pinojs/pino-pretty#readme", + "dependencies": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^4.0.0", + "fast-safe-stringify": "^2.1.1", + "help-me": "^5.0.0", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^3.0.0", + "pump": "^3.0.0", + "secure-json-parse": "^4.0.0", + "sonic-boom": "^4.0.1", + "strip-json-comments": "^5.0.2" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.18.1", + "@jsumners/assert-match": "^1.0.0", + "@jsumners/line-reporter": "^1.0.1", + "@types/node": "^24.0.8", + "borp": "^0.21.0", + "eslint": "^9.37.0", + "fastbench": "^1.0.1", + "neostandard": "^0.12.2", + "pino": "^10.1.0", + "rimraf": "^6.0.1", + "semver": "^7.6.0", + "tap": "^16.0.0", + "tsd": "^0.33.0", + "typescript": "~5.9.2" + }, + "tsd": { + "directory": "./test/types" + } +} diff --git a/node_modules/pino-pretty/test/basic.test.js 
b/node_modules/pino-pretty/test/basic.test.js new file mode 100644 index 0000000..2ea0e61 --- /dev/null +++ b/node_modules/pino-pretty/test/basic.test.js @@ -0,0 +1,1304 @@ +'use strict' + +process.env.TZ = 'UTC' + +const { Writable } = require('node:stream') +const os = require('node:os') +const { describe, test, beforeEach, afterEach } = require('node:test') +const match = require('@jsumners/assert-match') +const pino = require('pino') +const dateformat = require('dateformat') +const rimraf = require('rimraf') +const { join } = require('node:path') +const fs = require('node:fs') +const semver = require('semver') +const pinoPretty = require('..') +const SonicBoom = require('sonic-boom') +const _prettyFactory = pinoPretty.prettyFactory + +// Disable pino warnings +process.removeAllListeners('warning') + +function prettyFactory (opts) { + if (!opts) { + opts = { colorize: false } + } else if (!Object.prototype.hasOwnProperty.call(opts, 'colorize')) { + opts.colorize = false + } + return _prettyFactory(opts) +} + +const Empty = function () {} +Empty.prototype = Object.create(null) + +// All dates are computed from 'Fri, 30 Mar 2018 17:35:28 GMT' +const epoch = 1522431328992 +const formattedEpoch = '17:35:28.992' +const pid = process.pid +const hostname = os.hostname() + +describe('basic prettifier tests', () => { + beforeEach(() => { + Date.originalNow = Date.now + Date.now = () => epoch + }) + afterEach(() => { + Date.now = Date.originalNow + delete Date.originalNow + }) + + test('preserves output if not valid JSON', (t) => { + t.plan(1) + const pretty = prettyFactory() + const formatted = pretty('this is not json\nit\'s just regular output\n') + t.assert.strictEqual(formatted, 'this is not json\nit\'s just regular output\n\n') + }) + + test('formats a line without any extra options', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('will add color codes', (t) => { + t.plan(1) + const pretty = prettyFactory({ colorize: true }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] \u001B[32mINFO\u001B[39m (${pid}): \u001B[36mfoo\u001B[39m\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('will omit color codes from objects when colorizeObjects = false', (t) => { + t.plan(1) + const pretty = prettyFactory({ colorize: true, singleLine: true, colorizeObjects: false }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] \u001B[32mINFO\u001B[39m (${pid}): \u001B[36mfoo\u001B[39m {"foo":"bar"}\n` + ) + cb() + } + })) + log.info({ foo: 'bar' }, 'foo') + }) + + test('can swap date and level position', (t) => { + t.plan(1) + const destination = new Writable({ + write (formatted, enc, cb) { + t.assert.strictEqual( + formatted.toString(), + `INFO [${formattedEpoch}] (${pid}): foo\n` + ) + cb() + } + }) + const pretty = pinoPretty({ + destination, + levelFirst: true, + colorize: false + }) + const log = pino({}, pretty) + log.info('foo') + }) + + test('can print message key value when its a string', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + 
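+        // Recurring pattern in this suite: pino writes one NDJSON line per log
+        // call into this Writable, the prettifier reformats the chunk, and the
+        // assertion runs synchronously inside write() before cb() resumes the
+        // stream.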
const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): baz\n` + ) + cb() + } + })) + log.info('baz') + }) + + test('can print message key value when its a number', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): 42\n` + ) + cb() + } + })) + log.info(42) + }) + + test('can print message key value when its a Number(0)', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): 0\n` + ) + cb() + } + })) + log.info(0) + }) + + test('can print message key value when its a boolean', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): true\n` + ) + cb() + } + })) + log.info(true) + }) + + test('can use different message keys', (t) => { + t.plan(1) + const pretty = prettyFactory({ messageKey: 'bar' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): baz\n` + ) + cb() + } + })) + log.info({ bar: 'baz' }) + }) + + test('can use different level keys', (t) => { + t.plan(1) + const pretty = prettyFactory({ levelKey: 'bar' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] WARN (${pid}): foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo', bar: 'warn' }) + }) + + test('can use nested level keys', (t) => { + t.plan(1) + const pretty = prettyFactory({ levelKey: 'log\\.level' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] WARN (${pid}): foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo', 'log.level': 'warn' }) + }) + + test('can use a customPrettifier on default level output', (t) => { + t.plan(1) + const veryCustomLevels = { + 30: 'ok', + 40: 'not great' + } + const customPrettifiers = { + level: (level) => `LEVEL: ${veryCustomLevels[level]}` + } + const pretty = prettyFactory({ customPrettifiers }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] LEVEL: ok (${pid}): foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo' }) + }) + + test('can use a customPrettifier on different-level-key output', (t) => { + t.plan(1) + const customPrettifiers = { + level: (level) => `LEVEL: ${level.toUpperCase()}` + } + const pretty = prettyFactory({ levelKey: 'bar', customPrettifiers }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] LEVEL: WARN (${pid}): foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo', bar: 'warn' }) + }) + + test('can use a customPrettifier to get final level label (no color)', (t) => { + t.plan(1) + const customPrettifiers = { + level: (level, key, logThis, { label }) => { + 
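+          // As exercised here, the fourth argument to a level prettifier
+          // carries the resolved metadata: `label` is the level name in upper
+          // case, and the colorized variant is covered by the next test via
+          // `labelColorized`.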
return `LEVEL: ${label}` + } + } + const pretty = prettyFactory({ customPrettifiers, colorize: false, useOnlyCustomProps: false }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] LEVEL: INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo' }) + }) + + test('can use a customPrettifier to get final level label (colorized)', (t) => { + t.plan(1) + const customPrettifiers = { + level: (level, key, logThis, { label, labelColorized }) => { + return `LEVEL: ${labelColorized}` + } + } + const pretty = prettyFactory({ customPrettifiers, colorize: true, useOnlyCustomProps: false }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] LEVEL: INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo' }) + }) + + test('can use a customPrettifier on name output', (t) => { + t.plan(1) + const customPrettifiers = { + name: (hostname) => `NAME: ${hostname}` + } + const pretty = prettyFactory({ customPrettifiers }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (NAME: logger/${pid}): foo\n` + ) + cb() + } + })) + const child = log.child({ name: 'logger' }) + child.info({ msg: 'foo' }) + }) + + test('can use a customPrettifier on hostname and pid output', (t) => { + t.plan(1) + const customPrettifiers = { + hostname: (hostname) => `HOSTNAME: ${hostname}`, + pid: (pid) => `PID: ${pid}` + } + const pretty = prettyFactory({ customPrettifiers, ignore: '' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (PID: ${pid} on HOSTNAME: ${hostname}): foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo' }) + }) + + test('can use a customPrettifier on default time output', (t) => { + t.plan(1) + const customPrettifiers = { + time: (timestamp) => `TIME: ${timestamp}` + } + const pretty = prettyFactory({ customPrettifiers }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `TIME: ${formattedEpoch} INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('can use a customPrettifier on the caller', (t) => { + t.plan(1) + const customPrettifiers = { + caller: (caller) => `CALLER: ${caller}` + } + const pretty = prettyFactory({ customPrettifiers }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}) : foo\n` + ) + cb() + } + })) + log.info({ msg: 'foo', caller: 'test.js:10' }) + }) + + test('can use a customPrettifier on translateTime-time output', (t) => { + t.plan(1) + const customPrettifiers = { + time: (timestamp) => `TIME: ${timestamp}` + } + const pretty = prettyFactory({ customPrettifiers, translateTime: true }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `TIME: ${formattedEpoch} INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('will format time to UTC', (t) => { + t.plan(1) + const pretty = prettyFactory({ translateTime: true }) + const log 
= pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('will format time to UTC in custom format', (t) => { + t.plan(1) + const pretty = prettyFactory({ translateTime: 'HH:MM:ss o' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const utcHour = dateformat(epoch, 'UTC:' + 'HH') + const offset = dateformat(epoch, 'UTC:' + 'o') + t.assert.strictEqual( + formatted, + `[${utcHour}:35:28 ${offset}] INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('will format time to local systemzone in ISO 8601 format', (t) => { + t.plan(1) + const pretty = prettyFactory({ translateTime: 'sys:standard' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const localHour = dateformat(epoch, 'HH') + const localMinute = dateformat(epoch, 'MM') + const localDate = dateformat(epoch, 'yyyy-mm-dd') + const offset = dateformat(epoch, 'o') + t.assert.strictEqual( + formatted, + `[${localDate} ${localHour}:${localMinute}:28.992 ${offset}] INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('will format time to local systemzone in custom format', (t) => { + t.plan(1) + const pretty = prettyFactory({ + translateTime: 'SYS:yyyy/mm/dd HH:MM:ss o' + }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const localHour = dateformat(epoch, 'HH') + const localMinute = dateformat(epoch, 'MM') + const localDate = dateformat(epoch, 'yyyy/mm/dd') + const offset = dateformat(epoch, 'o') + t.assert.strictEqual( + formatted, + `[${localDate} ${localHour}:${localMinute}:28 ${offset}] INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + // TODO: 2019-03-30 -- We don't really want the indentation in this case? Or at least some better formatting. 
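+  // Illustration of the TODO above, inferred from the assertion below: a log
+  // line with no recognized time/level/msg keys is rendered only as its
+  // indented key/value pairs, e.g. pretty('{"hello":"world"}') yields just the
+  // indented pair plus a trailing newline.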
+ test('handles missing time', (t) => { + t.plan(1) + const pretty = prettyFactory() + const formatted = pretty('{"hello":"world"}') + t.assert.strictEqual(formatted, ' hello: "world"\n') + }) + + test('handles missing pid, hostname and name', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({ base: null }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.match(formatted, /\[.*\] INFO: hello world/) + cb() + } + })) + log.info('hello world') + }) + + test('handles missing pid', (t) => { + t.plan(1) + const pretty = prettyFactory() + const name = 'test' + const msg = 'hello world' + const regex = new RegExp('\\[.*\\] INFO \\(' + name + '\\): ' + msg) + + const opts = { + base: { + name, + hostname + } + } + const log = pino(opts, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.match(formatted, regex) + cb() + } + })) + + log.info(msg) + }) + + test('handles missing hostname', (t) => { + t.plan(1) + const pretty = prettyFactory() + const name = 'test' + const msg = 'hello world' + const regex = new RegExp('\\[.*\\] INFO \\(' + name + '/' + pid + '\\): ' + msg) + + const opts = { + base: { + name, + pid: process.pid + } + } + const log = pino(opts, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.match(formatted, regex) + cb() + } + })) + + log.info(msg) + }) + + test('handles missing name', (t) => { + t.plan(1) + const pretty = prettyFactory() + const msg = 'hello world' + const regex = new RegExp('\\[.*\\] INFO \\(' + process.pid + '\\): ' + msg) + + const opts = { + base: { + hostname, + pid: process.pid + } + } + const log = pino(opts, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.match(formatted, regex) + cb() + } + })) + + log.info(msg) + }) + + test('works without time', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({ timestamp: null }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual(formatted, `INFO (${pid}): hello world\n`) + cb() + } + })) + log.info('hello world') + }) + + test('prettifies properties', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + match(formatted, ' a: "b"', t) + cb() + } + })) + log.info({ a: 'b' }, 'hello world') + }) + + test('prettifies nested properties', (t) => { + t.plan(6) + const expectedLines = [ + ' a: {', + ' "b": {', + ' "c": "d"', + ' }', + ' }' + ] + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expectedLines.length + 2) + lines.shift(); lines.pop() + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + cb() + } + })) + log.info({ a: { b: { c: 'd' } } }, 'hello world') + }) + + test('treats the name with care', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({ name: 'matteo' }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual(formatted, `[${formattedEpoch}] INFO (matteo/${pid}): hello world\n`) + cb() + } + })) + log.info('hello world') + }) + + test('handles spec allowed primitives', (t) => { + const pretty = prettyFactory() + 
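+    // Hedged aside: prettyFactory() returns a plain function, so these cases
+    // drive it directly with JSON-spec primitives (null, true, false) instead
+    // of going through a pino stream; each value passes through unchanged with
+    // a newline appended.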
let formatted = pretty(null) + t.assert.strictEqual(formatted, 'null\n') + + formatted = pretty(true) + t.assert.strictEqual(formatted, 'true\n') + + formatted = pretty(false) + t.assert.strictEqual(formatted, 'false\n') + }) + + test('handles numbers', (t) => { + const pretty = prettyFactory() + let formatted = pretty(2) + t.assert.strictEqual(formatted, '2\n') + + formatted = pretty(-2) + t.assert.strictEqual(formatted, '-2\n') + + formatted = pretty(0.2) + t.assert.strictEqual(formatted, '0.2\n') + + formatted = pretty(Infinity) + t.assert.strictEqual(formatted, 'Infinity\n') + + formatted = pretty(NaN) + t.assert.strictEqual(formatted, 'NaN\n') + }) + + test('handles `undefined` input', (t) => { + t.plan(1) + const pretty = prettyFactory() + const formatted = pretty(undefined) + t.assert.strictEqual(formatted, 'undefined\n') + }) + + test('handles customLogLevel', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({ customLevels: { testCustom: 35 } }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.match(formatted, /USERLVL/) + cb() + } + })) + log.testCustom('test message') + }) + + test('filter some lines based on minimumLevel', (t) => { + t.plan(3) + const pretty = prettyFactory({ minimumLevel: 'info' }) + const expected = [ + undefined, + undefined, + `[${formattedEpoch}] INFO (${pid}): baz\n` + ] + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + expected.shift() + ) + cb() + } + })) + log.info({ msg: 'foo', level: 10 }) + log.info({ msg: 'bar', level: 20 }) + // only this line will be formatted + log.info({ msg: 'baz', level: 30 }) + }) + + test('filter lines based on minimumLevel using custom levels and level key', (t) => { + t.plan(3) + const pretty = prettyFactory({ minimumLevel: 20, levelKey: 'bar' }) + const expected = [ + undefined, + `[${formattedEpoch}] DEBUG (${pid}): bar\n`, + `[${formattedEpoch}] INFO (${pid}): baz\n` + ] + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + expected.shift() + ) + cb() + } + })) + log.info({ msg: 'foo', bar: 10 }) + log.info({ msg: 'bar', bar: 20 }) + log.info({ msg: 'baz', bar: 30 }) + }) + + test('formats a line with an undefined field', (t) => { + t.plan(1) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const obj = JSON.parse(chunk.toString()) + // weird hack, but we should not crash + obj.a = undefined + const formatted = pretty(obj) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}): foo\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('prettifies msg object', (t) => { + t.plan(6) + const expectedLines = [ + ' msg: {', + ' "b": {', + ' "c": "d"', + ' }', + ' }' + ] + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expectedLines.length + 2) + lines.shift(); lines.pop() + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + cb() + } + })) + log.info({ msg: { b: { c: 'd' } } }) + }) + + test('prettifies msg object with circular references', (t) => { + t.plan(7) + const expectedLines = [ + ' msg: {', + ' "a": "[Circular]",', + ' "b": {', + ' "c": "d"', + ' }', + ' }' + ] + const 
pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expectedLines.length + 2) + lines.shift(); lines.pop() + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + cb() + } + })) + + const msg = { b: { c: 'd' } } + msg.a = msg + log.info({ msg }) + }) + + test('prettifies custom key', (t) => { + t.plan(1) + const pretty = prettyFactory({ + customPrettifiers: { + foo: val => `${val}_baz\nmultiline`, + cow: val => val.toUpperCase() + } + }) + const arst = pretty('{"msg":"hello world", "foo": "bar", "cow": "moo", "level":30}') + t.assert.strictEqual(arst, 'INFO: hello world\n foo: bar_baz\n multiline\n cow: MOO\n') + }) + + test('does not add trailing space if prettified value begins with eol', (t) => { + t.plan(1) + const pretty = prettyFactory({ + customPrettifiers: { + calls: val => '\n' + val.map(it => ' ' + it).join('\n') + } + }) + const arst = pretty('{"msg":"doing work","calls":["step 1","step 2","step 3"],"level":30}') + t.assert.strictEqual(arst, 'INFO: doing work\n calls:\n step 1\n step 2\n step 3\n') + }) + + test('does not prettify custom key that does not exists', (t) => { + t.plan(1) + const pretty = prettyFactory({ + customPrettifiers: { + foo: val => `${val}_baz`, + cow: val => val.toUpperCase() + } + }) + const arst = pretty('{"msg":"hello world", "foo": "bar", "level":30}') + t.assert.strictEqual(arst, 'INFO: hello world\n foo: bar_baz\n') + }) + + test('prettifies object with some undefined values', (t) => { + t.plan(1) + const destination = new Writable({ + write (chunk, _, cb) { + t.assert.strictEqual( + chunk + '', + `[${formattedEpoch}] INFO (${pid}):\n a: {\n "b": "c"\n }\n n: null\n` + ) + cb() + } + }) + const pretty = pinoPretty({ + destination, + colorize: false + }) + const log = pino({}, pretty) + log.info({ + a: { b: 'c' }, + s: Symbol.for('s'), + f: f => f, + c: class C {}, + n: null, + err: { toJSON () {} } + }) + }) + + test('ignores multiple keys', (t) => { + t.plan(1) + const pretty = prettyFactory({ ignore: 'pid,hostname' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, `[${formattedEpoch}] INFO: hello world\n`) + }) + + test('ignores a single key', (t) => { + t.plan(1) + const pretty = prettyFactory({ ignore: 'pid' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, `[${formattedEpoch}] INFO (on ${hostname}): hello world\n`) + }) + + test('ignores time', (t) => { + t.plan(1) + const pretty = prettyFactory({ ignore: 'time' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, `INFO (${pid} on ${hostname}): hello world\n`) + }) + + test('ignores time and level', (t) => { + t.plan(1) + const pretty = prettyFactory({ ignore: 'time,level' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, `(${pid} on ${hostname}): hello world\n`) + }) + + test('ignores all keys but message', (t) => { + t.plan(1) + const pretty = prettyFactory({ ignore: 'time,level,name,pid,hostname' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, 
"level":30}`) + t.assert.strictEqual(arst, 'hello world\n') + }) + + test('include nothing', (t) => { + t.plan(1) + const pretty = prettyFactory({ include: '' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, 'hello world\n') + }) + + test('include multiple keys', (t) => { + t.plan(1) + const pretty = prettyFactory({ include: 'time,level' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, `[${formattedEpoch}] INFO: hello world\n`) + }) + + test('include a single key', (t) => { + t.plan(1) + const pretty = prettyFactory({ include: 'level' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, 'INFO: hello world\n') + }) + + test('log error-like object', (t) => { + t.plan(7) + const expectedLines = [ + ' type: "Error"', + ' message: "m"', + ' stack: [', + ' "line1",', + ' "line2"', + ' ]' + ] + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expectedLines.length + 2) + lines.shift(); lines.pop() + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + cb() + } + })) + log.error({ type: 'Error', message: 'm', stack: ['line1', 'line2'] }) + }) + + test('include should override ignore', (t) => { + t.plan(1) + const pretty = prettyFactory({ ignore: 'time,level', include: 'time,level' }) + const arst = pretty(`{"msg":"hello world", "pid":"${pid}", "hostname":"${hostname}", "time":${epoch}, "level":30}`) + t.assert.strictEqual(arst, `[${formattedEpoch}] INFO: hello world\n`) + }) + + test('include a single key with null object', (t) => { + t.plan(1) + const pretty = prettyFactory({ include: 'level' }) + const obj = new Empty() + obj.nested = 'property' + const arst = pretty({ + msg: 'hello world', + pid: `${pid}`, + hostname, + time: epoch, + obj, + level: 30 + }) + t.assert.strictEqual(arst, 'INFO: hello world\n') + }) + + test('prettifies trace caller', (t) => { + t.plan(1) + const traceCaller = (instance) => { + const { symbols: { asJsonSym } } = pino + const get = (target, name) => name === asJsonSym ? 
asJson : target[name] + + function asJson (...args) { + args[0] = args[0] || {} + args[0].caller = '/tmp/script.js' + return instance[asJsonSym].apply(this, args) + } + + return new Proxy(instance, { get }) + } + + const pretty = prettyFactory() + const log = traceCaller(pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] INFO (${pid}) : foo\n` + ) + cb() + } + }))) + log.info('foo') + }) + + test('handles specified timestampKey', (t) => { + t.plan(1) + const pretty = prettyFactory({ timestampKey: '@timestamp' }) + const arst = pretty(`{"msg":"hello world", "@timestamp":${epoch}, "level":30}`) + t.assert.strictEqual(arst, `[${formattedEpoch}] INFO: hello world\n`) + }) + + test('keeps "v" key in log', (t) => { + t.plan(1) + const pretty = prettyFactory({ ignore: 'time' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual(formatted, `INFO (${pid} on ${hostname}):\n v: 1\n`) + cb() + } + })) + log.info({ v: 1 }) + }) + + test('Hide object `{ key: "value" }` from output when flag `hideObject` is set', (t) => { + t.plan(1) + const pretty = prettyFactory({ hideObject: true }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual(formatted, `[${formattedEpoch}] INFO (${pid}): hello world\n`) + cb() + } + })) + log.info({ key: 'value' }, 'hello world') + }) + + test('Prints extra objects on one line with singleLine=true', (t) => { + t.plan(1) + const pretty = prettyFactory({ + singleLine: true, + colorize: false, + customPrettifiers: { + upper: val => val.toUpperCase(), + undef: () => undefined + } + }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual(formatted, `[${formattedEpoch}] INFO (${pid}): message {"extra":{"foo":"bar","number":42},"upper":"FOOBAR"}\n`) + + cb() + } + })) + log.info({ msg: 'message', extra: { foo: 'bar', number: 42 }, upper: 'foobar', undef: 'this will not show up' }) + }) + + test('Does not print empty object with singleLine=true', (t) => { + t.plan(1) + const pretty = prettyFactory({ singleLine: true, colorize: false }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual(formatted, `[${formattedEpoch}] INFO (${pid}): message\n`) + cb() + } + })) + log.info({ msg: 'message' }) + }) + + test('default options', (t) => { + t.plan(1) + t.assert.doesNotThrow(pinoPretty) + }) + + test('does not call fs.close on stdout stream', (t) => { + t.plan(2) + const destination = pino.destination({ minLength: 4096, sync: true }) + const prettyDestination = pinoPretty({ destination, colorize: false }) + const log = pino(prettyDestination) + log.info('this message has been buffered') + const chunks = [] + const { close, writeSync } = fs + let closeCalled = false + fs.close = new Proxy(close, { + apply: (target, self, args) => { + closeCalled = true + } + }) + fs.writeSync = new Proxy(writeSync, { + apply: (target, self, args) => { + chunks.push(args[1]) + return args[1].length + } + }) + destination.end() + Object.assign(fs, { close, writeSync }) + t.assert.match(chunks.join(''), /INFO .+: this message has been buffered/) + t.assert.strictEqual(closeCalled, false) + }) + + test('wait for close event from destination', (t, end) => { + t.plan(2) + const destination = 
pino.destination({ minLength: 4096, sync: true }) + const prettyDestination = pinoPretty({ destination, colorize: false }) + const log = pino(prettyDestination) + log.info('this message has been buffered') + const chunks = [] + const { close, writeSync } = fs + fs.close = new Proxy(close, { + apply: (target, self, args) => { + } + }) + fs.writeSync = new Proxy(writeSync, { + apply: (target, self, args) => { + chunks.push(args[1]) + return args[1].length + } + }) + t.after(() => { + Object.assign(fs, { close, writeSync }) + }) + let destinationClosed = false + destination.on('close', () => { + destinationClosed = true + }) + prettyDestination.on('close', () => { + t.assert.match(chunks.join(''), /INFO .+: this message has been buffered/) + t.assert.strictEqual(destinationClosed, true) + end() + }) + prettyDestination.end() + }) + + test('stream usage', async (t) => { + t.plan(1) + const tmpDir = join(__dirname, '.tmp_' + Date.now()) + t.after(() => rimraf.sync(tmpDir)) + + const destination = join(tmpDir, 'output') + + const pretty = pinoPretty({ + singleLine: true, + colorize: false, + mkdir: true, + append: false, + destination: new SonicBoom({ dest: destination, async: false, mkdir: true, append: true }), + customPrettifiers: { + upper: val => val.toUpperCase(), + undef: () => undefined + } + }) + const log = pino(pretty) + log.info({ msg: 'message', extra: { foo: 'bar', number: 42 }, upper: 'foobar', undef: 'this will not show up' }) + + await watchFileCreated(destination) + + const formatted = fs.readFileSync(destination, 'utf8') + + t.assert.strictEqual(formatted, `[${formattedEpoch}] INFO (${pid}): message {"extra":{"foo":"bar","number":42},"upper":"FOOBAR"}\n`) + }) + + test('sync option', async (t) => { + t.plan(1) + const tmpDir = join(__dirname, '.tmp_' + Date.now()) + t.after(() => rimraf.sync(tmpDir)) + + const destination = join(tmpDir, 'output') + + const log = pino(pino.transport({ + target: '..', + options: { + singleLine: true, + colorize: false, + mkdir: true, + append: false, + sync: true, + destination + } + })) + log.info({ msg: 'message', extra: { foo: 'bar', number: 43 }, upper: 'foobar' }) + + await watchFileCreated(destination) + + const formatted = fs.readFileSync(destination, 'utf8') + + t.assert.strictEqual(formatted, `[${formattedEpoch}] INFO (${pid}): message {"extra":{"foo":"bar","number":43},"upper":"foobar"}\n`) + }) + + test('support custom colors object', async (t) => { + t.plan(1) + const pretty = prettyFactory({ + colorize: true, + customColors: { + trace: 'cyan', + debug: 'blue', + info: 'green', + warn: 'yellow', + error: 'red', + fatal: 'red' + } + }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.strictEqual( + formatted, + `[${formattedEpoch}] \u001B[32mINFO\u001B[39m (${pid}): \u001B[36mfoo\u001B[39m\n` + ) + cb() + } + })) + log.info('foo') + }) + + test('check support for colors', (t) => { + t.plan(1) + const isColorSupported = pinoPretty.isColorSupported + t.assert.strictEqual(typeof isColorSupported, 'boolean') + }) +}) + +if (semver.gte(pino.version, '8.21.0')) { + describe('using pino config', () => { + beforeEach(() => { + Date.originalNow = Date.now + Date.now = () => epoch + }) + afterEach(() => { + Date.now = Date.originalNow + delete Date.originalNow + }) + + test('can use different message keys', (t) => { + t.plan(1) + const destination = new Writable({ + write (formatted, enc, cb) { + t.assert.strictEqual( + formatted.toString(), + `[${formattedEpoch}] INFO 
(${pid}): baz\n` + ) + cb() + } + }) + const pretty = pinoPretty({ + destination, + colorize: false + }) + const log = pino({ messageKey: 'bar' }, pretty) + log.info({ bar: 'baz' }) + }) + + test('handles customLogLevels', (t) => { + t.plan(1) + const destination = new Writable({ + write (formatted, enc, cb) { + t.assert.strictEqual( + formatted.toString(), + `[${formattedEpoch}] TESTCUSTOM (${pid}): test message\n` + ) + cb() + } + }) + const pretty = pinoPretty({ + destination, + colorize: false + }) + const log = pino({ customLevels: { testCustom: 35 } }, pretty) + log.testCustom('test message') + }) + }) +} + +function watchFileCreated (filename) { + return new Promise((resolve, reject) => { + const TIMEOUT = 2000 + const INTERVAL = 100 + const threshold = TIMEOUT / INTERVAL + let counter = 0 + const interval = setInterval(() => { + // On some CI runs file is created but not filled + if (fs.existsSync(filename) && fs.statSync(filename).size !== 0) { + clearInterval(interval) + resolve() + } else if (counter <= threshold) { + counter++ + } else { + clearInterval(interval) + reject(new Error(`${filename} was not created.`)) + } + }, INTERVAL) + }) +} diff --git a/node_modules/pino-pretty/test/cli-rc.test.js b/node_modules/pino-pretty/test/cli-rc.test.js new file mode 100644 index 0000000..7a41fe0 --- /dev/null +++ b/node_modules/pino-pretty/test/cli-rc.test.js @@ -0,0 +1,275 @@ +'use strict' + +process.env.TZ = 'UTC' + +const path = require('node:path') +const { spawn } = require('node:child_process') +const { describe, after, test } = require('node:test') +const match = require('@jsumners/assert-match') +const fs = require('node:fs') +const { rimraf } = require('rimraf') +const { once } = require('./helper') + +const bin = require.resolve('../bin') +const logLine = '{"level":30,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n' + +describe('cli', () => { + const tmpDir = path.join(__dirname, '.tmp_' + Date.now()) + fs.mkdirSync(tmpDir) + + after(() => rimraf(tmpDir)) + + test('loads and applies default config file: pino-pretty.config.js', async (t) => { + t.plan(1) + // Set translateTime: true on run configuration + const configFile = path.join(tmpDir, 'pino-pretty.config.js') + fs.writeFileSync(configFile, 'module.exports = { translateTime: true }') + const env = { TERM: 'dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin], { env, cwd: tmpDir }) + // Validate that the time has been translated + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[17:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine) + await endPromise + t.after(() => { + fs.unlinkSync(configFile) + child.kill() + }) + }) + + test('loads and applies default config file: pino-pretty.config.cjs', async (t) => { + t.plan(1) + // Set translateTime: true on run configuration + const configFile = path.join(tmpDir, 'pino-pretty.config.cjs') + fs.writeFileSync(configFile, 'module.exports = { translateTime: true }') + // Tell the loader to expect ESM modules + const packageJsonFile = path.join(tmpDir, 'package.json') + fs.writeFileSync(packageJsonFile, JSON.stringify({ type: 'module' }, null, 4)) + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin], { env, cwd: tmpDir }) + // Validate that the time has been translated + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), 
'[17:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine) + await endPromise + t.after(() => { + fs.unlinkSync(configFile) + fs.unlinkSync(packageJsonFile) + child.kill() + }) + }) + + test('loads and applies default config file: .pino-prettyrc', async (t) => { + t.plan(1) + // Set translateTime: true on run configuration + const configFile = path.join(tmpDir, '.pino-prettyrc') + fs.writeFileSync(configFile, JSON.stringify({ translateTime: true }, null, 4)) + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin], { env, cwd: tmpDir }) + // Validate that the time has been translated + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[17:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine) + await endPromise + t.after(() => { + fs.unlinkSync(configFile) + child.kill() + }) + }) + + test('loads and applies default config file: .pino-prettyrc.json', async (t) => { + t.plan(1) + // Set translateTime: true on run configuration + const configFile = path.join(tmpDir, '.pino-prettyrc.json') + fs.writeFileSync(configFile, JSON.stringify({ translateTime: true }, null, 4)) + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin], { env, cwd: tmpDir }) + // Validate that the time has been translated + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[17:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine) + await endPromise + t.after(() => { + fs.unlinkSync(configFile) + child.kill() + }) + }) + + test('loads and applies custom config file: pino-pretty.config.test.json', async (t) => { + t.plan(1) + // Set translateTime: true on run configuration + const configFile = path.join(tmpDir, 'pino-pretty.config.test.json') + fs.writeFileSync(configFile, JSON.stringify({ translateTime: true }, null, 4)) + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin, '--config', configFile], { env, cwd: tmpDir }) + // Validate that the time has been translated + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[17:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test('loads and applies custom config file: pino-pretty.config.test.js', async (t) => { + t.plan(1) + // Set translateTime: true on run configuration + const configFile = path.join(tmpDir, 'pino-pretty.config.test.js') + fs.writeFileSync(configFile, 'module.exports = { translateTime: true }') + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin, '--config', configFile], { env, cwd: tmpDir }) + // Validate that the time has been translated + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[17:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + for (const optionName of ['--messageKey', '-m']) { + test(`cli options override config options via ${optionName}`, async (t) => { + t.plan(1) + // Set translateTime: true on run configuration + const configFile = path.join(tmpDir, 'pino-pretty.config.js') + fs.writeFileSync(configFile, ` + module.exports = { + translateTime: true, + messageKey: 
'custom_msg' + } + `.trim()) + // Set messageKey: 'new_msg' using command line option + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin, optionName, 'new_msg'], { env, cwd: tmpDir }) + // Validate that the time has been translated and correct message key has been used + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[17:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine.replace(/"msg"/, '"new_msg"')) + await endPromise + t.after(() => { + fs.unlinkSync(configFile) + child.kill() + }) + }) + } + + test('cli options with defaults can be overridden by config', async (t) => { + t.plan(1) + // Set errorProps: '*' on run configuration + const configFile = path.join(tmpDir, 'pino-pretty.config.js') + fs.writeFileSync(configFile, ` + module.exports = { + errorProps: '*' + } + `.trim()) + // Set messageKey: 'new_msg' using command line option + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin], { env, cwd: tmpDir }) + // Validate that the time has been translated and correct message key has been used + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[21:31:36.006] FATAL: There was an error starting the process.\n QueryError: Error during sql query: syntax error at or near SELECTT\n at /home/me/projects/example/sql.js\n at /home/me/projects/example/index.js\n querySql: SELECTT * FROM "test" WHERE id = $1;\n queryArgs: 12\n') + }) + child.stdin.write('{"level":60,"time":1594416696006,"msg":"There was an error starting the process.","type":"Error","stack":"QueryError: Error during sql query: syntax error at or near SELECTT\\n at /home/me/projects/example/sql.js\\n at /home/me/projects/example/index.js","querySql":"SELECTT * FROM \\"test\\" WHERE id = $1;","queryArgs":[12]}\n') + await endPromise + t.after(() => { + fs.unlinkSync(configFile) + child.kill() + }) + }) + + test('throws on missing config file', async (t) => { + t.plan(2) + const args = [bin, '--config', 'pino-pretty.config.missing.json'] + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], args, { env, cwd: tmpDir }) + const endPromise1 = once(child, 'close', (code) => { + t.assert.strictEqual(code, 1) + }) + child.stdout.pipe(process.stdout) + child.stderr.setEncoding('utf8') + let data = '' + child.stderr.on('data', (chunk) => { + data += chunk + }) + const endPromise2 = once(child, 'close', () => { + match( + data.toString(), 'Error: Failed to load runtime configuration file: pino-pretty.config.missing.json', t) + }) + await Promise.all([endPromise1, endPromise2]) + t.after(() => child.kill()) + }) + + test('throws on invalid default config file', async (t) => { + t.plan(2) + const configFile = path.join(tmpDir, 'pino-pretty.config.js') + fs.writeFileSync(configFile, 'module.exports = () => {}') + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin], { env, cwd: tmpDir }) + const endPromise1 = once(child, 'close', (code) => { + t.assert.strictEqual(code, 1) + }) + child.stdout.pipe(process.stdout) + child.stderr.setEncoding('utf8') + let data = '' + child.stderr.on('data', (chunk) => { + data += chunk + }) + const endPromise2 = once(child, 'close', () => { + match(data, 'Error: Invalid runtime configuration file: pino-pretty.config.js', t) + }) + await Promise.all([endPromise1, endPromise2]) + t.after(() => 
child.kill()) + }) + + test('throws on invalid custom config file', async (t) => { + t.plan(2) + const configFile = path.join(tmpDir, 'pino-pretty.config.invalid.js') + fs.writeFileSync(configFile, 'module.exports = () => {}') + const args = [bin, '--config', path.relative(tmpDir, configFile)] + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], args, { env, cwd: tmpDir }) + const endPromise1 = once(child, 'close', (code) => { + t.assert.strictEqual(code, 1) + }) + child.stdout.pipe(process.stdout) + child.stderr.setEncoding('utf8') + let data = '' + child.stderr.on('data', (chunk) => { + data += chunk + }) + const endPromise2 = once(child, 'close', () => { + match(data, 'Error: Invalid runtime configuration file: pino-pretty.config.invalid.js', t) + }) + await Promise.all([endPromise1, endPromise2]) + t.after(() => child.kill()) + }) + + test('test help', async (t) => { + t.plan(1) + const env = { TERM: ' dumb', TZ: 'UTC' } + const child = spawn(process.argv[0], [bin, '--help'], { env }) + const file = fs.readFileSync('help/help.txt').toString() + child.on('error', t.assert.fail) + + await new Promise(resolve => { + child.stdout.on('data', (data) => { + t.assert.strictEqual(data.toString(), file) + resolve() + }) + }) + t.after(() => child.kill()) + }) +}) diff --git a/node_modules/pino-pretty/test/cli.test.js b/node_modules/pino-pretty/test/cli.test.js new file mode 100644 index 0000000..7055a92 --- /dev/null +++ b/node_modules/pino-pretty/test/cli.test.js @@ -0,0 +1,360 @@ +'use strict' + +process.env.TZ = 'UTC' + +const path = require('node:path') +const { spawn } = require('node:child_process') +const { describe, test } = require('node:test') +const { once } = require('./helper') + +const bin = require.resolve(path.join(__dirname, '..', 'bin.js')) +const epoch = 1522431328992 +const logLine = '{"level":30,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n' +const env = { TERM: 'dumb', TZ: 'UTC' } +const formattedEpoch = '17:35:28.992' + +describe('cli', () => { + test('does basic reformatting', async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + for (const optionName of ['--levelFirst', '-l']) { + test(`flips epoch and level via ${optionName}`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, optionName], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `INFO [${formattedEpoch}] (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + } + + for (const optionName of ['--translateTime', '-t']) { + test(`translates time to default format via ${optionName}`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, optionName], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + } + + for (const optionName of ['--ignore', '-i']) { + test('does ignore multiple keys', async (t) => { + t.plan(1) + const child = spawn(process.argv[0], 
[bin, optionName, 'pid,hostname'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO: hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + } + + for (const optionName of ['--customLevels', '-x']) { + test(`customize levels via ${optionName}`, async (t) => { + t.plan(1) + const logLine = '{"level":1,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n' + const child = spawn(process.argv[0], [bin, optionName, 'err:99,info:1'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} without index`, async (t) => { + t.plan(1) + const logLine = '{"level":1,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n' + const child = spawn(process.argv[0], [bin, optionName, 'err:99,info'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} with minimumLevel`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '--minimumLevel', 'err', optionName, 'err:99,info:1'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] ERR (42): hello world\n`) + }) + child.stdin.write('{"level":1,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n') + child.stdin.write('{"level":99,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n') + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} with minimumLevel, customLevels and useOnlyCustomProps false`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '--minimumLevel', 'custom', '--useOnlyCustomProps', 'false', optionName, 'custom:99,info:1'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] CUSTOM (42): hello world\n`) + }) + child.stdin.write('{"level":1,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n') + child.stdin.write('{"level":99,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n') + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} with minimumLevel, customLevels and useOnlyCustomProps true`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '--minimumLevel', 'custom', '--useOnlyCustomProps', 'true', optionName, 'custom:99,info:1'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] CUSTOM (42): hello world\n`) + }) + child.stdin.write('{"level":1,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n') + child.stdin.write('{"level":99,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n') + await endPromise + t.after(() => 
child.kill()) + }) + } + + for (const optionName of ['--customColors', '-X']) { + test(`customize levels via ${optionName}`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, optionName, 'info:blue,message:red'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} with customLevels`, async (t) => { + t.plan(1) + const logLine = '{"level":1,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n' + const child = spawn(process.argv[0], [bin, '--customLevels', 'err:99,info', optionName, 'info:blue,message:red'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + } + + for (const optionName of ['--useOnlyCustomProps', '-U']) { + test(`customize levels via ${optionName} false and customColors`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '--customColors', 'err:blue,info:red', optionName, 'false'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} true and customColors`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '--customColors', 'err:blue,info:red', optionName, 'true'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} true and customLevels`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '--customLevels', 'err:99,custom:30', optionName, 'true'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] CUSTOM (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} true and no customLevels`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, optionName, 'true'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test(`customize levels via ${optionName} false and customLevels`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '--customLevels', 'err:99,custom:25', optionName, 'false'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + 
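+    // With useOnlyCustomProps=false and no --customLevels supplied, the CLI
+    // should fall back to pino's default level mapping (30 => INFO), which the
+    // next case asserts.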
test(`customize levels via ${optionName} false and no customLevels`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, optionName, 'false'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world\n`) + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + } + + test('does ignore escaped keys', async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, '-i', 'log\\.domain\\.corp/foo'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO: hello world\n`) + }) + const logLine = '{"level":30,"time":1522431328992,"msg":"hello world","log.domain.corp/foo":"bar"}\n' + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + + test('passes through stringified date as string', async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin], { env }) + child.on('error', t.assert.fail) + + const date = JSON.stringify(new Date(epoch)) + + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), date + '\n') + }) + + child.stdin.write(date) + child.stdin.write('\n') + + await endPromise + + t.after(() => child.kill()) + }) + + test('end stdin does not end the destination', async (t) => { + t.plan(2) + const child = spawn(process.argv[0], [bin], { env }) + child.on('error', t.assert.fail) + + const endPromise1 = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), 'aaa\n') + }) + + child.stdin.end('aaa\n') + + const endPromise2 = once(child, 'exit', (code) => { + t.assert.strictEqual(code, 0) + }) + await Promise.all([endPromise1, endPromise2]) + + t.after(() => child.kill()) + }) + + for (const optionName of ['--timestampKey', '-a']) { + test(`uses specified timestamp key via ${optionName}`, async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin, optionName, '@timestamp'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO: hello world\n`) + }) + const logLine = '{"level":30,"@timestamp":1522431328992,"msg":"hello world"}\n' + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) + } + + for (const optionName of ['--singleLine', '-S']) { + test(`singleLine=true via ${optionName}`, async (t) => { + t.plan(1) + const logLineWithExtra = JSON.stringify(Object.assign(JSON.parse(logLine), { + extra: { + foo: 'bar', + number: 42 + } + })) + '\n' + + const child = spawn(process.argv[0], [bin, optionName], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42): hello world {"extra":{"foo":"bar","number":42}}\n`) + }) + child.stdin.write(logLineWithExtra) + await endPromise + t.after(() => child.kill()) + }) + } + + test('does ignore nested keys', async (t) => { + t.plan(1) + + const logLineNested = JSON.stringify(Object.assign(JSON.parse(logLine), { + extra: { + foo: 'bar', + number: 42, + nested: { + foo2: 'bar2' + } + } + })) + '\n' + + const child = spawn(process.argv[0], [bin, '-S', '-i', 'extra.foo,extra.nested,extra.nested.miss'], { env }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 
'data', (data) => { + t.assert.strictEqual(data.toString(), `[${formattedEpoch}] INFO (42 on foo): hello world {"extra":{"number":42}}\n`) + }) + child.stdin.write(logLineNested) + await endPromise + t.after(() => child.kill()) + }) + + test('change TZ', async (t) => { + t.plan(1) + const child = spawn(process.argv[0], [bin], { env: { ...env, TZ: 'Europe/Amsterdam' } }) + child.on('error', t.assert.fail) + const endPromise = once(child.stdout, 'data', (data) => { + t.assert.strictEqual(data.toString(), '[19:35:28.992] INFO (42): hello world\n') + }) + child.stdin.write(logLine) + await endPromise + t.after(() => child.kill()) + }) +}) diff --git a/node_modules/pino-pretty/test/crlf.test.js b/node_modules/pino-pretty/test/crlf.test.js new file mode 100644 index 0000000..de00a35 --- /dev/null +++ b/node_modules/pino-pretty/test/crlf.test.js @@ -0,0 +1,33 @@ +'use strict' + +process.env.TZ = 'UTC' + +const { describe, test } = require('node:test') +const _prettyFactory = require('../').prettyFactory + +function prettyFactory (opts) { + if (!opts) { + opts = { colorize: false } + } else if (!Object.prototype.hasOwnProperty.call(opts, 'colorize')) { + opts.colorize = false + } + return _prettyFactory(opts) +} + +const logLine = '{"level":30,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo"}\n' + +describe('crlf', () => { + test('uses LF by default', (t) => { + t.plan(1) + const pretty = prettyFactory() + const formatted = pretty(logLine) + t.assert.strictEqual(formatted.substr(-2), 'd\n') + }) + + test('can use CRLF', (t) => { + t.plan(1) + const pretty = prettyFactory({ crlf: true }) + const formatted = pretty(logLine) + t.assert.strictEqual(formatted.substr(-3), 'd\r\n') + }) +}) diff --git a/node_modules/pino-pretty/test/error-objects.test.js b/node_modules/pino-pretty/test/error-objects.test.js new file mode 100644 index 0000000..060551b --- /dev/null +++ b/node_modules/pino-pretty/test/error-objects.test.js @@ -0,0 +1,494 @@ +'use strict' + +process.env.TZ = 'UTC' + +const { Writable } = require('node:stream') +const { describe, test, afterEach, beforeEach } = require('node:test') +const pino = require('pino') +const semver = require('semver') +const serializers = pino.stdSerializers +const pinoPretty = require('../') +const _prettyFactory = pinoPretty.prettyFactory + +function prettyFactory (opts) { + if (!opts) { + opts = { colorize: false } + } else if (!Object.prototype.hasOwnProperty.call(opts, 'colorize')) { + opts.colorize = false + } + return _prettyFactory(opts) +} + +// All dates are computed from 'Fri, 30 Mar 2018 17:35:28 GMT' +const epoch = 1522431328992 +const formattedEpoch = '17:35:28.992' +const pid = process.pid + +describe('error like objects tests', () => { + beforeEach(() => { + Date.originalNow = Date.now + Date.now = () => epoch + }) + afterEach(() => { + Date.now = Date.originalNow + delete Date.originalNow + }) + + test('pino transform prettifies Error', (t) => { + t.plan(2) + const pretty = prettyFactory() + const err = Error('hello world') + const expected = err.stack.split('\n') + expected.unshift(err.message) + + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expected.length + 6) + t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): hello world`) + cb() + } + })) + + log.info(err) + }) + + test('errorProps recognizes user specified properties', (t) => { + t.plan(3) + const pretty = 
prettyFactory({ errorProps: 'statusCode,originalStack' }) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + t.assert.match(formatted, /\s{4}error stack/) + t.assert.match(formatted, /"statusCode": 500/) + t.assert.match(formatted, /"originalStack": "original stack"/) + cb() + } + })) + + const error = Error('error message') + error.stack = 'error stack' + error.statusCode = 500 + error.originalStack = 'original stack' + + log.error(error) + }) + + test('prettifies ignores undefined errorLikeObject', (t) => { + const pretty = prettyFactory() + pretty({ err: undefined }) + pretty({ error: undefined }) + }) + + test('prettifies Error in property within errorLikeObjectKeys', (t) => { + t.plan(8) + const pretty = prettyFactory({ + errorLikeObjectKeys: ['err'] + }) + + const err = Error('hello world') + const expected = err.stack.split('\n') + expected.unshift(err.message) + + const log = pino({ serializers: { err: serializers.err } }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expected.length + 6) + t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): hello world`) + t.assert.match(lines[1], /\s{4}err: {/) + t.assert.match(lines[2], /\s{6}"type": "Error",/) + t.assert.match(lines[3], /\s{6}"message": "hello world",/) + t.assert.match(lines[4], /\s{6}"stack":/) + t.assert.match(lines[5], /\s{6}Error: hello world/) + // Node 12 labels the test `` + t.assert.match(lines[6], /\s{10}at TestContext./) + cb() + } + })) + + log.info({ err }) + }) + + test('prettifies Error in property with singleLine=true', (t) => { + // singleLine=true doesn't apply to errors + t.plan(8) + const pretty = prettyFactory({ + singleLine: true, + errorLikeObjectKeys: ['err'] + }) + + const err = Error('hello world') + const expected = [ + '{"extra":{"a":1,"b":2}}', + err.message, + ...err.stack.split('\n') + ] + + const log = pino({ serializers: { err: serializers.err } }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expected.length + 5) + t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): hello world {"extra":{"a":1,"b":2}}`) + t.assert.match(lines[1], /\s{4}err: {/) + t.assert.match(lines[2], /\s{6}"type": "Error",/) + t.assert.match(lines[3], /\s{6}"message": "hello world",/) + t.assert.match(lines[4], /\s{6}"stack":/) + t.assert.match(lines[5], /\s{6}Error: hello world/) + // Node 12 labels the test `` + t.assert.match(lines[6], /\s{10}at TestContext./) + cb() + } + })) + + log.info({ err, extra: { a: 1, b: 2 } }) + }) + + test('prettifies Error in property within errorLikeObjectKeys with custom function', (t) => { + t.plan(4) + const pretty = prettyFactory({ + errorLikeObjectKeys: ['err'], + customPrettifiers: { + err: val => `error is ${val.message}` + } + }) + + const err = Error('hello world') + err.stack = 'Error: hello world\n at anonymous (C:\\project\\node_modules\\example\\index.js)' + const expected = err.stack.split('\n') + expected.unshift(err.message) + + const log = pino({ serializers: { err: serializers.err } }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, 3) + t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): hello world`) + 
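// The err customPrettifier above returns a single string in place of the
+      // default multi-line serialized error, so the error renders as one line:
+ 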
t.assert.strictEqual(lines[1], ' err: error is hello world') + t.assert.strictEqual(lines[2], '') + + cb() + } + })) + + log.info({ err }) + }) + + test('prettifies Error in property within errorLikeObjectKeys when stack has escaped characters', (t) => { + t.plan(8) + const pretty = prettyFactory({ + errorLikeObjectKeys: ['err'] + }) + + const err = Error('hello world') + err.stack = 'Error: hello world\n at anonymous (C:\\project\\node_modules\\example\\index.js)' + const expected = err.stack.split('\n') + expected.unshift(err.message) + + const log = pino({ serializers: { err: serializers.err } }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expected.length + 6) + t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): hello world`) + t.assert.match(lines[1], /\s{4}err: {$/) + t.assert.match(lines[2], /\s{6}"type": "Error",$/) + t.assert.match(lines[3], /\s{6}"message": "hello world",$/) + t.assert.match(lines[4], /\s{6}"stack":$/) + t.assert.match(lines[5], /\s{10}Error: hello world$/) + t.assert.match(lines[6], /\s{10}at anonymous \(C:\\project\\node_modules\\example\\index.js\)$/) + cb() + } + })) + + log.info({ err }) + }) + + test('prettifies Error in property within errorLikeObjectKeys when stack is not the last property', (t) => { + t.plan(9) + const pretty = prettyFactory({ + errorLikeObjectKeys: ['err'] + }) + + const err = Error('hello world') + err.anotherField = 'dummy value' + const expected = err.stack.split('\n') + expected.unshift(err.message) + + const log = pino({ serializers: { err: serializers.err } }, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expected.length + 7) + t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): hello world`) + t.assert.match(lines[1], /\s{4}err: {/) + t.assert.match(lines[2], /\s{6}"type": "Error",/) + t.assert.match(lines[3], /\s{6}"message": "hello world",/) + t.assert.match(lines[4], /\s{6}"stack":/) + t.assert.match(lines[5], /\s{6}Error: hello world/) + // Node 12 labels the test `` + t.assert.match(lines[6], /\s{10}at TestContext./) + t.assert.match(lines[lines.length - 3], /\s{6}"anotherField": "dummy value"/) + cb() + } + })) + + log.info({ err }) + }) + + test('errorProps flag with "*" (print all nested props)', function (t) { + const pretty = prettyFactory({ errorProps: '*' }) + const expectedLines = [ + ' err: {', + ' "type": "Error",', + ' "message": "error message",', + ' "stack":', + ' error stack', + ' "statusCode": 500,', + ' "originalStack": "original stack",', + ' "dataBaseSpecificError": {', + ' "erroMessage": "some database error message",', + ' "evenMoreSpecificStuff": {', + ' "someErrorRelatedObject": "error"', + ' }', + ' }', + ' }' + ] + t.plan(expectedLines.length) + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + lines.shift(); lines.pop() + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + cb() + } + })) + + const error = Error('error message') + error.stack = 'error stack' + error.statusCode = 500 + error.originalStack = 'original stack' + error.dataBaseSpecificError = { + erroMessage: 'some database error message', + evenMoreSpecificStuff: { + someErrorRelatedObject: 'error' + } + } + + log.error(error) + }) + 
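+  // Illustrative aside (not from the upstream suite): errorProps is a
+  // comma-separated allow-list of extra error properties to print, and '*'
+  // prints them all. A hedged sketch of direct factory usage, mirroring the
+  // single-property test below:
+  //
+  //   const pretty = prettyFactory({ errorProps: 'statusCode' })
+  //   const out = pretty(JSON.stringify({
+  //     level: 30, type: 'Error', message: 'boom',
+  //     stack: 'boom stack', statusCode: 500, secret: 'x'
+  //   }) + '\n')
+  //   // out: 'INFO:\n    boom stack\n    statusCode: 500\n' (secret omitted)
+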
+ test('prettifies legacy error object at top level when singleLine=true', function (t) { + t.plan(4) + const pretty = prettyFactory({ singleLine: true }) + const err = Error('hello world') + const expected = err.stack.split('\n') + expected.unshift(err.message) + + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + const lines = formatted.split('\n') + t.assert.strictEqual(lines.length, expected.length + 1) + t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): ${expected[0]}`) + t.assert.strictEqual(lines[1], ` ${expected[1]}`) + t.assert.strictEqual(lines[2], ` ${expected[2]}`) + cb() + } + })) + + log.info({ type: 'Error', stack: err.stack, msg: err.message }) + }) + + test('errorProps: legacy error object at top level', function (t) { + const pretty = prettyFactory({ errorProps: '*' }) + const expectedLines = [ + 'INFO:', + ' error stack', + ' message: hello message', + ' statusCode: 500', + ' originalStack: original stack', + ' dataBaseSpecificError: {', + ' errorMessage: "some database error message"', + ' evenMoreSpecificStuff: {', + ' "someErrorRelatedObject": "error"', + ' }', + ' }', + '' + ] + + t.plan(expectedLines.length) + + const error = {} + error.level = 30 + error.message = 'hello message' + error.type = 'Error' + error.stack = 'error stack' + error.statusCode = 500 + error.originalStack = 'original stack' + error.dataBaseSpecificError = { + errorMessage: 'some database error message', + evenMoreSpecificStuff: { + someErrorRelatedObject: 'error' + } + } + + const formatted = pretty(JSON.stringify(error)) + const lines = formatted.split('\n') + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + }) + + test('errorProps flag with a single property', function (t) { + const pretty = prettyFactory({ errorProps: 'originalStack' }) + const expectedLines = [ + 'INFO:', + ' error stack', + ' originalStack: original stack', + '' + ] + t.plan(expectedLines.length) + + const error = {} + error.level = 30 + error.message = 'hello message' + error.type = 'Error' + error.stack = 'error stack' + error.statusCode = 500 + error.originalStack = 'original stack' + error.dataBaseSpecificError = { + erroMessage: 'some database error message', + evenMoreSpecificStuff: { + someErrorRelatedObject: 'error' + } + } + + const formatted = pretty(JSON.stringify(error)) + const lines = formatted.split('\n') + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + }) + + test('errorProps flag with a single property non existent', function (t) { + const pretty = prettyFactory({ errorProps: 'originalStackABC' }) + const expectedLines = [ + 'INFO:', + ' error stack', + '' + ] + t.plan(expectedLines.length) + + const error = {} + error.level = 30 + error.message = 'hello message' + error.type = 'Error' + error.stack = 'error stack' + error.statusCode = 500 + error.originalStack = 'original stack' + error.dataBaseSpecificError = { + erroMessage: 'some database error message', + evenMoreSpecificStuff: { + someErrorRelatedObject: 'error' + } + } + + const formatted = pretty(JSON.stringify(error)) + const lines = formatted.split('\n') + for (let i = 0; i < lines.length; i += 1) { + t.assert.strictEqual(lines[i], expectedLines[i]) + } + }) + + test('handles errors with a null stack', (t) => { + t.plan(2) + const pretty = prettyFactory() + const log = pino({}, new Writable({ + write (chunk, enc, cb) { + const formatted = pretty(chunk.toString()) + 
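// The logged value is a plain object rather than an Error instance, so its
+      // message and its null stack are prettified verbatim as properties:
+ 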
t.assert.match(formatted, /\s{4}message: "foo"/)
+      t.assert.match(formatted, /\s{4}stack: null/)
+      cb()
+    }
+  }))
+
+  const error = { message: 'foo', stack: null }
+  log.error(error)
+})
+
+test('handles errors with a null stack for Error object', (t) => {
+  const pretty = prettyFactory()
+  const expectedLines = [
+    '      "type": "Error",',
+    '      "message": "error message",',
+    '      "stack":',
+    '          ',
+    '      "some": "property"'
+  ]
+  t.plan(expectedLines.length)
+  const log = pino({}, new Writable({
+    write (chunk, enc, cb) {
+      const formatted = pretty(chunk.toString())
+      const lines = formatted.split('\n')
+      lines.shift(); lines.shift(); lines.pop(); lines.pop()
+      for (let i = 0; i < lines.length; i += 1) {
+        t.assert.ok(lines[i].includes(expectedLines[i]))
+      }
+      cb()
+    }
+  }))
+
+  const error = Error('error message')
+  error.stack = null
+  error.some = 'property'
+
+  log.error(error)
+})
+})
+
+if (semver.gte(pino.version, '8.21.0')) {
+  describe('using pino config', () => {
+    beforeEach(() => {
+      Date.originalNow = Date.now
+      Date.now = () => epoch
+    })
+    afterEach(() => {
+      Date.now = Date.originalNow
+      delete Date.originalNow
+    })
+
+    test('prettifies Error in custom errorKey', (t) => {
+      t.plan(8)
+      const destination = new Writable({
+        write (chunk, enc, cb) {
+          const formatted = chunk.toString()
+          const lines = formatted.split('\n')
+          t.assert.strictEqual(lines.length, expected.length + 7)
+          t.assert.strictEqual(lines[0], `[${formattedEpoch}] INFO (${pid}): hello world`)
+          t.assert.match(lines[1], /\s{4}customErrorKey: {/)
+          t.assert.match(lines[2], /\s{6}"type": "Error",/)
+          t.assert.match(lines[3], /\s{6}"message": "hello world",/)
+          t.assert.match(lines[4], /\s{6}"stack":/)
+          t.assert.match(lines[5], /\s{6}Error: hello world/)
+          // Node 12 labels the test ``
+          t.assert.match(lines[6], /\s{10}(at Test.await t.test|at Test.)/)
+          cb()
+        }
+      })
+      const pretty = pinoPretty({
+        destination,
+        colorize: false
+      })
+      const log = pino({ errorKey: 'customErrorKey' }, pretty)
+      const err = Error('hello world')
+      const expected = err.stack.split('\n')
+      log.info({ customErrorKey: err })
+    })
+  })
+}
diff --git a/node_modules/pino-pretty/test/example/example.js b/node_modules/pino-pretty/test/example/example.js
new file mode 100644
index 0000000..6eed85d
--- /dev/null
+++ b/node_modules/pino-pretty/test/example/example.js
@@ -0,0 +1,33 @@
+'use strict'
+
+// Run this to see how colouring works
+
+const _prettyFactory = require('../../').prettyFactory // factory export, not the default stream builder
+const pino = require('pino')
+const { Writable } = require('node:stream')
+
+function prettyFactory () {
+  return _prettyFactory({
+    colorize: true
+  })
+}
+
+const pretty = prettyFactory()
+const formatted = pretty('this is not json\nit\'s just regular output\n')
+console.log(formatted)
+
+const opts = {
+  base: {
+    hostname: 'localhost',
+    pid: process.pid
+  }
+}
+const log = pino(opts, new Writable({
+  write (chunk, enc, cb) {
+    const formatted = pretty(chunk.toString())
+    console.log(formatted)
+    cb()
+  }
+}))
+
+log.info('foobar')
diff --git a/node_modules/pino-pretty/test/helper.js b/node_modules/pino-pretty/test/helper.js
new file mode 100644
index 0000000..bd53a35
--- /dev/null
+++ b/node_modules/pino-pretty/test/helper.js
@@ -0,0 +1,19 @@
+'use strict'
+
+/**
+ * Listens for an event on an object and resolves a promise when the event is emitted.
+ * @param {Object} emitter - The object to listen to.
+ * @param {string} event - The name of the event to listen for.
+ * @param {Function} fn - The function to call when the event is emitted.
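+ * @example
+ * // e.g. in the bin tests above:
+ * const endPromise = once(child.stdout, 'data', (data) => {
+ *   assert.strictEqual(data.toString(), expected)
+ * })
+ * child.stdin.write(logLine)
+ * await endPromise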
+ * @returns {Promise} A promise that resolves when the event is emitted.
+ */
+function once (emitter, event, fn) {
+  return new Promise(resolve => {
+    emitter.on(event, (...args) => {
+      fn(...args)
+      resolve()
+    })
+  })
+}
+
+module.exports = { once }
diff --git a/node_modules/pino-pretty/test/types/pino-pretty.test-d.ts b/node_modules/pino-pretty/test/types/pino-pretty.test-d.ts
new file mode 100644
index 0000000..8649edc
--- /dev/null
+++ b/node_modules/pino-pretty/test/types/pino-pretty.test-d.ts
@@ -0,0 +1,58 @@
+import { expectType } from "tsd";
+
+import pretty from "../../";
+import PinoPretty, {
+  PinoPretty as PinoPrettyNamed,
+  PrettyOptions,
+  colorizerFactory,
+  prettyFactory
+} from "../../";
+import PinoPrettyDefault from "../../";
+import * as PinoPrettyStar from "../../";
+import PinoPrettyCjsImport = require("../../");
+import PrettyStream = PinoPretty.PrettyStream;
+const PinoPrettyCjs = require("../../");
+
+const options: PinoPretty.PrettyOptions = {
+  colorize: true,
+  crlf: false,
+  errorLikeObjectKeys: ["err", "error"],
+  errorProps: "",
+  hideObject: true,
+  levelKey: "level",
+  levelLabel: "foo",
+  messageFormat: false,
+  ignore: "",
+  levelFirst: false,
+  messageKey: "msg",
+  timestampKey: "timestamp",
+  minimumLevel: "trace",
+  translateTime: "UTC:h:MM:ss TT Z",
+  singleLine: false,
+  customPrettifiers: {
+    key: (value) => {
+      return value.toString().toUpperCase();
+    },
+    level: (level, levelKey, log, { label, labelColorized, colors }) => {
+      return level.toString();
+    },
+    foo: (value, key, log, { colors }) => {
+      return value.toString();
+    }
+  },
+  customLevels: 'verbose:5',
+  customColors: 'default:white,verbose:gray',
+  sync: false,
+  destination: 2,
+  append: true,
+  mkdir: true,
+  useOnlyCustomProps: false,
+};
+
+expectType<PrettyStream>(pretty()); // #326
+expectType<PrettyStream>(pretty(options));
+expectType<PrettyStream>(PinoPrettyNamed(options));
+expectType<PrettyStream>(PinoPrettyDefault(options));
+expectType<PrettyStream>(PinoPrettyStar.PinoPretty(options));
+expectType<PrettyStream>(PinoPrettyCjsImport.PinoPretty(options));
+expectType<PrettyStream>(PinoPrettyCjs(options));
diff --git a/node_modules/pino-pretty/tsconfig.json b/node_modules/pino-pretty/tsconfig.json
new file mode 100644
index 0000000..6762960
--- /dev/null
+++ b/node_modules/pino-pretty/tsconfig.json
@@ -0,0 +1,13 @@
+{
+  "compilerOptions": {
+    "target": "es6",
+    "lib": [ "es2015" ],
+    "module": "commonjs",
+    "noEmit": true,
+    "strict": true
+  },
+  "include": [
+    "./test/types/pino-pretty.test-d.ts",
+    "./index.d.ts"
+  ]
+}
diff --git a/node_modules/pino-std-serializers/.editorconfig b/node_modules/pino-std-serializers/.editorconfig
new file mode 100644
index 0000000..3fec5c5
--- /dev/null
+++ b/node_modules/pino-std-serializers/.editorconfig
@@ -0,0 +1,13 @@
+
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# [*.md]
+# trim_trailing_whitespace = false
diff --git a/node_modules/pino-std-serializers/.eslintignore b/node_modules/pino-std-serializers/.eslintignore
new file mode 100644
index 0000000..2abb4c2
--- /dev/null
+++ b/node_modules/pino-std-serializers/.eslintignore
@@ -0,0 +1,2 @@
+index.d.ts
+test/types/index.test-d.ts
diff --git a/node_modules/pino-std-serializers/.github/dependabot.yml b/node_modules/pino-std-serializers/.github/dependabot.yml
new file mode 100644
index 0000000..dfa7fa6
--- /dev/null
+++ b/node_modules/pino-std-serializers/.github/dependabot.yml
@@ -0,0 +1,13 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
+
+  - package-ecosystem: "npm"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
diff --git a/node_modules/pino-std-serializers/.github/workflows/ci.yml b/node_modules/pino-std-serializers/.github/workflows/ci.yml
new file mode 100644
index 0000000..1310c86
--- /dev/null
+++ b/node_modules/pino-std-serializers/.github/workflows/ci.yml
@@ -0,0 +1,81 @@
+name: CI
+
+on:
+  push:
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+  pull_request:
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+
+# This allows a subsequently queued workflow run to interrupt previous runs
+concurrency:
+  group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
+  cancel-in-progress: true
+
+jobs:
+  dependency-review:
+    name: Dependency Review
+    if: github.event_name == 'pull_request'
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    steps:
+      - name: Check out repo
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: Dependency review
+        uses: actions/dependency-review-action@v4
+
+  test:
+    name: Test
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    strategy:
+      fail-fast: false
+      matrix:
+        node-version: [18, 20]
+    steps:
+      - name: Check out repo
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: Setup Node ${{ matrix.node-version }}
+        uses: actions/setup-node@v4
+        with:
+          node-version: ${{ matrix.node-version }}
+
+      - name: Install dependencies
+        run: npm install --ignore-scripts
+        env:
+          NODE_ENV: development
+
+      - name: Lint-CI
+        run: npm run lint-ci
+
+      - name: Test-Types
+        run: npm run test-types
+
+      - name: Test-CI
+        run: npm run test-ci
+
+  automerge:
+    name: Automerge Dependabot PRs
+    if: >
+      github.event_name == 'pull_request' &&
+      github.event.pull_request.user.login == 'dependabot[bot]'
+    needs: test
+    permissions:
+      pull-requests: write
+      contents: write
+    runs-on: ubuntu-latest
+    steps:
+      - uses: fastify/github-action-merge-dependabot@v3
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/node_modules/pino-std-serializers/LICENSE b/node_modules/pino-std-serializers/LICENSE
new file mode 100644
index 0000000..69957c5
--- /dev/null
+++ b/node_modules/pino-std-serializers/LICENSE
@@ -0,0 +1,7 @@
+Copyright Matteo Collina, David Mark Clements, James Sumners
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
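Before the pino-std-serializers README below, a quick orientation: a minimal usage sketch (assuming `pino` and `pino-std-serializers` are both installed) showing how the serializers documented next are wired into a logger through pino's `serializers` option.

```js
'use strict'
// Minimal sketch, assuming pino and pino-std-serializers are installed.
// Values logged under the err/req/res keys are run through the matching
// standard serializer before the line is written out.
const pino = require('pino')
const serializers = require('pino-std-serializers')

const log = pino({
  serializers: {
    err: serializers.err,
    req: serializers.req,
    res: serializers.res
  }
})

log.error({ err: new Error('boom') }, 'request failed')
```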
diff --git a/node_modules/pino-std-serializers/Readme.md b/node_modules/pino-std-serializers/Readme.md new file mode 100644 index 0000000..e068053 --- /dev/null +++ b/node_modules/pino-std-serializers/Readme.md @@ -0,0 +1,182 @@ +# pino-std-serializers  [![CI](https://github.com/pinojs/pino-std-serializers/workflows/CI/badge.svg)](https://github.com/pinojs/pino-std-serializers/actions?query=workflow%3ACI) + +This module provides a set of standard object serializers for the +[Pino](https://getpino.io) logger. + +## Serializers + +### `exports.err(error)` +Serializes an `Error` like object. Returns an object: + +```js +{ + type: 'string', // The name of the object's constructor. + message: 'string', // The supplied error message. + stack: 'string', // The stack when the error was generated. + raw: Error // Non-enumerable, i.e. will not be in the output, original + // Error object. This is available for subsequent serializers + // to use. + [...any additional Enumerable property the original Error had] +} +``` + +Any other extra properties, e.g. `statusCode`, that have been attached to the +object will also be present on the serialized object. + +If the error object has a [`cause`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error/cause) property, the `cause`'s `message` and `stack` will be appended to the top-level `message` and `stack`. All other parameters that belong to the `error.cause` object will be omitted. + +Example: + +```js +const serializer = require('pino-std-serializers').err; + +const innerError = new Error("inner error"); +innerError.isInner = true; +const outerError = new Error("outer error", { cause: innerError }); +outerError.isInner = false; + +const serialized = serializer(outerError); +/* Result: +{ + "type": "Error", + "message": "outer error: inner error", + "isInner": false, + "stack": "Error: outer error + at <...omitted..> + caused by: Error: inner error + at <...omitted..> +} + */ +``` + +### `exports.errWithCause(error)` +Serializes an `Error` like object, including any `error.cause`. Returns an object: + +```js +{ + type: 'string', // The name of the object's constructor. + message: 'string', // The supplied error message. + stack: 'string', // The stack when the error was generated. + cause?: Error, // If the original error had an error.cause, it will be serialized here + raw: Error // Non-enumerable, i.e. will not be in the output, original + // Error object. This is available for subsequent serializers + // to use. + [...any additional Enumerable property the original Error had] +} +``` + +Any other extra properties, e.g. `statusCode`, that have been attached to the object will also be present on the serialized object. + +Example: +```javascript +const serializer = require('pino-std-serializers').errWithCause; + +const innerError = new Error("inner error"); +innerError.isInner = true; +const outerError = new Error("outer error", { cause: innerError }); +outerError.isInner = false; + +const serialized = serializer(outerError); +/* Result: +{ + "type": "Error", + "message": "outer error", + "isInner": false, + "stack": "Error: outer error + at <...omitted..>", + "cause": { + "type": "Error", + "message": "inner error", + "isInner": true, + "stack": "Error: inner error + at <...omitted..>" + }, +} + */ +``` + +### `exports.mapHttpResponse(response)` +Used internally by Pino for general response logging. 
Returns an object:
+
+```js
+{
+  res: {}
+}
+```
+
+Where `res` is the `response` as serialized by the standard response serializer.
+
+### `exports.mapHttpRequest(request)`
+Used internally by Pino for general request logging. Returns an object:
+
+```js
+{
+  req: {}
+}
+```
+
+Where `req` is the `request` as serialized by the standard request serializer.
+
+### `exports.req(request)`
+The default `request` serializer. Returns an object:
+
+```js
+{
+  id: 'string', // Defaults to `undefined`, unless there is an `id` property
+                // already attached to the `request` object or to the `request.info`
+                // object. Attach a synchronous function
+                // to the `request.id` that returns an identifier to have
+                // the value filled.
+  method: 'string',
+  url: 'string', // the request pathname (as per req.url in core HTTP)
+  query: 'object', // the request query (as per req.query in express or hapi)
+  params: 'object', // the request params (as per req.params in express or hapi)
+  headers: Object, // a reference to the `headers` object from the request
+                   // (as per req.headers in core HTTP)
+  remoteAddress: 'string',
+  remotePort: Number,
+  raw: Object // Non-enumerable, i.e. will not be in the output, original
+              // request object. This is available for subsequent serializers
+              // to use. In cases where the `request` input already has
+              // a `raw` property this will replace the original `request.raw`
+              // property
+}
+```
+
+### `exports.res(response)`
+The default `response` serializer. Returns an object:
+
+```js
+{
+  statusCode: Number, // Response status code, will be null before headers are flushed
+  headers: Object, // The headers to be sent in the response.
+  raw: Object // Non-enumerable, i.e. will not be in the output, original
+              // response object. This is available for subsequent serializers
+              // to use.
+}
+```
+
+### `exports.wrapErrorSerializer(customSerializer)`
+A utility method for wrapping the default error serializer. This allows
+custom serializers to work with the already serialized object.
+
+The `customSerializer` accepts one parameter — the newly serialized error
+object — and returns the new (or updated) error object.
+
+### `exports.wrapRequestSerializer(customSerializer)`
+A utility method for wrapping the default request serializer. This allows
+custom serializers to work with the already serialized object.
+
+The `customSerializer` accepts one parameter — the newly serialized request
+object — and returns the new (or updated) request object.
+
+### `exports.wrapResponseSerializer(customSerializer)`
+A utility method for wrapping the default response serializer. This allows
+custom serializers to work with the already serialized object.
+
+The `customSerializer` accepts one parameter — the newly serialized response
+object — and returns the new (or updated) response object.
+
+## License
+
+MIT License
diff --git a/node_modules/pino-std-serializers/index.d.ts b/node_modules/pino-std-serializers/index.d.ts
new file mode 100644
index 0000000..b4b35fe
--- /dev/null
+++ b/node_modules/pino-std-serializers/index.d.ts
@@ -0,0 +1,145 @@
+// Type definitions for pino-std-serializers 2.4
+// Definitions by: Connor Fitzgerald
+//                 Igor Savin
+// TypeScript Version: 2.7
+
+/// <reference types="node" />
+import { IncomingMessage, ServerResponse } from 'http';
+
+export interface SerializedError {
+  /**
+   * The name of the object's constructor.
+   */
+  type: string;
+  /**
+   * The supplied error message.
+   */
+  message: string;
+  /**
+   * The stack when the error was generated.
+   */
+  stack: string;
+  /**
+   * Non-enumerable. The original Error object. This will not be included in the logged output.
+   * This is available for subsequent serializers to use.
+   */
+  raw: Error;
+  /**
+   * `cause` is never included in the log output; if you need the `cause`, use {@link raw.cause}
+   */
+  cause?: never;
+  /**
+   * Any other extra properties that have been attached to the object will also be present on the serialized object.
+   */
+  [key: string]: any;
+  [key: number]: any;
+}
+
+/**
+ * Serializes an Error object. Does not serialize "err.cause" fields (will append the err.cause.message to err.message
+ * and err.cause.stack to err.stack)
+ */
+export function err(err: Error): SerializedError;
+
+/**
+ * Serializes an Error object, including full serialization for any err.cause fields recursively.
+ */
+export function errWithCause(err: Error): SerializedError;
+
+export interface SerializedRequest {
+  /**
+   * Defaults to `undefined`, unless there is an `id` property already attached to the `request` object or
+   * to the `request.info` object. Attach a synchronous function to the `request.id` that returns an
+   * identifier to have the value filled.
+   */
+  id: string | undefined;
+  /**
+   * HTTP method.
+   */
+  method: string;
+  /**
+   * Request pathname (as per req.url in core HTTP).
+   */
+  url: string;
+  /**
+   * Reference to the `headers` object from the request (as per req.headers in core HTTP).
+   */
+  headers: Record<string, string>;
+  remoteAddress: string;
+  remotePort: number;
+  params: Record<string, string>;
+  query: Record<string, string>;
+
+  /**
+   * Non-enumerable, i.e. will not be in the output, original request object. This is available for subsequent
+   * serializers to use. In cases where the `request` input already has a `raw` property this will
+   * replace the original `request.raw` property.
+   */
+  raw: IncomingMessage;
+}
+
+/**
+ * Serializes a Request object.
+ */
+export function req(req: IncomingMessage): SerializedRequest;
+
+/**
+ * Used internally by Pino for general request logging.
+ */
+export function mapHttpRequest(req: IncomingMessage): {
+  req: SerializedRequest
+};
+
+export interface SerializedResponse {
+  /**
+   * HTTP status code.
+   */
+  statusCode: number;
+  /**
+   * The headers to be sent in the response.
+   */
+  headers: Record<string, unknown>;
+  /**
+   * Non-enumerable, i.e. will not be in the output, original response object. This is available for subsequent serializers to use.
+   */
+  raw: ServerResponse;
+}
+
+/**
+ * Serializes a Response object.
+ */
+export function res(res: ServerResponse): SerializedResponse;
+
+/**
+ * Used internally by Pino for general response logging.
+ */
+export function mapHttpResponse(res: ServerResponse): {
+  res: SerializedResponse
+};
+
+export type CustomErrorSerializer = (err: SerializedError) => Record<string, any>;
+
+/**
+ * A utility method for wrapping the default error serializer.
+ * This allows custom serializers to work with the already serialized object.
+ * The customSerializer accepts one parameter — the newly serialized error object — and returns the new (or updated) error object.
+ */
+export function wrapErrorSerializer(customSerializer: CustomErrorSerializer): (err: Error) => Record<string, any>;
+
+export type CustomRequestSerializer = (req: SerializedRequest) => Record<string, any>;
+
+/**
+ * A utility method for wrapping the default request serializer.
+ * This allows custom serializers to work with the already serialized object.
+ * The customSerializer accepts one parameter — the newly serialized request object — and returns the new (or updated) request object.
+ */
+export function wrapRequestSerializer(customSerializer: CustomRequestSerializer): (req: IncomingMessage) => Record<string, any>;
+
+export type CustomResponseSerializer = (res: SerializedResponse) => Record<string, any>;
+
+/**
+ * A utility method for wrapping the default response serializer.
+ * This allows custom serializers to work with the already serialized object.
+ * The customSerializer accepts one parameter — the newly serialized response object — and returns the new (or updated) response object.
+ */
+export function wrapResponseSerializer(customSerializer: CustomResponseSerializer): (res: ServerResponse) => Record<string, any>;
diff --git a/node_modules/pino-std-serializers/index.js b/node_modules/pino-std-serializers/index.js
new file mode 100644
index 0000000..ef2b660
--- /dev/null
+++ b/node_modules/pino-std-serializers/index.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const errSerializer = require('./lib/err')
+const errWithCauseSerializer = require('./lib/err-with-cause')
+const reqSerializers = require('./lib/req')
+const resSerializers = require('./lib/res')
+
+module.exports = {
+  err: errSerializer,
+  errWithCause: errWithCauseSerializer,
+  mapHttpRequest: reqSerializers.mapHttpRequest,
+  mapHttpResponse: resSerializers.mapHttpResponse,
+  req: reqSerializers.reqSerializer,
+  res: resSerializers.resSerializer,
+
+  wrapErrorSerializer: function wrapErrorSerializer (customSerializer) {
+    if (customSerializer === errSerializer) return customSerializer
+    return function wrapErrSerializer (err) {
+      return customSerializer(errSerializer(err))
+    }
+  },
+
+  wrapRequestSerializer: function wrapRequestSerializer (customSerializer) {
+    if (customSerializer === reqSerializers.reqSerializer) return customSerializer
+    return function wrappedReqSerializer (req) {
+      return customSerializer(reqSerializers.reqSerializer(req))
+    }
+  },
+
+  wrapResponseSerializer: function wrapResponseSerializer (customSerializer) {
+    if (customSerializer === resSerializers.resSerializer) return customSerializer
+    return function wrappedResSerializer (res) {
+      return customSerializer(resSerializers.resSerializer(res))
+    }
+  }
+}
diff --git a/node_modules/pino-std-serializers/lib/err-helpers.js b/node_modules/pino-std-serializers/lib/err-helpers.js
new file mode 100644
index 0000000..efdec2c
--- /dev/null
+++ b/node_modules/pino-std-serializers/lib/err-helpers.js
@@ -0,0 +1,118 @@
+'use strict'
+
+// **************************************************************
+// * Code initially copied/adapted from "pony-cause" npm module *
+// * Please upstream improvements there                         *
+// **************************************************************
+
+const isErrorLike = (err) => {
+  return err && typeof err.message === 'string'
+}
+
+/**
+ * @param {Error|{ cause?: unknown|(()=>err)}} err
+ * @returns {Error|Object|undefined}
+ */
+const getErrorCause = (err) => {
+  if (!err) return
+
+  /** @type {unknown} */
+  // @ts-ignore
+  const cause = err.cause
+
+  // VError / NError style causes
+  if (typeof cause === 'function') {
+    // @ts-ignore
+    const causeResult = err.cause()
+
+    return isErrorLike(causeResult)
+      ? causeResult
+      : undefined
+  } else {
+    return isErrorLike(cause)
+      ?
cause + : undefined + } +} + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @returns {string} + */ +const _stackWithCauses = (err, seen) => { + if (!isErrorLike(err)) return '' + + const stack = err.stack || '' + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return stack + '\ncauses have become circular...' + } + + const cause = getErrorCause(err) + + if (cause) { + seen.add(err) + return (stack + '\ncaused by: ' + _stackWithCauses(cause, seen)) + } else { + return stack + } +} + +/** + * @param {Error} err + * @returns {string} + */ +const stackWithCauses = (err) => _stackWithCauses(err, new Set()) + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @param {boolean} [skip] + * @returns {string} + */ +const _messageWithCauses = (err, seen, skip) => { + if (!isErrorLike(err)) return '' + + const message = skip ? '' : (err.message || '') + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return message + ': ...' + } + + const cause = getErrorCause(err) + + if (cause) { + seen.add(err) + + // @ts-ignore + const skipIfVErrorStyleCause = typeof err.cause === 'function' + + return (message + + (skipIfVErrorStyleCause ? '' : ': ') + + _messageWithCauses(cause, seen, skipIfVErrorStyleCause)) + } else { + return message + } +} + +/** + * @param {Error} err + * @returns {string} + */ +const messageWithCauses = (err) => _messageWithCauses(err, new Set()) + +module.exports = { + isErrorLike, + getErrorCause, + stackWithCauses, + messageWithCauses +} diff --git a/node_modules/pino-std-serializers/lib/err-proto.js b/node_modules/pino-std-serializers/lib/err-proto.js new file mode 100644 index 0000000..a01447d --- /dev/null +++ b/node_modules/pino-std-serializers/lib/err-proto.js @@ -0,0 +1,48 @@ +'use strict' + +const seen = Symbol('circular-ref-tag') +const rawSymbol = Symbol('pino-raw-err-ref') + +const pinoErrProto = Object.create({}, { + type: { + enumerable: true, + writable: true, + value: undefined + }, + message: { + enumerable: true, + writable: true, + value: undefined + }, + stack: { + enumerable: true, + writable: true, + value: undefined + }, + aggregateErrors: { + enumerable: true, + writable: true, + value: undefined + }, + raw: { + enumerable: false, + get: function () { + return this[rawSymbol] + }, + set: function (val) { + this[rawSymbol] = val + } + } +}) +Object.defineProperty(pinoErrProto, rawSymbol, { + writable: true, + value: {} +}) + +module.exports = { + pinoErrProto, + pinoErrorSymbols: { + seen, + rawSymbol + } +} diff --git a/node_modules/pino-std-serializers/lib/err-with-cause.js b/node_modules/pino-std-serializers/lib/err-with-cause.js new file mode 100644 index 0000000..29939e0 --- /dev/null +++ b/node_modules/pino-std-serializers/lib/err-with-cause.js @@ -0,0 +1,48 @@ +'use strict' + +module.exports = errWithCauseSerializer + +const { isErrorLike } = require('./err-helpers') +const { pinoErrProto, pinoErrorSymbols } = require('./err-proto') +const { seen } = pinoErrorSymbols + +const { toString } = Object.prototype + +function errWithCauseSerializer (err) { + if (!isErrorLike(err)) { + return err + } + + err[seen] = undefined // tag to prevent re-looking at this + const _err = Object.create(pinoErrProto) + _err.type = toString.call(err.constructor) === '[object 
Function]' + ? err.constructor.name + : err.name + _err.message = err.message + _err.stack = err.stack + + if (Array.isArray(err.errors)) { + _err.aggregateErrors = err.errors.map(err => errWithCauseSerializer(err)) + } + + if (isErrorLike(err.cause) && !Object.prototype.hasOwnProperty.call(err.cause, seen)) { + _err.cause = errWithCauseSerializer(err.cause) + } + + for (const key in err) { + if (_err[key] === undefined) { + const val = err[key] + if (isErrorLike(val)) { + if (!Object.prototype.hasOwnProperty.call(val, seen)) { + _err[key] = errWithCauseSerializer(val) + } + } else { + _err[key] = val + } + } + } + + delete err[seen] // clean up tag in case err is serialized again later + _err.raw = err + return _err +} diff --git a/node_modules/pino-std-serializers/lib/err.js b/node_modules/pino-std-serializers/lib/err.js new file mode 100644 index 0000000..338b230 --- /dev/null +++ b/node_modules/pino-std-serializers/lib/err.js @@ -0,0 +1,45 @@ +'use strict' + +module.exports = errSerializer + +const { messageWithCauses, stackWithCauses, isErrorLike } = require('./err-helpers') +const { pinoErrProto, pinoErrorSymbols } = require('./err-proto') +const { seen } = pinoErrorSymbols + +const { toString } = Object.prototype + +function errSerializer (err) { + if (!isErrorLike(err)) { + return err + } + + err[seen] = undefined // tag to prevent re-looking at this + const _err = Object.create(pinoErrProto) + _err.type = toString.call(err.constructor) === '[object Function]' + ? err.constructor.name + : err.name + _err.message = messageWithCauses(err) + _err.stack = stackWithCauses(err) + + if (Array.isArray(err.errors)) { + _err.aggregateErrors = err.errors.map(err => errSerializer(err)) + } + + for (const key in err) { + if (_err[key] === undefined) { + const val = err[key] + if (isErrorLike(val)) { + // We append cause messages and stacks to _err, therefore skipping causes here + if (key !== 'cause' && !Object.prototype.hasOwnProperty.call(val, seen)) { + _err[key] = errSerializer(val) + } + } else { + _err[key] = val + } + } + } + + delete err[seen] // clean up tag in case err is serialized again later + _err.raw = err + return _err +} diff --git a/node_modules/pino-std-serializers/lib/req.js b/node_modules/pino-std-serializers/lib/req.js new file mode 100644 index 0000000..4a58034 --- /dev/null +++ b/node_modules/pino-std-serializers/lib/req.js @@ -0,0 +1,100 @@ +'use strict' + +module.exports = { + mapHttpRequest, + reqSerializer +} + +const rawSymbol = Symbol('pino-raw-req-ref') +const pinoReqProto = Object.create({}, { + id: { + enumerable: true, + writable: true, + value: '' + }, + method: { + enumerable: true, + writable: true, + value: '' + }, + url: { + enumerable: true, + writable: true, + value: '' + }, + query: { + enumerable: true, + writable: true, + value: '' + }, + params: { + enumerable: true, + writable: true, + value: '' + }, + headers: { + enumerable: true, + writable: true, + value: {} + }, + remoteAddress: { + enumerable: true, + writable: true, + value: '' + }, + remotePort: { + enumerable: true, + writable: true, + value: '' + }, + raw: { + enumerable: false, + get: function () { + return this[rawSymbol] + }, + set: function (val) { + this[rawSymbol] = val + } + } +}) +Object.defineProperty(pinoReqProto, rawSymbol, { + writable: true, + value: {} +}) + +function reqSerializer (req) { + // req.info is for hapi compat. + const connection = req.info || req.socket + const _req = Object.create(pinoReqProto) + _req.id = (typeof req.id === 'function' ? 
req.id() : (req.id || (req.info ? req.info.id : undefined))) + _req.method = req.method + // req.originalUrl is for expressjs compat. + if (req.originalUrl) { + _req.url = req.originalUrl + } else { + const path = req.path + // path for safe hapi compat. + _req.url = typeof path === 'string' ? path : (req.url ? req.url.path || req.url : undefined) + } + + if (req.query) { + _req.query = req.query + } + + if (req.params) { + _req.params = req.params + } + + _req.headers = req.headers + _req.remoteAddress = connection && connection.remoteAddress + _req.remotePort = connection && connection.remotePort + // req.raw is for hapi compat/equivalence + _req.raw = req.raw || req + return _req +} + +function mapHttpRequest (req) { + return { + req: reqSerializer(req) + } +} diff --git a/node_modules/pino-std-serializers/lib/res.js b/node_modules/pino-std-serializers/lib/res.js new file mode 100644 index 0000000..e48004b --- /dev/null +++ b/node_modules/pino-std-serializers/lib/res.js @@ -0,0 +1,47 @@ +'use strict' + +module.exports = { + mapHttpResponse, + resSerializer +} + +const rawSymbol = Symbol('pino-raw-res-ref') +const pinoResProto = Object.create({}, { + statusCode: { + enumerable: true, + writable: true, + value: 0 + }, + headers: { + enumerable: true, + writable: true, + value: '' + }, + raw: { + enumerable: false, + get: function () { + return this[rawSymbol] + }, + set: function (val) { + this[rawSymbol] = val + } + } +}) +Object.defineProperty(pinoResProto, rawSymbol, { + writable: true, + value: {} +}) + +function resSerializer (res) { + const _res = Object.create(pinoResProto) + _res.statusCode = res.headersSent ? res.statusCode : null + _res.headers = res.getHeaders ? res.getHeaders() : res._headers + _res.raw = res + return _res +} + +function mapHttpResponse (res) { + return { + res: resSerializer(res) + } +} diff --git a/node_modules/pino-std-serializers/package.json b/node_modules/pino-std-serializers/package.json new file mode 100644 index 0000000..be6fbde --- /dev/null +++ b/node_modules/pino-std-serializers/package.json @@ -0,0 +1,47 @@ +{ + "name": "pino-std-serializers", + "version": "7.0.0", + "description": "A collection of standard object serializers for Pino", + "main": "index.js", + "type": "commonjs", + "types": "index.d.ts", + "scripts": { + "lint": "standard | snazzy", + "lint-ci": "standard", + "test": "borp -p 'test/**/*.js'", + "test-ci": "borp --coverage -p 'test/**/*.js'", + "test-types": "tsc && tsd" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/pinojs/pino-std-serializers.git" + }, + "keywords": [ + "pino", + "logging" + ], + "author": "James Sumners ", + "license": "MIT", + "bugs": { + "url": "https://github.com/pinojs/pino-std-serializers/issues" + }, + "homepage": "https://github.com/pinojs/pino-std-serializers#readme", + "precommit": [ + "lint", + "test", + "test-types" + ], + "devDependencies": { + "@matteo.collina/tspl": "^0.1.1", + "@types/node": "^20.11.17", + "borp": "^0.9.1", + "pre-commit": "^1.2.2", + "snazzy": "^9.0.0", + "standard": "^17.1.0", + "tsd": "^0.31.0", + "typescript": "^5.3.3" + }, + "tsd": { + "directory": "test/types" + } +} diff --git a/node_modules/pino-std-serializers/test/err-with-cause.test.js b/node_modules/pino-std-serializers/test/err-with-cause.test.js new file mode 100644 index 0000000..15f356a --- /dev/null +++ b/node_modules/pino-std-serializers/test/err-with-cause.test.js @@ -0,0 +1,187 @@ +'use strict' + +const { test } = require('node:test') +const assert = require('node:assert') +const 
serializer = require('../lib/err-with-cause') +const { wrapErrorSerializer } = require('../') + +test('serializes Error objects', () => { + const serialized = serializer(Error('foo')) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err-with-cause\.test\.js:/) +}) + +test('serializes Error objects with extra properties', () => { + const err = Error('foo') + err.statusCode = 500 + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.ok(serialized.statusCode) + assert.strictEqual(serialized.statusCode, 500) + assert.match(serialized.stack, /err-with-cause\.test\.js:/) +}) + +test('serializes Error objects with subclass "type"', () => { + class MyError extends Error {} + + const err = new MyError('foo') + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'MyError') +}) + +test('serializes nested errors', () => { + const err = Error('foo') + err.inner = Error('bar') + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err-with-cause\.test\.js:/) + assert.strictEqual(serialized.inner.type, 'Error') + assert.strictEqual(serialized.inner.message, 'bar') + assert.match(serialized.inner.stack, /Error: bar/) + assert.match(serialized.inner.stack, /err-with-cause\.test\.js:/) +}) + +test('serializes error causes', () => { + const innerErr = Error('inner') + const middleErr = Error('middle') + middleErr.cause = innerErr + const outerErr = Error('outer') + outerErr.cause = middleErr + + const serialized = serializer(outerErr) + + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'outer') + assert.match(serialized.stack, /err-with-cause\.test\.js:/) + + assert.strictEqual(serialized.cause.type, 'Error') + assert.strictEqual(serialized.cause.message, 'middle') + assert.match(serialized.cause.stack, /err-with-cause\.test\.js:/) + + assert.strictEqual(serialized.cause.cause.type, 'Error') + assert.strictEqual(serialized.cause.cause.message, 'inner') + assert.match(serialized.cause.cause.stack, /err-with-cause\.test\.js:/) +}) + +test('keeps non-error cause', () => { + const err = Error('foo') + err.cause = 'abc' + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.strictEqual(serialized.cause, 'abc') +}) + +test('prevents infinite recursion', () => { + const err = Error('foo') + err.inner = err + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err-with-cause\.test\.js:/) + assert.ok(!serialized.inner) +}) + +test('cleans up infinite recursion tracking', () => { + const err = Error('foo') + const bar = Error('bar') + err.inner = bar + bar.inner = err + + serializer(err) + const serialized = serializer(err) + + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err-with-cause\.test\.js:/) + assert.ok(serialized.inner) + assert.strictEqual(serialized.inner.type, 'Error') + assert.strictEqual(serialized.inner.message, 'bar') + assert.match(serialized.inner.stack, /Error: bar/) + assert.ok(!serialized.inner.inner) +}) + +test('err.raw is available', () => { + const err = Error('foo') + const serialized = 
serializer(err) + assert.strictEqual(serialized.raw, err) +}) + +test('redefined err.constructor doesnt crash serializer', () => { + function check (a, name) { + assert.strictEqual(a.type, name) + assert.strictEqual(a.message, 'foo') + } + + const err1 = TypeError('foo') + err1.constructor = '10' + + const err2 = TypeError('foo') + err2.constructor = undefined + + const err3 = Error('foo') + err3.constructor = null + + const err4 = Error('foo') + err4.constructor = 10 + + class MyError extends Error {} + + const err5 = new MyError('foo') + err5.constructor = undefined + + check(serializer(err1), 'TypeError') + check(serializer(err2), 'TypeError') + check(serializer(err3), 'Error') + check(serializer(err4), 'Error') + // We do not expect 'MyError' because err5.constructor has been blown away. + // `err5.name` is 'Error' from the base class prototype. + check(serializer(err5), 'Error') +}) + +test('pass through anything that does not look like an Error', () => { + function check (a) { + assert.strictEqual(serializer(a), a) + } + + check('foo') + check({ hello: 'world' }) + check([1, 2]) +}) + +test('can wrap err serializers', () => { + const err = Error('foo') + err.foo = 'foo' + const serializer = wrapErrorSerializer(function (err) { + delete err.foo + err.bar = 'bar' + return err + }) + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err-with-cause\.test\.js:/) + assert.ok(!serialized.foo) + assert.strictEqual(serialized.bar, 'bar') +}) + +test('serializes aggregate errors', { skip: !global.AggregateError }, () => { + const foo = new Error('foo') + const bar = new Error('bar') + for (const aggregate of [ + new AggregateError([foo, bar], 'aggregated message'), // eslint-disable-line no-undef + { errors: [foo, bar], message: 'aggregated message', stack: 'err-with-cause.test.js:' } + ]) { + const serialized = serializer(aggregate) + assert.strictEqual(serialized.message, 'aggregated message') + assert.strictEqual(serialized.aggregateErrors.length, 2) + assert.strictEqual(serialized.aggregateErrors[0].message, 'foo') + assert.strictEqual(serialized.aggregateErrors[1].message, 'bar') + assert.match(serialized.aggregateErrors[0].stack, /^Error: foo/) + assert.match(serialized.aggregateErrors[1].stack, /^Error: bar/) + assert.match(serialized.stack, /err-with-cause\.test\.js:/) + } +}) diff --git a/node_modules/pino-std-serializers/test/err.test.js b/node_modules/pino-std-serializers/test/err.test.js new file mode 100644 index 0000000..0aecb07 --- /dev/null +++ b/node_modules/pino-std-serializers/test/err.test.js @@ -0,0 +1,200 @@ +'use strict' + +const assert = require('node:assert') +const { test } = require('node:test') +const serializer = require('../lib/err') +const { wrapErrorSerializer } = require('../') + +test('serializes Error objects', () => { + const serialized = serializer(Error('foo')) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err\.test\.js:/) +}) + +test('serializes Error objects with extra properties', () => { + const err = Error('foo') + err.statusCode = 500 + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.ok(serialized.statusCode) + assert.strictEqual(serialized.statusCode, 500) + assert.match(serialized.stack, /err\.test\.js:/) +}) + +test('serializes Error objects with subclass "type"', 
() => { + class MyError extends Error {} + const err = new MyError('foo') + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'MyError') +}) + +test('serializes nested errors', () => { + const err = Error('foo') + err.inner = Error('bar') + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err\.test\.js:/) + assert.strictEqual(serialized.inner.type, 'Error') + assert.strictEqual(serialized.inner.message, 'bar') + assert.match(serialized.inner.stack, /Error: bar/) + assert.match(serialized.inner.stack, /err\.test\.js:/) +}) + +test('serializes error causes', () => { + for (const cause of [ + Error('bar'), + { message: 'bar', stack: 'Error: bar: err.test.js:' } + ]) { + const err = Error('foo') + err.cause = cause + err.cause.cause = Error('abc') + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo: bar: abc') + assert.match(serialized.stack, /err\.test\.js:/) + assert.match(serialized.stack, /Error: foo/) + assert.match(serialized.stack, /Error: bar/) + assert.match(serialized.stack, /Error: abc/) + assert.ok(!serialized.cause) + } +}) + +test('serializes error causes with VError support', function (t) { + // Fake VError-style setup + const err = Error('foo: bar') + err.foo = 'abc' + err.cause = function () { + const err = Error('bar') + err.cause = Error(this.foo) + return err + } + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo: bar: abc') + assert.match(serialized.stack, /err\.test\.js:/) + assert.match(serialized.stack, /Error: foo/) + assert.match(serialized.stack, /Error: bar/) + assert.match(serialized.stack, /Error: abc/) +}) + +test('keeps non-error cause', () => { + const err = Error('foo') + err.cause = 'abc' + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.strictEqual(serialized.cause, 'abc') +}) + +test('prevents infinite recursion', () => { + const err = Error('foo') + err.inner = err + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err\.test\.js:/) + assert.ok(!serialized.inner) +}) + +test('cleans up infinite recursion tracking', () => { + const err = Error('foo') + const bar = Error('bar') + err.inner = bar + bar.inner = err + + serializer(err) + const serialized = serializer(err) + + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err\.test\.js:/) + assert.ok(serialized.inner) + assert.strictEqual(serialized.inner.type, 'Error') + assert.strictEqual(serialized.inner.message, 'bar') + assert.match(serialized.inner.stack, /Error: bar/) + assert.ok(!serialized.inner.inner) +}) + +test('err.raw is available', () => { + const err = Error('foo') + const serialized = serializer(err) + assert.strictEqual(serialized.raw, err) +}) + +test('redefined err.constructor doesnt crash serializer', () => { + function check (a, name) { + assert.strictEqual(a.type, name) + assert.strictEqual(a.message, 'foo') + } + + const err1 = TypeError('foo') + err1.constructor = '10' + + const err2 = TypeError('foo') + err2.constructor = undefined + + const err3 = Error('foo') + err3.constructor = null + + const err4 = Error('foo') 
+ err4.constructor = 10 + + class MyError extends Error {} + const err5 = new MyError('foo') + err5.constructor = undefined + + check(serializer(err1), 'TypeError') + check(serializer(err2), 'TypeError') + check(serializer(err3), 'Error') + check(serializer(err4), 'Error') + // We do not expect 'MyError' because err5.constructor has been blown away. + // `err5.name` is 'Error' from the base class prototype. + check(serializer(err5), 'Error') +}) + +test('pass through anything that does not look like an Error', () => { + function check (a) { + assert.strictEqual(serializer(a), a) + } + + check('foo') + check({ hello: 'world' }) + check([1, 2]) +}) + +test('can wrap err serializers', () => { + const err = Error('foo') + err.foo = 'foo' + const serializer = wrapErrorSerializer(function (err) { + delete err.foo + err.bar = 'bar' + return err + }) + const serialized = serializer(err) + assert.strictEqual(serialized.type, 'Error') + assert.strictEqual(serialized.message, 'foo') + assert.match(serialized.stack, /err\.test\.js:/) + assert.ok(!serialized.foo) + assert.strictEqual(serialized.bar, 'bar') +}) + +test('serializes aggregate errors', { skip: !global.AggregateError }, () => { + const foo = new Error('foo') + const bar = new Error('bar') + for (const aggregate of [ + new AggregateError([foo, bar], 'aggregated message'), // eslint-disable-line no-undef + { errors: [foo, bar], message: 'aggregated message', stack: 'err.test.js:' } + ]) { + const serialized = serializer(aggregate) + assert.strictEqual(serialized.message, 'aggregated message') + assert.strictEqual(serialized.aggregateErrors.length, 2) + assert.strictEqual(serialized.aggregateErrors[0].message, 'foo') + assert.strictEqual(serialized.aggregateErrors[1].message, 'bar') + assert.match(serialized.aggregateErrors[0].stack, /^Error: foo/) + assert.match(serialized.aggregateErrors[1].stack, /^Error: bar/) + assert.match(serialized.stack, /err\.test\.js:/) + } +}) diff --git a/node_modules/pino-std-serializers/test/req.test.js b/node_modules/pino-std-serializers/test/req.test.js new file mode 100644 index 0000000..d8a6486 --- /dev/null +++ b/node_modules/pino-std-serializers/test/req.test.js @@ -0,0 +1,477 @@ +'use strict' + +const { tspl } = require('@matteo.collina/tspl') +const http = require('node:http') +const { test } = require('node:test') +const serializers = require('../lib/req') +const { wrapRequestSerializer } = require('../') + +test('maps request', async (t) => { + const p = tspl(t, { plan: 2 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + const serialized = serializers.mapHttpRequest(req) + p.ok(serialized.req) + p.ok(serialized.req.method) + res.end() + } + + await p.completed +}) + +test('does not return excessively long object', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + const serialized = serializers.reqSerializer(req) + p.strictEqual(Object.keys(serialized).length, 6) + res.end() + } + + await p.completed +}) + +test('req.raw is available', async (t) => { + const p = tspl(t, { plan: 2 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + 
function handler (req, res) { + req.foo = 'foo' + const serialized = serializers.reqSerializer(req) + p.ok(serialized.raw) + p.strictEqual(serialized.raw.foo, 'foo') + res.end() + } + + await p.completed +}) + +test('req.raw will be obtained from input request raw property if input request raw property is truthy', async (t) => { + const p = tspl(t, { plan: 2 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.raw = { req: { foo: 'foo' }, res: {} } + const serialized = serializers.reqSerializer(req) + p.ok(serialized.raw) + p.strictEqual(serialized.raw.req.foo, 'foo') + res.end() + } + + await p.completed +}) + +test('req.id defaults to undefined', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.id, undefined) + res.end() + } + + await p.completed +}) + +test('req.id has a non-function value', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + const serialized = serializers.reqSerializer(req) + p.strictEqual(typeof serialized.id === 'function', false) + res.end() + } + + await p.completed +}) + +test('req.id will be obtained from input request info.id when input request id does not exist', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.info = { id: 'test' } + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.id, 'test') + res.end() + } + + await p.completed +}) + +test('req.id has a non-function value with custom id function', async (t) => { + const p = tspl(t, { plan: 2 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.id = function () { return 42 } + const serialized = serializers.reqSerializer(req) + p.strictEqual(typeof serialized.id === 'function', false) + p.strictEqual(serialized.id, 42) + res.end() + } + + await p.completed +}) + +test('req.url will be obtained from input request req.path when set', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.path = '/test' + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.url, '/test') + res.end() + } + + await p.completed +}) + +test('req.url will be obtained from input request url.path when input request url is an object', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req,
res) { + req.url = { path: '/test' } + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.url, '/test') + res.end() + } + + await p.completed +}) + +test('req.url will be obtained from input request url when input request url is not an object', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.url = '/test' + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.url, '/test') + res.end() + } + + await p.completed +}) + +test('req.url falls back to the underlying request url when path and url are not defined', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.url, '/') + res.end() + } + + await p.completed +}) + +test('req.url will be obtained from input request originalUrl when available', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.originalUrl = '/test' + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.url, '/test') + res.end() + } + + await p.completed +}) + +test('req.url will be obtained from input request url when req path is a function', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.path = function () { + throw new Error('unexpected invocation') + } + req.url = '/test' + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.url, '/test') + res.end() + } + + await p.completed +}) + +test('req.url being undefined does not throw an error', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.url = undefined + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.url, undefined) + res.end() + } + + await p.completed +}) + +test('can wrap request serializers', async (t) => { + const p = tspl(t, { plan: 3 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + const serializer = wrapRequestSerializer(function (req) { + p.ok(req.method) + p.strictEqual(req.method, 'GET') + delete req.method + return req + }) + + function handler (req, res) { + const serialized = serializer(req) + p.ok(!serialized.method) + res.end() + } + + await p.completed +}) + +test('req.remoteAddress will be obtained from request socket.remoteAddress as fallback', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req,
res) { + req.socket = { remoteAddress: 'http://localhost' } + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.remoteAddress, 'http://localhost') + res.end() + } + + await p.completed +}) + +test('req.remoteAddress will be obtained from request info.remoteAddress if available', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.info = { remoteAddress: 'http://localhost' } + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.remoteAddress, 'http://localhost') + res.end() + } + + await p.completed +}) + +test('req.remotePort will be obtained from request socket.remotePort as fallback', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.socket = { remotePort: 3000 } + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.remotePort, 3000) + res.end() + } + + await p.completed +}) + +test('req.remotePort will be obtained from request info.remotePort if available', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.info = { remotePort: 3000 } + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.remotePort, 3000) + res.end() + } + + await p.completed +}) + +test('req.query is available', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.query = '/foo?bar=foobar&bar=foo' + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.query, '/foo?bar=foobar&bar=foo') + res.end() + } + + await p.completed +}) + +test('req.params is available', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (req, res) { + req.params = '/foo/bar' + const serialized = serializers.reqSerializer(req) + p.strictEqual(serialized.params, '/foo/bar') + res.end() + } + + await p.completed +}) diff --git a/node_modules/pino-std-serializers/test/res.test.js b/node_modules/pino-std-serializers/test/res.test.js new file mode 100644 index 0000000..638afaf --- /dev/null +++ b/node_modules/pino-std-serializers/test/res.test.js @@ -0,0 +1,120 @@ +'use strict' + +/* eslint-disable no-prototype-builtins */ + +const { tspl } = require('@matteo.collina/tspl') +const http = require('node:http') +const { test } = require('node:test') +const serializers = require('../lib/res') +const { wrapResponseSerializer } = require('../') + +test('res.raw is not enumerable', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (_req, res) { + const serialized = 
serializers.resSerializer(res) + p.strictEqual(serialized.propertyIsEnumerable('raw'), false) + res.end() + } + + await p.completed +}) + +test('res.raw is available', async (t) => { + const p = tspl(t, { plan: 2 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (_req, res) { + res.statusCode = 200 + const serialized = serializers.resSerializer(res) + p.ok(serialized.raw) + p.strictEqual(serialized.raw.statusCode, 200) + res.end() + } + + await p.completed +}) + +test('can wrap response serializers', async (t) => { + const p = tspl(t, { plan: 3 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + const serializer = wrapResponseSerializer(function (res) { + p.ok(res.statusCode) + p.strictEqual(res.statusCode, 200) + delete res.statusCode + return res + }) + + function handler (_req, res) { + res.end() + res.statusCode = 200 + const serialized = serializer(res) + p.ok(!serialized.statusCode) + } + + await p.completed +}) + +test('res.headers is serialized', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (_req, res) { + res.setHeader('x-custom', 'y') + const serialized = serializers.resSerializer(res) + p.strictEqual(serialized.headers['x-custom'], 'y') + res.end() + } + + await p.completed +}) + +test('res.statusCode is null before the response has been written', async (t) => { + const p = tspl(t, { plan: 1 }) + + const server = http.createServer(handler) + server.unref() + server.listen(0, () => { + http.get(server.address(), () => {}) + }) + + t.after(() => server.close()) + + function handler (_req, res) { + const serialized = serializers.resSerializer(res) + p.strictEqual(serialized.statusCode, null) + res.end() + } + + await p.completed +}) diff --git a/node_modules/pino-std-serializers/test/types/index.test-d.ts b/node_modules/pino-std-serializers/test/types/index.test-d.ts new file mode 100644 index 0000000..f896d29 --- /dev/null +++ b/node_modules/pino-std-serializers/test/types/index.test-d.ts @@ -0,0 +1,71 @@ +import {IncomingMessage, ServerResponse} from "http"; +import { + err, + errWithCause, + req, + res, + SerializedError, + SerializedRequest, + wrapErrorSerializer, + wrapRequestSerializer, + wrapResponseSerializer, + SerializedResponse +} from '../../'; + +const customErrorSerializer = (error: SerializedError) => { + return { + myOwnError: { + data: `${error.type}-${error.message}\n\n${error.stack}`, + } + }; +}; + +const customRequestSerializer = (req: SerializedRequest) => { + const { + headers, + id, + method, + raw, + remoteAddress, + remotePort, + url, + query, + params, + } = req; + return { + myOwnRequest: { + data: `${method}-${id}-${remoteAddress}-${remotePort}-${url}`, + headers, + raw, + } + }; +}; + +const customResponseSerializer = (res: SerializedResponse) => { + const {headers, raw, statusCode} = res; + return { + myOwnResponse: { + data: statusCode, + headers, + raw, + } + }; +}; + +const fakeError = new Error('A fake error for testing'); +const serializedError: SerializedError = err(fakeError); +const mySerializer = wrapErrorSerializer(customErrorSerializer); + +const fakeErrorWithCause
= new Error('A fake error for testing with cause', { cause: new Error('An inner fake error') }); +const serializedErrorWithCause: SerializedError = errWithCause(fakeErrorWithCause); + +const request: IncomingMessage = {} as IncomingMessage +const serializedRequest: SerializedRequest = req(request); +const myReqSerializer = wrapRequestSerializer(customRequestSerializer); + +const response: ServerResponse = {} as ServerResponse +const myResSerializer = wrapResponseSerializer(customResponseSerializer); +const serializedResponse = res(response); + +myResSerializer(response) + diff --git a/node_modules/pino-std-serializers/tsconfig.json b/node_modules/pino-std-serializers/tsconfig.json new file mode 100644 index 0000000..d3be182 --- /dev/null +++ b/node_modules/pino-std-serializers/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "es6", + "lib": [ "es2022" ], + "module": "commonjs", + "noEmit": true, + "strict": true + }, + "include": [ + "./test/types/*.test-d.ts", + "./index.d.ts" + ] +} diff --git a/node_modules/pino/.nojekyll b/node_modules/pino/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/pino/.prettierignore b/node_modules/pino/.prettierignore new file mode 100644 index 0000000..72e8ffc --- /dev/null +++ b/node_modules/pino/.prettierignore @@ -0,0 +1 @@ +* diff --git a/node_modules/pino/CNAME b/node_modules/pino/CNAME new file mode 100644 index 0000000..6a32cdb --- /dev/null +++ b/node_modules/pino/CNAME @@ -0,0 +1 @@ +getpino.io \ No newline at end of file diff --git a/node_modules/pino/CONTRIBUTING.md b/node_modules/pino/CONTRIBUTING.md new file mode 100644 index 0000000..a6c44ff --- /dev/null +++ b/node_modules/pino/CONTRIBUTING.md @@ -0,0 +1,30 @@ +# Pino is an OPEN Open Source Project + +## What? + +Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project. + +## Rules + +Before you start coding, please read [Contributing to projects with git](https://jrfom.com/posts/2017/03/08/a-primer-on-contributing-to-projects-with-git/). + +Notice that as long as you don't have commit-access to the project, you have to fork the project and open PRs from the feature branches of the forked project. + +There are a few basic ground-rules for contributors: + +1. **No `--force` pushes** on `master` or modifying the Git history in any way after a PR has been merged. +1. **Non-master branches** ought to be used for ongoing work. +1. **Non-trivial changes** ought to be subject to an **internal pull-request** to solicit feedback from other contributors. +1. All pull-requests for new features **must** target the `master` branch. PRs to fix bugs in LTS releases are also allowed. +1. Contributors should attempt to adhere to the prevailing code-style. +1. 100% code coverage + +## Releases + +Declaring formal releases remains the prerogative of the project maintainer. + +## Changes to this arrangement + +This is an experiment and feedback is welcome! This document may also be subject to pull-requests or changes by contributors where you believe you have something valuable to add or change.
+ +----------------------------------------- diff --git a/node_modules/pino/LICENSE b/node_modules/pino/LICENSE new file mode 100644 index 0000000..91d47c9 --- /dev/null +++ b/node_modules/pino/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2025 Matteo Collina, David Mark Clements and the Pino contributors listed at and in the README file. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/pino/README.md b/node_modules/pino/README.md new file mode 100644 index 0000000..5232c36 --- /dev/null +++ b/node_modules/pino/README.md @@ -0,0 +1,177 @@ +![banner](pino-banner.png) + +# pino +[![npm version](https://img.shields.io/npm/v/pino)](https://www.npmjs.com/package/pino) +[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino/ci.yml)](https://github.com/pinojs/pino/actions) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) + +[Very low overhead](#low-overhead) JavaScript logger. + +## Documentation + +* [Benchmarks ⇗](/docs/benchmarks.md) +* [API ⇗](/docs/api.md) +* [Browser API ⇗](/docs/browser.md) +* [Redaction ⇗](/docs/redaction.md) +* [Child Loggers ⇗](/docs/child-loggers.md) +* [Transports ⇗](/docs/transports.md) +* [Diagnostics ⇗](/docs/diagnostics.md) +* [Web Frameworks ⇗](/docs/web.md) +* [Pretty Printing ⇗](/docs/pretty.md) +* [Asynchronous Logging ⇗](/docs/asynchronous.md) +* [Ecosystem ⇗](/docs/ecosystem.md) +* [Help ⇗](/docs/help.md) +* [Long Term Support Policy ⇗](/docs/lts.md) + +## Runtimes + +### Node.js + +Pino is built to run on [Node.js](http://nodejs.org). + +### Bare + +Pino works on [Bare](https://github.com/holepunchto/bare) with the [`pino-bare`](https://github.com/pinojs/pino-bare) compatibility module. + +### Pear + +Pino works on [Pear](https://docs.pears.com), which is built on [Bare](https://github.com/holepunchto/bare), with the [`pino-bare`](https://github.com/pinojs/pino-bare) compatibility module. + + +## Install + +Using NPM: +``` +$ npm install pino +``` + +Using YARN: +``` +$ yarn add pino +``` + +If you would like to install pino v6, refer to https://github.com/pinojs/pino/tree/v6.x.
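+ +If your project uses ES modules, the same logger factory is available through Pino's default export. A minimal sketch, assuming a Node.js project with ESM enabled: + +```js +import pino from 'pino' + +const logger = pino() +logger.info('hello world') +```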
+ +## Usage + +```js +const logger = require('pino')() + +logger.info('hello world') + +const child = logger.child({ a: 'property' }) +child.info('hello child!') +``` + +This produces: + +``` +{"level":30,"time":1531171074631,"msg":"hello world","pid":657,"hostname":"Davids-MBP-3.fritz.box"} +{"level":30,"time":1531171082399,"msg":"hello child!","pid":657,"hostname":"Davids-MBP-3.fritz.box","a":"property"} +``` + +For using Pino with a web framework see: + +* [Pino with Fastify](docs/web.md#fastify) +* [Pino with Express](docs/web.md#express) +* [Pino with Hapi](docs/web.md#hapi) +* [Pino with Restify](docs/web.md#restify) +* [Pino with Koa](docs/web.md#koa) +* [Pino with Node core `http`](docs/web.md#http) +* [Pino with Nest](docs/web.md#nest) +* [Pino with Hono](docs/web.md#hono) + + +## Essentials + +### Development Formatting + +The [`pino-pretty`](https://github.com/pinojs/pino-pretty) module can be used to +format logs during development: + +![pretty demo](pretty-demo.png) + +### Transports & Log Processing + +Due to Node's single-threaded event-loop, it's highly recommended that sending, +alert triggering, reformatting, and all forms of log processing +are conducted in a separate process or thread. + +In Pino terminology, we call all log processors "transports" and recommend that the +transports be run in a worker thread using our `pino.transport` API. + +For more details see our [Transports⇗](docs/transports.md) document. + +### Low overhead + +Using minimum resources for logging is very important. Log messages +tend to get added over time and this can lead to a throttling effect +on applications – such as reduced requests per second. + +In many cases, Pino is over 5x faster than alternatives. + +See the [Benchmarks](docs/benchmarks.md) document for comparisons. + +### Bundling support + +Pino supports being bundled using tools like webpack or esbuild. + +See the [Bundling](docs/bundling.md) document for more information. + + +## The Team + +### Matteo Collina + + + + + + + +### David Mark Clements + + + + + + + +### James Sumners + + + + + + + +### Thomas Watson Steen + + + + + + + +## Contributing + +Pino is an **OPEN Open Source Project**. This means that: + +> Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project. + +See the [CONTRIBUTING.md](https://github.com/pinojs/pino/blob/main/CONTRIBUTING.md) file for more details. + + +## Acknowledgments + +This project was kindly sponsored by [nearForm](https://nearform.com). +This project is kindly sponsored by [Platformatic](https://platformatic.dev). + +Logo and identity designed by Cosmic Fox Design: https://www.behance.net/cosmicfox. + +## License + +Licensed under [MIT](./LICENSE). + +[elasticsearch]: https://www.elastic.co/products/elasticsearch +[kibana]: https://www.elastic.co/products/kibana diff --git a/node_modules/pino/SECURITY.md b/node_modules/pino/SECURITY.md new file mode 100644 index 0000000..966533a --- /dev/null +++ b/node_modules/pino/SECURITY.md @@ -0,0 +1,68 @@ +# Security Policy + +This document describes the management of vulnerabilities for the +Pino project and all modules within the Pino organization. + +## Reporting vulnerabilities + +Individuals who find potential vulnerabilities in Pino are invited +to report them via email at matteo.collina@gmail.com. + +### Strict measures when reporting vulnerabilities + +Avoid creating new "informative" reports.
Only create a new +report for a potential vulnerability if you are absolutely sure it +should be tagged as an actual vulnerability. Be mindful of the maintainers' time. + +## Handling vulnerability reports + +When a potential vulnerability is reported, the following actions are taken: + +### Triage + +**Delay:** 5 business days + +Within 5 business days, a member of the security team provides a first answer to the +individual who submitted the potential vulnerability. The possible responses +can be: + +* Acceptance: what was reported is considered as a new vulnerability +* Rejection: what was reported is not considered as a new vulnerability +* Need more information: the security team needs more information in order to evaluate what was reported. + +Triaging should include updating issue fields: +* Asset - set/create the module affected by the report +* Severity - TBD, currently left empty + +### Correction follow-up + +**Delay:** 90 days + +When a vulnerability is confirmed, a member of the security team volunteers to follow +up on this report. + +With the help of the individual who reported the vulnerability, they contact +the maintainers of the vulnerable package to make them aware of the +vulnerability. The maintainers can be invited as participants to the reported issue. + +With the package maintainer, they define a release date for the publication +of the vulnerability. Ideally, this release date should not happen before +the package has been patched. + +The report's vulnerable versions upper limit should be set to: +* `*` if there is no fixed version available by the time of publishing the report. +* the last vulnerable version. For example: `<=1.2.3` if a fix exists in `1.2.4` + +### Publication + +**Delay:** 90 days + +Within 90 days after the triage date, the vulnerability must be made public. + +**Severity**: Vulnerability severity is assessed using [CVSS v.3](https://www.first.org/cvss/user-guide). + +If the package maintainer is actively developing a patch, an additional delay +can be added with the approval of the security team and the individual who +reported the vulnerability. + +At this point, a CVE will be requested by the team.
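The benchmark files that follow all share one `fastbench` harness pattern: each benchmark is a function that logs a small batch and signals completion through a callback, `bench(fns, iterations)` builds a runner, and `run(run)` executes the suite twice so the repeated pass exercises warmed-up code paths. A minimal sketch of that harness, distilled from the files below and assuming only that `fastbench` and `pino` are installed:

```js
'use strict'

const bench = require('fastbench')
const pino = require('pino')

// Log to /dev/null so the benchmark measures logging overhead, not disk I/O.
const logger = pino(pino.destination('/dev/null'))
const max = 10

const run = bench([
  function benchPinoHello (cb) {
    // Log a small batch, then yield back to the event loop.
    for (var i = 0; i < max; i++) {
      logger.info('hello world')
    }
    setImmediate(cb)
  }
], 10000)

// Passing run as its own callback repeats the suite once more.
run(run)
```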
diff --git a/node_modules/pino/benchmarks/basic.bench.js b/node_modules/pino/benchmarks/basic.bench.js new file mode 100644 index 0000000..a1e27d4 --- /dev/null +++ b/node_modules/pino/benchmarks/basic.bench.js @@ -0,0 +1,95 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const bole = require('bole')('bench') +const winston = require('winston') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const loglevel = require('./utils/wrap-log-level')(dest) +const plogNodeStream = pino(dest) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', minLength: 4096 })) +delete require.cache[require.resolve('../')] +const plogDest = require('../')(pino.destination('/dev/null')) + +process.env.DEBUG = 'dlog' +const debug = require('debug') +const dlog = debug('dlog') +dlog.log = function (s) { dest.write(s) } + +const max = 10 +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + }] +}) + +require('bole').output({ + level: 'info', + stream: dest +}).setFastTime(true) + +const chill = winston.createLogger({ + transports: [ + new winston.transports.Stream({ + stream: fs.createWriteStream('/dev/null') + }) + ] +}) + +const run = bench([ + function benchBunyan (cb) { + for (var i = 0; i < max; i++) { + blog.info('hello world') + } + setImmediate(cb) + }, + function benchWinston (cb) { + for (var i = 0; i < max; i++) { + chill.log('info', 'hello world') + } + setImmediate(cb) + }, + function benchBole (cb) { + for (var i = 0; i < max; i++) { + bole.info('hello world') + } + setImmediate(cb) + }, + function benchDebug (cb) { + for (var i = 0; i < max; i++) { + dlog('hello world') + } + setImmediate(cb) + }, + function benchLogLevel (cb) { + for (var i = 0; i < max; i++) { + loglevel.info('hello world') + } + setImmediate(cb) + }, + function benchPino (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello world') + } + setImmediate(cb) + }, + function benchPinoMinLength (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info('hello world') + } + setImmediate(cb) + }, + function benchPinoNodeStream (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info('hello world') + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/child-child.bench.js b/node_modules/pino/benchmarks/child-child.bench.js new file mode 100644 index 0000000..05da997 --- /dev/null +++ b/node_modules/pino/benchmarks/child-child.bench.js @@ -0,0 +1,52 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plogNodeStream = pino(dest).child({ a: 'property' }).child({ sub: 'child' }) +delete require.cache[require.resolve('../')] +const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' }).child({ sub: 'child' }) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) + .child({ a: 'property' }) + .child({ sub: 'child' }) + +const max = 10 +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + }] +}).child({ a: 'property' }).child({ sub: 'child' }) + +const run = bench([ + function benchBunyanChildChild (cb) { + for (var i = 0; i < max; i++) { + blog.info({ 
hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildChild (cb) { + for (var i = 0; i < max; i++) { + plogDest.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoMinLengthChildChild (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoNodeStreamChildChild (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info({ hello: 'world' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/child-creation.bench.js b/node_modules/pino/benchmarks/child-creation.bench.js new file mode 100644 index 0000000..fe6825e --- /dev/null +++ b/node_modules/pino/benchmarks/child-creation.bench.js @@ -0,0 +1,73 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const bole = require('bole')('bench') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plogNodeStream = pino(dest) +const plogDest = pino(pino.destination(('/dev/null'))) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) + +const max = 10 +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + }] +}) + +require('bole').output({ + level: 'info', + stream: dest +}).setFastTime(true) + +const run = bench([ + function benchBunyanCreation (cb) { + const child = blog.child({ a: 'property' }) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchBoleCreation (cb) { + const child = bole('child') + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoCreation (cb) { + const child = plogDest.child({ a: 'property' }) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoMinLengthCreation (cb) { + const child = plogMinLength.child({ a: 'property' }) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoNodeStreamCreation (cb) { + const child = plogNodeStream.child({ a: 'property' }) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoCreationWithOption (cb) { + const child = plogDest.child({ a: 'property' }, { redact: [] }) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/child.bench.js b/node_modules/pino/benchmarks/child.bench.js new file mode 100644 index 0000000..efe2d66 --- /dev/null +++ b/node_modules/pino/benchmarks/child.bench.js @@ -0,0 +1,62 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const bole = require('bole')('bench')('child') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plogNodeStream = pino(dest).child({ a: 'property' }) +delete require.cache[require.resolve('../')] +const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' }) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) + +const max = 10 +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + 
}] +}).child({ a: 'property' }) + +require('bole').output({ + level: 'info', + stream: dest +}).setFastTime(true) + +const run = bench([ + function benchBunyanChild (cb) { + for (var i = 0; i < max; i++) { + blog.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchBoleChild (cb) { + for (var i = 0; i < max; i++) { + bole.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChild (cb) { + for (var i = 0; i < max; i++) { + plogDest.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoMinLengthChild (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoNodeStreamChild (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info({ hello: 'world' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/deep-object.bench.js b/node_modules/pino/benchmarks/deep-object.bench.js new file mode 100644 index 0000000..44f6c34 --- /dev/null +++ b/node_modules/pino/benchmarks/deep-object.bench.js @@ -0,0 +1,88 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const bole = require('bole')('bench') +const winston = require('winston') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plogNodeStream = pino(dest) +delete require.cache[require.resolve('../')] +const plogDest = require('../')(pino.destination('/dev/null')) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) +delete require.cache[require.resolve('../')] + +const loglevel = require('./utils/wrap-log-level')(dest) + +const deep = Object.assign({}, require('../package.json'), { level: 'info' }) + +const max = 10 +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + }] +}) + +require('bole').output({ + level: 'info', + stream: dest +}).setFastTime(true) + +const chill = winston.createLogger({ + transports: [ + new winston.transports.Stream({ + stream: fs.createWriteStream('/dev/null') + }) + ] +}) + +const run = bench([ + function benchBunyanDeepObj (cb) { + for (var i = 0; i < max; i++) { + blog.info(deep) + } + setImmediate(cb) + }, + function benchWinstonDeepObj (cb) { + for (var i = 0; i < max; i++) { + chill.log(deep) + } + setImmediate(cb) + }, + function benchBoleDeepObj (cb) { + for (var i = 0; i < max; i++) { + bole.info(deep) + } + setImmediate(cb) + }, + function benchLogLevelDeepObj (cb) { + for (var i = 0; i < max; i++) { + loglevel.info(deep) + } + setImmediate(cb) + }, + function benchPinoDeepObj (cb) { + for (var i = 0; i < max; i++) { + plogDest.info(deep) + } + setImmediate(cb) + }, + function benchPinoMinLengthDeepObj (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info(deep) + } + setImmediate(cb) + }, + function benchPinoNodeStreamDeepObj (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info(deep) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/formatters.bench.js b/node_modules/pino/benchmarks/formatters.bench.js new file mode 100644 index 0000000..e6cc861 --- /dev/null +++ b/node_modules/pino/benchmarks/formatters.bench.js @@ -0,0 +1,50 @@ +'use strict' + +const formatters = { + level (label, number) { + return { + log: { + level: label + } + } + }, + bindings (bindings) { + return { + process: { + pid: bindings.pid + }, + host: { + name: 
bindings.hostname + } + } + }, + log (obj) { + return { foo: 'bar', ...obj } + } +} + +const bench = require('fastbench') +const pino = require('../') +delete require.cache[require.resolve('../')] +const pinoNoFormatters = require('../')(pino.destination('/dev/null')) +delete require.cache[require.resolve('../')] +const pinoFormatters = require('../')({ formatters }, pino.destination('/dev/null')) + +const max = 10 + +const run = bench([ + function benchPinoNoFormatters (cb) { + for (var i = 0; i < max; i++) { + pinoNoFormatters.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoFormatters (cb) { + for (var i = 0; i < max; i++) { + pinoFormatters.info({ hello: 'world' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/internal/custom-levels.js b/node_modules/pino/benchmarks/internal/custom-levels.js new file mode 100644 index 0000000..afb1cf1 --- /dev/null +++ b/node_modules/pino/benchmarks/internal/custom-levels.js @@ -0,0 +1,67 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../../') + +const base = pino(pino.destination('/dev/null')) +const baseCl = pino({ + customLevels: { foo: 31 } +}, pino.destination('/dev/null')) +const child = base.child({}) +const childCl = base.child({ + customLevels: { foo: 31 } +}) +const childOfBaseCl = baseCl.child({}) + +const max = 100 + +const run = bench([ + function benchPinoNoCustomLevel (cb) { + for (var i = 0; i < max; i++) { + base.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoCustomLevel (cb) { + for (var i = 0; i < max; i++) { + baseCl.foo({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchChildNoCustomLevel (cb) { + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildCustomLevel (cb) { + for (var i = 0; i < max; i++) { + childCl.foo({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildInheritedCustomLevel (cb) { + for (var i = 0; i < max; i++) { + childOfBaseCl.foo({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildCreation (cb) { + const child = base.child({}) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildCreationCustomLevel (cb) { + const child = base.child({ + customLevels: { foo: 31 } + }) + for (var i = 0; i < max; i++) { + child.foo({ hello: 'world' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/internal/just-pino-heavy.bench.js b/node_modules/pino/benchmarks/internal/just-pino-heavy.bench.js new file mode 100644 index 0000000..55efc85 --- /dev/null +++ b/node_modules/pino/benchmarks/internal/just-pino-heavy.bench.js @@ -0,0 +1,76 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../../') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plog = pino(dest) +delete require.cache[require.resolve('../../')] +const plogDest = require('../../')(pino.destination('/dev/null')) +delete require.cache[require.resolve('../../')] +const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false })) +const deep = require('../../package.json') +deep.deep = JSON.parse(JSON.stringify(deep)) +deep.deep.deep = JSON.parse(JSON.stringify(deep)) +const longStr = JSON.stringify(deep) + +const max = 10 + +const run = bench([ + function benchPinoLongString (cb) { + for (var i = 0; i < max; i++) { + plog.info(longStr) + } + 
setImmediate(cb) + }, + function benchPinoDestLongString (cb) { + for (var i = 0; i < max; i++) { + plogDest.info(longStr) + } + setImmediate(cb) + }, + function benchPinoAsyncLongString (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info(longStr) + } + setImmediate(cb) + }, + function benchPinoDeepObj (cb) { + for (var i = 0; i < max; i++) { + plog.info(deep) + } + setImmediate(cb) + }, + function benchPinoDestDeepObj (cb) { + for (var i = 0; i < max; i++) { + plogDest.info(deep) + } + setImmediate(cb) + }, + function benchPinoAsyncDeepObj (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info(deep) + } + setImmediate(cb) + }, + function benchPinoInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + plog.info('hello %j', deep) + } + setImmediate(cb) + }, + function benchPinoDestInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %j', deep) + } + setImmediate(cb) + }, + function benchPinoAsyncInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info('hello %j', deep) + } + setImmediate(cb) + } +], 1000) + +run(run) diff --git a/node_modules/pino/benchmarks/internal/just-pino.bench.js b/node_modules/pino/benchmarks/internal/just-pino.bench.js new file mode 100644 index 0000000..04bbe23 --- /dev/null +++ b/node_modules/pino/benchmarks/internal/just-pino.bench.js @@ -0,0 +1,182 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../../') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plog = pino(dest) +delete require.cache[require.resolve('../../')] +const plogDest = require('../../')(pino.destination('/dev/null')) +delete require.cache[require.resolve('../../')] +const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false })) +const plogChild = plog.child({ a: 'property' }) +const plogDestChild = plogDest.child({ a: 'property' }) +const plogAsyncChild = plogAsync.child({ a: 'property' }) +const plogChildChild = plog.child({ a: 'property' }).child({ sub: 'child' }) +const plogDestChildChild = plogDest.child({ a: 'property' }).child({ sub: 'child' }) +const plogAsyncChildChild = plogAsync.child({ a: 'property' }).child({ sub: 'child' }) + +const max = 10 + +const run = bench([ + function benchPino (cb) { + for (var i = 0; i < max; i++) { + plog.info('hello world') + } + setImmediate(cb) + }, + function benchPinoDest (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello world') + } + setImmediate(cb) + }, + function benchPinoExtreme (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info('hello world') + } + setImmediate(cb) + }, + function benchPinoObj (cb) { + for (var i = 0; i < max; i++) { + plog.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoDestObj (cb) { + for (var i = 0; i < max; i++) { + plogDest.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoAsyncObj (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChild (cb) { + for (var i = 0; i < max; i++) { + plogChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoDestChild (cb) { + for (var i = 0; i < max; i++) { + plogDestChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoAsyncChild (cb) { + for (var i = 0; i < max; i++) { + plogAsyncChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildChild (cb) { + for (var i = 0; i < max; i++) { + plogChildChild.info({ hello: 'world' }) + } 
+ setImmediate(cb) + }, + function benchPinoDestChildChild (cb) { + for (var i = 0; i < max; i++) { + plogDestChildChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoAsyncChildChild (cb) { + for (var i = 0; i < max; i++) { + plogAsyncChildChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildCreation (cb) { + const child = plog.child({ a: 'property' }) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoDestChildCreation (cb) { + const child = plogDest.child({ a: 'property' }) + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoMulti (cb) { + for (var i = 0; i < max; i++) { + plog.info('hello', 'world') + } + setImmediate(cb) + }, + function benchPinoDestMulti (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello', 'world') + } + setImmediate(cb) + }, + function benchPinoAsyncMulti (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info('hello', 'world') + } + setImmediate(cb) + }, + function benchPinoInterpolate (cb) { + for (var i = 0; i < max; i++) { + plog.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchPinoDestInterpolate (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchPinoAsyncInterpolate (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchPinoInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + plog.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchPinoDestInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchPinoAsyncInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchPinoInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + plog.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + function benchPinoDestInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + function benchPinoAsyncInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/internal/parent-vs-child.bench.js b/node_modules/pino/benchmarks/internal/parent-vs-child.bench.js new file mode 100644 index 0000000..fc8e9d5 --- /dev/null +++ b/node_modules/pino/benchmarks/internal/parent-vs-child.bench.js @@ -0,0 +1,75 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../../') + +const base = pino(pino.destination('/dev/null')) +const child = base.child({}) +const childChild = child.child({}) +const childChildChild = childChild.child({}) +const childChildChildChild = childChildChild.child({}) +const child2 = base.child({}) +const baseSerializers = pino(pino.destination('/dev/null')) +const baseSerializersChild = baseSerializers.child({}) +const baseSerializersChildSerializers = baseSerializers.child({}) + +const max = 100 + +const run = bench([ + function benchPinoBase (cb) { + for (var i = 0; i < max; i++) { + base.info({ hello: 'world' }) + } +
setImmediate(cb) + }, + function benchPinoChild (cb) { + for (var i = 0; i < max; i++) { + child.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildChild (cb) { + for (var i = 0; i < max; i++) { + childChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildChildChild (cb) { + for (var i = 0; i < max; i++) { + childChildChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChildChildChildChild (cb) { + for (var i = 0; i < max; i++) { + childChildChildChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoChild2 (cb) { + for (var i = 0; i < max; i++) { + child2.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoBaseSerializers (cb) { + for (var i = 0; i < max; i++) { + baseSerializers.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoBaseSerializersChild (cb) { + for (var i = 0; i < max; i++) { + baseSerializersChild.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoBaseSerializersChildSerializers (cb) { + for (var i = 0; i < max; i++) { + baseSerializersChildSerializers.info({ hello: 'world' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/internal/redact.bench.js b/node_modules/pino/benchmarks/internal/redact.bench.js new file mode 100644 index 0000000..852dd75 --- /dev/null +++ b/node_modules/pino/benchmarks/internal/redact.bench.js @@ -0,0 +1,86 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../../') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plog = pino(dest) +delete require.cache[require.resolve('../../')] +const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false })) +delete require.cache[require.resolve('../../')] +const plogUnsafe = require('../../')({ safe: false }, dest) +delete require.cache[require.resolve('../../')] +const plogUnsafeAsync = require('../../')( + { safe: false }, + pino.destination({ dest: '/dev/null', sync: false }) +) +const plogRedact = pino({ redact: ['a.b.c'] }, dest) +delete require.cache[require.resolve('../../')] +const plogAsyncRedact = require('../../')( + { redact: ['a.b.c'] }, + pino.destination({ dest: '/dev/null', sync: false }) +) +delete require.cache[require.resolve('../../')] +const plogUnsafeRedact = require('../../')({ redact: ['a.b.c'], safe: false }, dest) +delete require.cache[require.resolve('../../')] +const plogUnsafeAsyncRedact = require('../../')( + { redact: ['a.b.c'], safe: false }, + pino.destination({ dest: '/dev/null', sync: false }) +) + +const max = 10 + +// note that "redact me." 
is the same amount of bytes as the censor: "[Redacted]" + +const run = bench([ + function benchPinoNoRedact (cb) { + for (var i = 0; i < max; i++) { + plog.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + }, + function benchPinoRedact (cb) { + for (var i = 0; i < max; i++) { + plogRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + }, + function benchPinoUnsafeNoRedact (cb) { + for (var i = 0; i < max; i++) { + plogUnsafe.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + }, + function benchPinoUnsafeRedact (cb) { + for (var i = 0; i < max; i++) { + plogUnsafeRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + }, + function benchPinoAsyncNoRedact (cb) { + for (var i = 0; i < max; i++) { + plogAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + }, + function benchPinoAsyncRedact (cb) { + for (var i = 0; i < max; i++) { + plogAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + }, + function benchPinoUnsafeAsyncNoRedact (cb) { + for (var i = 0; i < max; i++) { + plogUnsafeAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + }, + function benchPinoUnsafeAsyncRedact (cb) { + for (var i = 0; i < max; i++) { + plogUnsafeAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/long-string.bench.js b/node_modules/pino/benchmarks/long-string.bench.js new file mode 100644 index 0000000..7f37a32 --- /dev/null +++ b/node_modules/pino/benchmarks/long-string.bench.js @@ -0,0 +1,81 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const bole = require('bole')('bench') +const winston = require('winston') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plogNodeStream = pino(dest) +delete require.cache[require.resolve('../')] +const plogDest = require('../')(pino.destination('/dev/null')) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) + +const crypto = require('crypto') + +const longStr = crypto.randomBytes(2000).toString() + +const max = 10 +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + }] +}) + +require('bole').output({ + level: 'info', + stream: dest +}).setFastTime(true) + +const chill = winston.createLogger({ + transports: [ + new winston.transports.Stream({ + stream: fs.createWriteStream('/dev/null') + }) + ] +}) + +const run = bench([ + function benchBunyan (cb) { + for (var i = 0; i < max; i++) { + blog.info(longStr) + } + setImmediate(cb) + }, + function benchWinston (cb) { + for (var i = 0; i < max; i++) { + chill.info(longStr) + } + setImmediate(cb) + }, + function benchBole (cb) { + for (var i = 0; i < max; i++) { + bole.info(longStr) + } + setImmediate(cb) + }, + function benchPino (cb) { + for (var i = 0; i < max; i++) { + plogDest.info(longStr) + } + setImmediate(cb) + }, + function benchPinoMinLength (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info(longStr) + } + setImmediate(cb) + }, + function benchPinoNodeStream (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info(longStr) + } + setImmediate(cb) + } +], 1000) + +run(run) diff --git 
a/node_modules/pino/benchmarks/multi-arg.bench.js b/node_modules/pino/benchmarks/multi-arg.bench.js new file mode 100644 index 0000000..8cbc4dc --- /dev/null +++ b/node_modules/pino/benchmarks/multi-arg.bench.js @@ -0,0 +1,193 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const bole = require('bole')('bench') +const winston = require('winston') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const plogNodeStream = pino(dest) +delete require.cache[require.resolve('../')] +const plogDest = require('../')(pino.destination('/dev/null')) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) +delete require.cache[require.resolve('../')] + +const deep = require('../package.json') +deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep))) +deep.deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep))) +deep.deep.deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep))) + +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + }] +}) + +require('bole').output({ + level: 'info', + stream: dest +}).setFastTime(true) + +const chill = winston.createLogger({ + transports: [ + new winston.transports.Stream({ + stream: fs.createWriteStream('/dev/null') + }) + ] +}) + +const max = 10 + +const run = bench([ + function benchBunyanInterpolate (cb) { + for (var i = 0; i < max; i++) { + blog.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchWinstonInterpolate (cb) { + for (var i = 0; i < max; i++) { + chill.log('info', 'hello %s', 'world') + } + setImmediate(cb) + }, + function benchBoleInterpolate (cb) { + for (var i = 0; i < max; i++) { + bole.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchPinoInterpolate (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchPinoMinLengthInterpolate (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchPinoNodeStreamInterpolate (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info('hello %s', 'world') + } + setImmediate(cb) + }, + function benchBunyanInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + blog.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + + function benchWinstonInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + chill.log('info', 'hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchBoleInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + bole.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchPinoInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchPinoMinLengthInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchPinoNodeStreamInterpolateAll (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info('hello %s %j %d', 'world', { obj: true }, 4) + } + setImmediate(cb) + }, + function benchBunyanInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + blog.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + 
function benchWinstonInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + chill.log('info', 'hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + function benchBoleInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + bole.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + function benchPinoInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + function benchPinoMinLengthInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + function benchPinoNodeStreamInterpolateExtra (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' }) + } + setImmediate(cb) + }, + function benchBunyanInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + blog.info('hello %j', deep) + } + setImmediate(cb) + }, + function benchWinstonInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + chill.log('info', 'hello %j', deep) + } + setImmediate(cb) + }, + function benchBoleInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + bole.info('hello %j', deep) + } + setImmediate(cb) + }, + function benchPinoInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + plogDest.info('hello %j', deep) + } + setImmediate(cb) + }, + function benchPinoMinLengthInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info('hello %j', deep) + } + setImmediate(cb) + }, + function benchPinoNodeStreamInterpolateDeep (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info('hello %j', deep) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/multistream.js b/node_modules/pino/benchmarks/multistream.js new file mode 100644 index 0000000..18b9661 --- /dev/null +++ b/node_modules/pino/benchmarks/multistream.js @@ -0,0 +1,98 @@ +'use strict' + +const bench = require('fastbench') +const bunyan = require('bunyan') +const pino = require('../') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') + +const tenStreams = [ + { stream: dest }, + { stream: dest }, + { stream: dest }, + { stream: dest }, + { stream: dest }, + { level: 'debug', stream: dest }, + { level: 'debug', stream: dest }, + { level: 'trace', stream: dest }, + { level: 'warn', stream: dest }, + { level: 'fatal', stream: dest } +] +const pinomsTen = pino({ level: 'debug' }, pino.multistream(tenStreams)) + +const fourStreams = [ + { stream: dest }, + { stream: dest }, + { level: 'debug', stream: dest }, + { level: 'trace', stream: dest } +] +const pinomsFour = pino({ level: 'debug' }, pino.multistream(fourStreams)) + +const pinomsOne = pino({ level: 'info' }, pino.multistream(dest)) +const blogOne = bunyan.createLogger({ + name: 'myapp', + streams: [{ stream: dest }] +}) + +const blogTen = bunyan.createLogger({ + name: 'myapp', + streams: tenStreams +}) +const blogFour = bunyan.createLogger({ + name: 'myapp', + streams: fourStreams +}) + +const max = 10 +const run = bench([ + function benchBunyanTen (cb) { + for (let i = 0; i < max; i++) { + blogTen.info('hello world') + blogTen.debug('hello world') + blogTen.trace('hello world') + blogTen.warn('hello world') + blogTen.fatal('hello world') + } + setImmediate(cb) + }, + function benchPinoMSTen (cb) { + for (let i = 0; i < max; i++) { + 
pinomsTen.info('hello world') + pinomsTen.debug('hello world') + pinomsTen.trace('hello world') + pinomsTen.warn('hello world') + pinomsTen.fatal('hello world') + } + setImmediate(cb) + }, + function benchBunyanFour (cb) { + for (let i = 0; i < max; i++) { + blogFour.info('hello world') + blogFour.debug('hello world') + blogFour.trace('hello world') + } + setImmediate(cb) + }, + function benchPinoMSFour (cb) { + for (let i = 0; i < max; i++) { + pinomsFour.info('hello world') + pinomsFour.debug('hello world') + pinomsFour.trace('hello world') + } + setImmediate(cb) + }, + function benchBunyanOne (cb) { + for (let i = 0; i < max; i++) { + blogOne.info('hello world') + } + setImmediate(cb) + }, + function benchPinoMSOne (cb) { + for (let i = 0; i < max; i++) { + pinomsOne.info('hello world') + } + setImmediate(cb) + } +], 10000) + +run() diff --git a/node_modules/pino/benchmarks/object.bench.js b/node_modules/pino/benchmarks/object.bench.js new file mode 100644 index 0000000..6207dec --- /dev/null +++ b/node_modules/pino/benchmarks/object.bench.js @@ -0,0 +1,82 @@ +'use strict' + +const bench = require('fastbench') +const pino = require('../') +const bunyan = require('bunyan') +const bole = require('bole')('bench') +const winston = require('winston') +const fs = require('node:fs') +const dest = fs.createWriteStream('/dev/null') +const loglevel = require('./utils/wrap-log-level')(dest) +const plogNodeStream = pino(dest) +delete require.cache[require.resolve('../')] +const plogDest = require('../')(pino.destination('/dev/null')) +delete require.cache[require.resolve('../')] +const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 })) +const blog = bunyan.createLogger({ + name: 'myapp', + streams: [{ + level: 'trace', + stream: dest + }] +}) +require('bole').output({ + level: 'info', + stream: dest +}).setFastTime(true) +const chill = winston.createLogger({ + transports: [ + new winston.transports.Stream({ + stream: fs.createWriteStream('/dev/null') + }) + ] +}) + +const max = 10 + +const run = bench([ + function benchBunyanObj (cb) { + for (var i = 0; i < max; i++) { + blog.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchWinstonObj (cb) { + for (var i = 0; i < max; i++) { + chill.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchBoleObj (cb) { + for (var i = 0; i < max; i++) { + bole.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchLogLevelObject (cb) { + for (var i = 0; i < max; i++) { + loglevel.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoObj (cb) { + for (var i = 0; i < max; i++) { + plogDest.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoMinLengthObj (cb) { + for (var i = 0; i < max; i++) { + plogMinLength.info({ hello: 'world' }) + } + setImmediate(cb) + }, + function benchPinoNodeStreamObj (cb) { + for (var i = 0; i < max; i++) { + plogNodeStream.info({ hello: 'world' }) + } + setImmediate(cb) + } +], 10000) + +run(run) diff --git a/node_modules/pino/benchmarks/utils/generate-benchmark-doc.js b/node_modules/pino/benchmarks/utils/generate-benchmark-doc.js new file mode 100644 index 0000000..edf8a03 --- /dev/null +++ b/node_modules/pino/benchmarks/utils/generate-benchmark-doc.js @@ -0,0 +1,36 @@ +'use strict' +const { join } = require('node:path') +const { execSync } = require('node:child_process') + +const run = (type) => { + process.stderr.write(`benchmarking ${type}\n`) + return execSync(`node ${join(__dirname, 'runbench')} 
${type} -q`) +} + +console.log(` +# Benchmarks + +\`pino.info('hello world')\`: + +\`\`\` +${run('basic')} +\`\`\` + +\`pino.info({'hello': 'world'})\`: + +\`\`\` +${run('object')} +\`\`\` + +\`pino.info(aBigDeeplyNestedObject)\`: + +\`\`\` +${run('deep-object')} +\`\`\` + +\`pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})\`: + +For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended +to include a timestamp and [bole](http://npm.im/bole) had +\`fastTime\` mode switched on. +`) diff --git a/node_modules/pino/benchmarks/utils/runbench.js b/node_modules/pino/benchmarks/utils/runbench.js new file mode 100644 index 0000000..7bb5585 --- /dev/null +++ b/node_modules/pino/benchmarks/utils/runbench.js @@ -0,0 +1,138 @@ +'use strict' + +const { type, platform, arch, release, cpus } = require('node:os') +const { resolve, join } = require('node:path') +const spawn = require('node:child_process').spawn +const pump = require('pump') +const split = require('split2') +const through = require('through2') +const steed = require('steed') + +function usage () { + console.log(` + Pino Benchmarks + + To run a benchmark, specify which to run: + + ・all ⁃ run all benchmarks (takes a while) + ・basic ⁃ log a simple string + ・object ⁃ logging a basic object + ・deep-object ⁃ logging a large object + ・multi-arg ⁃ multiple log method arguments + ・child ⁃ child from a parent + ・child-child ⁃ child from a child + ・child-creation ⁃ child constructor + ・formatters ⁃ difference between with or without formatters + + Example: + + node runbench basic + `) +} + +if (!process.argv[2]) { + usage() + process.exit() +} + +const quiet = process.argv[3] === '-q' + +const selectedBenchmark = process.argv[2].toLowerCase() +const benchmarkDir = resolve(__dirname, '..') +const benchmarks = { + basic: 'basic.bench.js', + object: 'object.bench.js', + 'deep-object': 'deep-object.bench.js', + 'multi-arg': 'multi-arg.bench.js', + 'long-string': 'long-string.bench.js', + child: 'child.bench.js', + 'child-child': 'child-child.bench.js', + 'child-creation': 'child-creation.bench.js', + formatters: 'formatters.bench.js' +} + +function runBenchmark (name, done) { + const benchmarkResults = {} + benchmarkResults[name] = {} + + const processor = through(function (line, enc, cb) { + const [label, time] = ('' + line).split(': ') + const [target, iterations] = label.split('*') + const logger = target.replace('bench', '') + + if (!benchmarkResults[name][logger]) benchmarkResults[name][logger] = [] + + benchmarkResults[name][logger].push({ + time: time.replace('ms', ''), + iterations: iterations.replace(':', '') + }) + + cb() + }) + + if (quiet === false) console.log(`Running ${name.toUpperCase()} benchmark\n`) + + const benchmark = spawn( + process.argv[0], + [join(benchmarkDir, benchmarks[name])] + ) + + if (quiet === false) { + benchmark.stdout.pipe(process.stdout) + } + + pump(benchmark.stdout, split(), processor) + + benchmark.on('exit', () => { + console.log() + if (done && typeof done === 'function') done(null, benchmarkResults) + }) +} + +function sum (arr) { + let result = 0 + for (var i = 0; i < arr.length; i += 1) { + result += Number.parseFloat(arr[i].time) + } + return result +} + +function displayResults (results) { + if (quiet === false) console.log('==========') + const benchNames = Object.keys(results) + for (var i = 0; i < benchNames.length; i += 1) { + console.log(`${benchNames[i].toUpperCase()} benchmark averages`) + const benchmark = results[benchNames[i]] + const loggers = Object.keys(benchmark) 
+ for (var j = 0; j < loggers.length; j += 1) { + const logger = benchmark[loggers[j]] + const average = sum(logger) / logger.length + console.log(`${loggers[j]} average: ${average.toFixed(3)}ms`) + } + } + if (quiet === false) { + console.log('==========') + console.log( + `System: ${type()}/${platform()} ${arch()} ${release()}`, + `~ ${cpus()[0].model} (cores/threads: ${cpus().length})` + ) + } +} + +function toBench (done) { + runBenchmark(this.name, done) +} + +const benchQueue = [] +if (selectedBenchmark !== 'all') { + benchQueue.push(toBench.bind({ name: selectedBenchmark })) +} else { + const keys = Object.keys(benchmarks) + for (var i = 0; i < keys.length; i += 1) { + benchQueue.push(toBench.bind({ name: keys[i] })) + } +} +steed.series(benchQueue, function (err, results) { + if (err) return console.error(err.message) + results.forEach(displayResults) +}) diff --git a/node_modules/pino/benchmarks/utils/wrap-log-level.js b/node_modules/pino/benchmarks/utils/wrap-log-level.js new file mode 100644 index 0000000..77d0691 --- /dev/null +++ b/node_modules/pino/benchmarks/utils/wrap-log-level.js @@ -0,0 +1,55 @@ +'use strict' + +const { readFileSync } = require('node:fs') +const vm = require('vm') +const { join } = require('node:path') +const code = readFileSync( + join(__dirname, '..', '..', 'node_modules', 'loglevel', 'lib', 'loglevel.js') +) +const { Console } = require('console') + +function build (dest) { + const sandbox = { + module: {}, + console: new Console(dest, dest) + } + const context = vm.createContext(sandbox) + + const script = new vm.Script(code) + script.runInContext(context) + + const loglevel = sandbox.log + + const originalFactory = loglevel.methodFactory + loglevel.methodFactory = function (methodName, logLevel, loggerName) { + const rawMethod = originalFactory(methodName, logLevel, loggerName) + + return function () { + const time = new Date() + let array + if (typeof arguments[0] === 'string') { + arguments[0] = '[' + time.toISOString() + '] ' + arguments[0] + rawMethod.apply(null, arguments) + } else { + array = new Array(arguments.length + 1) + array[0] = '[' + time.toISOString() + ']' + for (var i = 0; i < arguments.length; i++) { + array[i + 1] = arguments[i] + } + rawMethod.apply(null, array) + } + } + } + + loglevel.setLevel(loglevel.levels.INFO) + return loglevel +} + +module.exports = build + +if (require.main === module) { + const loglevel = build(process.stdout) + loglevel.info('hello') + loglevel.info({ hello: 'world' }) + loglevel.info('hello %j', { hello: 'world' }) +} diff --git a/node_modules/pino/bin.js b/node_modules/pino/bin.js new file mode 100644 index 0000000..939b117 --- /dev/null +++ b/node_modules/pino/bin.js @@ -0,0 +1,6 @@ +#!/usr/bin/env node +console.error( + '`pino` cli has been removed. 
Use `pino-pretty` cli instead.\n' + + '\nSee: https://github.com/pinojs/pino-pretty' +) +process.exit(1) diff --git a/node_modules/pino/browser.js b/node_modules/pino/browser.js new file mode 100644 index 0000000..bfdb8ea --- /dev/null +++ b/node_modules/pino/browser.js @@ -0,0 +1,505 @@ +'use strict' + +const format = require('quick-format-unescaped') + +module.exports = pino + +const _console = pfGlobalThisOrFallback().console || {} +const stdSerializers = { + mapHttpRequest: mock, + mapHttpResponse: mock, + wrapRequestSerializer: passthrough, + wrapResponseSerializer: passthrough, + wrapErrorSerializer: passthrough, + req: mock, + res: mock, + err: asErrValue, + errWithCause: asErrValue +} +function levelToValue (level, logger) { + return level === 'silent' + ? Infinity + : logger.levels.values[level] +} +const baseLogFunctionSymbol = Symbol('pino.logFuncs') +const hierarchySymbol = Symbol('pino.hierarchy') + +const logFallbackMap = { + error: 'log', + fatal: 'error', + warn: 'error', + info: 'log', + debug: 'log', + trace: 'log' +} + +function appendChildLogger (parentLogger, childLogger) { + const newEntry = { + logger: childLogger, + parent: parentLogger[hierarchySymbol] + } + childLogger[hierarchySymbol] = newEntry +} + +function setupBaseLogFunctions (logger, levels, proto) { + const logFunctions = {} + levels.forEach(level => { + logFunctions[level] = proto[level] ? proto[level] : (_console[level] || _console[logFallbackMap[level] || 'log'] || noop) + }) + logger[baseLogFunctionSymbol] = logFunctions +} + +function shouldSerialize (serialize, serializers) { + if (Array.isArray(serialize)) { + const hasToFilter = serialize.filter(function (k) { + return k !== '!stdSerializers.err' + }) + return hasToFilter + } else if (serialize === true) { + return Object.keys(serializers) + } + + return false +} + +function pino (opts) { + opts = opts || {} + opts.browser = opts.browser || {} + + const transmit = opts.browser.transmit + if (transmit && typeof transmit.send !== 'function') { throw Error('pino: transmit option must have a send function') } + + const proto = opts.browser.write || _console + if (opts.browser.write) opts.browser.asObject = true + const serializers = opts.serializers || {} + const serialize = shouldSerialize(opts.browser.serialize, serializers) + let stdErrSerialize = opts.browser.serialize + + if ( + Array.isArray(opts.browser.serialize) && + opts.browser.serialize.indexOf('!stdSerializers.err') > -1 + ) stdErrSerialize = false + + const customLevels = Object.keys(opts.customLevels || {}) + const levels = ['error', 'fatal', 'warn', 'info', 'debug', 'trace'].concat(customLevels) + + if (typeof proto === 'function') { + levels.forEach(function (level) { + proto[level] = proto + }) + } + if (opts.enabled === false || opts.browser.disabled) opts.level = 'silent' + const level = opts.level || 'info' + const logger = Object.create(proto) + if (!logger.log) logger.log = noop + + setupBaseLogFunctions(logger, levels, proto) + // setup root hierarchy entry + appendChildLogger({}, logger) + + Object.defineProperty(logger, 'levelVal', { + get: getLevelVal + }) + Object.defineProperty(logger, 'level', { + get: getLevel, + set: setLevel + }) + + const setOpts = { + transmit, + serialize, + asObject: opts.browser.asObject, + asObjectBindingsOnly: opts.browser.asObjectBindingsOnly, + formatters: opts.browser.formatters, + levels, + timestamp: getTimeFunction(opts), + messageKey: opts.messageKey || 'msg', + onChild: opts.onChild || noop + } + logger.levels = getLevels(opts) + 
logger.level = level + + logger.isLevelEnabled = function (level) { + if (!this.levels.values[level]) { + return false + } + + return this.levels.values[level] >= this.levels.values[this.level] + } + logger.setMaxListeners = logger.getMaxListeners = + logger.emit = logger.addListener = logger.on = + logger.prependListener = logger.once = + logger.prependOnceListener = logger.removeListener = + logger.removeAllListeners = logger.listeners = + logger.listenerCount = logger.eventNames = + logger.write = logger.flush = noop + logger.serializers = serializers + logger._serialize = serialize + logger._stdErrSerialize = stdErrSerialize + logger.child = function (...args) { return child.call(this, setOpts, ...args) } + + if (transmit) logger._logEvent = createLogEventShape() + + function getLevelVal () { + return levelToValue(this.level, this) + } + + function getLevel () { + return this._level + } + function setLevel (level) { + if (level !== 'silent' && !this.levels.values[level]) { + throw Error('unknown level ' + level) + } + this._level = level + + set(this, setOpts, logger, 'error') // <-- must stay first + set(this, setOpts, logger, 'fatal') + set(this, setOpts, logger, 'warn') + set(this, setOpts, logger, 'info') + set(this, setOpts, logger, 'debug') + set(this, setOpts, logger, 'trace') + + customLevels.forEach((level) => { + set(this, setOpts, logger, level) + }) + } + + function child (setOpts, bindings, childOptions) { + if (!bindings) { + throw new Error('missing bindings for child Pino') + } + childOptions = childOptions || {} + if (serialize && bindings.serializers) { + childOptions.serializers = bindings.serializers + } + const childOptionsSerializers = childOptions.serializers + if (serialize && childOptionsSerializers) { + var childSerializers = Object.assign({}, serializers, childOptionsSerializers) + var childSerialize = opts.browser.serialize === true + ? 
Object.keys(childSerializers) + : serialize + delete bindings.serializers + applySerializers([bindings], childSerialize, childSerializers, this._stdErrSerialize) + } + function Child (parent) { + this._childLevel = (parent._childLevel | 0) + 1 + + // make sure bindings are available in the `set` function + this.bindings = bindings + + if (childSerializers) { + this.serializers = childSerializers + this._serialize = childSerialize + } + if (transmit) { + this._logEvent = createLogEventShape( + [].concat(parent._logEvent.bindings, bindings) + ) + } + } + Child.prototype = this + const newLogger = new Child(this) + + // must happen before the level is assigned + appendChildLogger(this, newLogger) + newLogger.child = function (...args) { return child.call(this, setOpts, ...args) } + // required to actually initialize the logger functions for any given child + newLogger.level = childOptions.level || this.level // allow level to be set by childOptions + setOpts.onChild(newLogger) + + return newLogger + } + return logger +} + +function getLevels (opts) { + const customLevels = opts.customLevels || {} + + const values = Object.assign({}, pino.levels.values, customLevels) + const labels = Object.assign({}, pino.levels.labels, invertObject(customLevels)) + + return { + values, + labels + } +} + +function invertObject (obj) { + const inverted = {} + Object.keys(obj).forEach(function (key) { + inverted[obj[key]] = key + }) + return inverted +} + +pino.levels = { + values: { + fatal: 60, + error: 50, + warn: 40, + info: 30, + debug: 20, + trace: 10 + }, + labels: { + 10: 'trace', + 20: 'debug', + 30: 'info', + 40: 'warn', + 50: 'error', + 60: 'fatal' + } +} + +pino.stdSerializers = stdSerializers +pino.stdTimeFunctions = Object.assign({}, { nullTime, epochTime, unixTime, isoTime }) + +function getBindingChain (logger) { + const bindings = [] + if (logger.bindings) { + bindings.push(logger.bindings) + } + + // traverse up the tree to get all bindings + let hierarchy = logger[hierarchySymbol] + while (hierarchy.parent) { + hierarchy = hierarchy.parent + if (hierarchy.logger.bindings) { + bindings.push(hierarchy.logger.bindings) + } + } + + return bindings.reverse() +} + +function set (self, opts, rootLogger, level) { + // override the current log functions with either `noop` or the base log function + Object.defineProperty(self, level, { + value: (levelToValue(self.level, rootLogger) > levelToValue(level, rootLogger) + ? 
noop + : rootLogger[baseLogFunctionSymbol][level]), + writable: true, + enumerable: true, + configurable: true + }) + + if (self[level] === noop) { + if (!opts.transmit) return + + const transmitLevel = opts.transmit.level || self.level + const transmitValue = levelToValue(transmitLevel, rootLogger) + const methodValue = levelToValue(level, rootLogger) + if (methodValue < transmitValue) return + } + + // make sure the log format is correct + self[level] = createWrap(self, opts, rootLogger, level) + + // prepend bindings if it is not the root logger + const bindings = getBindingChain(self) + if (bindings.length === 0) { + // early exit in case for rootLogger + return + } + self[level] = prependBindingsInArguments(bindings, self[level]) +} + +function prependBindingsInArguments (bindings, logFunc) { + return function () { + return logFunc.apply(this, [...bindings, ...arguments]) + } +} + +function createWrap (self, opts, rootLogger, level) { + return (function (write) { + return function LOG () { + const ts = opts.timestamp() + const args = new Array(arguments.length) + const proto = (Object.getPrototypeOf && Object.getPrototypeOf(this) === _console) ? _console : this + for (var i = 0; i < args.length; i++) args[i] = arguments[i] + + var argsIsSerialized = false + if (opts.serialize) { + applySerializers(args, this._serialize, this.serializers, this._stdErrSerialize) + argsIsSerialized = true + } + if (opts.asObject || opts.formatters) { + write.call(proto, ...asObject(this, level, args, ts, opts)) + } else write.apply(proto, args) + + if (opts.transmit) { + const transmitLevel = opts.transmit.level || self._level + const transmitValue = levelToValue(transmitLevel, rootLogger) + const methodValue = levelToValue(level, rootLogger) + if (methodValue < transmitValue) return + transmit(this, { + ts, + methodLevel: level, + methodValue, + transmitLevel, + transmitValue: rootLogger.levels.values[opts.transmit.level || self._level], + send: opts.transmit.send, + val: levelToValue(self._level, rootLogger) + }, args, argsIsSerialized) + } + } + })(self[baseLogFunctionSymbol][level]) +} + +function asObject (logger, level, args, ts, opts) { + const { + level: levelFormatter, + log: logObjectFormatter = (obj) => obj + } = opts.formatters || {} + const argsCloned = args.slice() + let msg = argsCloned[0] + const logObject = {} + + let lvl = (logger._childLevel | 0) + 1 + if (lvl < 1) lvl = 1 + + if (ts) { + logObject.time = ts + } + + if (levelFormatter) { + const formattedLevel = levelFormatter(level, logger.levels.values[level]) + Object.assign(logObject, formattedLevel) + } else { + logObject.level = logger.levels.values[level] + } + + if (opts.asObjectBindingsOnly) { + if (msg !== null && typeof msg === 'object') { + while (lvl-- && typeof argsCloned[0] === 'object') { + Object.assign(logObject, argsCloned.shift()) + } + } + + const formattedLogObject = logObjectFormatter(logObject) + return [formattedLogObject, ...argsCloned] + } else { + // deliberate, catching objects, arrays + if (msg !== null && typeof msg === 'object') { + while (lvl-- && typeof argsCloned[0] === 'object') { + Object.assign(logObject, argsCloned.shift()) + } + msg = argsCloned.length ? 
format(argsCloned.shift(), argsCloned) : undefined + } else if (typeof msg === 'string') msg = format(argsCloned.shift(), argsCloned) + if (msg !== undefined) logObject[opts.messageKey] = msg + + const formattedLogObject = logObjectFormatter(logObject) + return [formattedLogObject] + } +} + +function applySerializers (args, serialize, serializers, stdErrSerialize) { + for (const i in args) { + if (stdErrSerialize && args[i] instanceof Error) { + args[i] = pino.stdSerializers.err(args[i]) + } else if (typeof args[i] === 'object' && !Array.isArray(args[i]) && serialize) { + for (const k in args[i]) { + if (serialize.indexOf(k) > -1 && k in serializers) { + args[i][k] = serializers[k](args[i][k]) + } + } + } + } +} + +function transmit (logger, opts, args, argsIsSerialized = false) { + const send = opts.send + const ts = opts.ts + const methodLevel = opts.methodLevel + const methodValue = opts.methodValue + const val = opts.val + const bindings = logger._logEvent.bindings + + if (!argsIsSerialized) { + applySerializers( + args, + logger._serialize || Object.keys(logger.serializers), + logger.serializers, + logger._stdErrSerialize === undefined ? true : logger._stdErrSerialize + ) + } + + logger._logEvent.ts = ts + logger._logEvent.messages = args.filter(function (arg) { + // bindings can only be objects, so reference equality check via indexOf is fine + return bindings.indexOf(arg) === -1 + }) + + logger._logEvent.level.label = methodLevel + logger._logEvent.level.value = methodValue + + send(methodLevel, logger._logEvent, val) + + logger._logEvent = createLogEventShape(bindings) +} + +function createLogEventShape (bindings) { + return { + ts: 0, + messages: [], + bindings: bindings || [], + level: { label: '', value: 0 } + } +} + +function asErrValue (err) { + const obj = { + type: err.constructor.name, + msg: err.message, + stack: err.stack + } + for (const key in err) { + if (obj[key] === undefined) { + obj[key] = err[key] + } + } + return obj +} + +function getTimeFunction (opts) { + if (typeof opts.timestamp === 'function') { + return opts.timestamp + } + if (opts.timestamp === false) { + return nullTime + } + return epochTime +} + +function mock () { return {} } +function passthrough (a) { return a } +function noop () {} + +function nullTime () { return false } +function epochTime () { return Date.now() } +function unixTime () { return Math.round(Date.now() / 1000.0) } +function isoTime () { return new Date(Date.now()).toISOString() } // using Date.now() for testability + +/* eslint-disable */ +/* istanbul ignore next */ +function pfGlobalThisOrFallback () { + function defd (o) { return typeof o !== 'undefined' && o } + try { + if (typeof globalThis !== 'undefined') return globalThis + Object.defineProperty(Object.prototype, 'globalThis', { + get: function () { + delete Object.prototype.globalThis + return (this.globalThis = this) + }, + configurable: true + }) + return globalThis + } catch (e) { + return defd(self) || defd(window) || defd(this) || {} + } +} +/* eslint-enable */ + +module.exports.default = pino +module.exports.pino = pino diff --git a/node_modules/pino/build/sync-version.js b/node_modules/pino/build/sync-version.js new file mode 100644 index 0000000..67bc625 --- /dev/null +++ b/node_modules/pino/build/sync-version.js @@ -0,0 +1,25 @@ +'use strict' + +const fs = require('node:fs') +const path = require('node:path') +let { version } = require('../package.json') + +let passedVersion = process.argv[2] + +if (passedVersion) { + passedVersion = 
passedVersion.trim().replace(/^v/, '')
+  if (version !== passedVersion) {
+    console.log(`Syncing version from ${version} to ${passedVersion}`)
+    version = passedVersion
+    const packageJson = require('../package.json')
+    packageJson.version = version
+    fs.writeFileSync(path.resolve('./package.json'), JSON.stringify(packageJson, null, 2) + '\n', { encoding: 'utf-8' })
+  }
+}
+
+const metaContent = `'use strict'
+
+module.exports = { version: '${version}' }
+`
+
+fs.writeFileSync(path.resolve('./lib/meta.js'), metaContent, { encoding: 'utf-8' })
diff --git a/node_modules/pino/docs/api.md b/node_modules/pino/docs/api.md
new file mode 100644
index 0000000..a923987
--- /dev/null
+++ b/node_modules/pino/docs/api.md
@@ -0,0 +1,1588 @@
+# API
+
+* [pino() => logger](#export)
+  * [options](#options)
+  * [destination](#destination)
+  * [destination\[Symbol.for('pino.metadata')\]](#metadata)
+* [Logger Instance](#logger)
+  * [logger.trace()](#trace)
+  * [logger.debug()](#debug)
+  * [logger.info()](#info)
+  * [logger.warn()](#warn)
+  * [logger.error()](#error)
+  * [logger.fatal()](#fatal)
+  * [logger.silent()](#silent)
+  * [logger.child()](#child)
+  * [logger.bindings()](#logger-bindings)
+  * [logger.setBindings()](#logger-set-bindings)
+  * [logger.flush()](#flush)
+  * [logger.level](#logger-level)
+  * [logger.isLevelEnabled()](#islevelenabled)
+  * [logger.levels](#levels)
+  * [logger\[Symbol.for('pino.serializers')\]](#serializers)
+  * [Event: 'level-change'](#level-change)
+  * [logger.version](#version)
+  * [logger.msgPrefix](#msgPrefix)
+* [Statics](#statics)
+  * [pino.destination()](#pino-destination)
+  * [pino.transport()](#pino-transport)
+  * [pino.multistream()](#pino-multistream)
+  * [pino.stdSerializers](#pino-stdserializers)
+  * [pino.stdTimeFunctions](#pino-stdtimefunctions)
+  * [pino.symbols](#pino-symbols)
+  * [pino.version](#pino-version)
+* [Interfaces](#interfaces)
+  * [MultiStreamRes](#multistreamres)
+  * [StreamEntry](#streamentry)
+  * [DestinationStream](#destinationstream)
+* [Types](#types)
+  * [Level](#level-1)
+* [TypeScript](#typescript)
+  * [Module Augmentation](#module-augmentation)
+  * [LogFnFields Interface](#logfnfields-interface)
+
+
+## `pino([options], [destination]) => logger`
+
+The exported `pino` function takes two optional arguments,
+[`options`](#options) and [`destination`](#destination), and
+returns a [logger instance](#logger).
+
+
+### `options` (Object)
+
+#### `name` (String)
+
+Default: `undefined`
+
+The name of the logger. When set, adds a `name` field to every JSON line logged.
+
+#### `level` (String)
+
+Default: `'info'`
+
+The minimum level to log: Pino will not log messages with a lower level. Setting this option reduces the logging load, since debug and trace logs are typically only useful during development and not needed in production.
+
+One of `'fatal'`, `'error'`, `'warn'`, `'info'`, `'debug'`, `'trace'` or `'silent'`.
+
+Additional levels can be added to the instance via the `customLevels` option.
+
+* See [`customLevels` option](#opt-customlevels)
+
+
+
+#### `levelComparison` ("ASC", "DESC", Function)
+
+Default: `ASC`
+
+Use this option to customize the ordering of levels.
+To define a custom ordering, pass a function that accepts the `current` and `expected` level values and returns a `boolean` indicating whether the `current` level should be logged.
+
+```js
+const logger = pino({
+  levelComparison: 'DESC',
+  customLevels: {
+    foo: 20, // `foo` is more valuable than `bar`
+    bar: 10
+  },
+})
+
+// OR
+
+const logger = pino({
+  levelComparison: function(current, expected) {
+    return current >= expected;
+  }
+})
+```
+
+#### `customLevels` (Object)
+
+Default: `undefined`
+
+Use this option to define additional logging levels.
+The keys of the object correspond to the namespace of the log level,
+and the values should be the numerical value of the level.
+
+```js
+const logger = pino({
+  customLevels: {
+    foo: 35
+  }
+})
+logger.foo('hi')
+```
+
+
+#### `useOnlyCustomLevels` (Boolean)
+
+Default: `false`
+
+Use this option to only use the defined `customLevels` and omit Pino's levels.
+The logger's default `level` must be changed to a value in `customLevels` in order to use `useOnlyCustomLevels`.
+Warning: this option may not be supported by downstream transports.
+
+```js
+const logger = pino({
+  customLevels: {
+    foo: 35
+  },
+  useOnlyCustomLevels: true,
+  level: 'foo'
+})
+logger.foo('hi')
+logger.info('hello') // Will throw an error saying info is not found in logger object
+```
+#### `depthLimit` (Number)
+
+Default: `5`
+
+Option to limit stringification at a specific nesting depth when logging circular objects.
+
+#### `edgeLimit` (Number)
+
+Default: `100`
+
+Option to limit stringification of properties/elements when logging a specific object/array with circular references.
+
+
+#### `mixin` (Function):
+
+Default: `undefined`
+
+If provided, the `mixin` function is called each time one of the active
+logging methods is called. The first parameter is the `mergingObject` value or an empty object. The second parameter is the log level number.
+The third parameter is the logger or child logger itself, which can be used to
+retrieve logger-specific context from within the `mixin` function.
+The function must synchronously return an object. The properties of the returned object will be added to the
+logged JSON.
+
+```js
+let n = 0
+const logger = pino({
+  mixin () {
+    return { line: ++n }
+  }
+})
+logger.info('hello')
+// {"level":30,"time":1573664685466,"pid":78742,"hostname":"x","line":1,"msg":"hello"}
+logger.info('world')
+// {"level":30,"time":1573664685469,"pid":78742,"hostname":"x","line":2,"msg":"world"}
+```
+
+The result of `mixin()` is supposed to be a _new_ object. For performance reasons, the object returned by `mixin()` will be mutated by pino.
+In the following example, passing a `mergingObject` argument to the first `info` call will mutate the global `mixin` object by default
+(see the [`mixinMergeStrategy` option](#opt-mixin-merge-strategy)):
+```js
+const mixin = {
+  appName: 'My app'
+}
+
+const logger = pino({
+  mixin() {
+    return mixin;
+  }
+})
+
+logger.info({
+  description: 'Ok'
+}, 'Message 1')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","appName":"My app","description":"Ok","msg":"Message 1"}
+logger.info('Message 2')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","appName":"My app","description":"Ok","msg":"Message 2"}
+// Note: the second log contains "description":"Ok" text, even if it was not provided.
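+// To avoid this cross-call mutation, have mixin() return a fresh object on each
+// call, e.g. `return { ...mixin }`, at the cost of one extra allocation per log line.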
+```
+
+The `mixin` method can be used to add the level label to each log message, as in the following example:
+```js
+const logger = pino({
+  mixin(_context, level) {
+    return { 'level-label': logger.levels.labels[level] }
+  }
+})
+
+logger.info({
+  description: 'Ok'
+}, 'Message 1')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","description":"Ok","level-label":"info","msg":"Message 1"}
+logger.error('Message 2')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","level-label":"error","msg":"Message 2"}
+```
+
+If the `mixin` feature is being used merely to add static metadata to each log message,
+then a [child logger ⇗](/docs/child-loggers.md) should be used instead, unless your application
+needs to concatenate values for a specific key multiple times, in which case `mixin` can be
+used to avoid the [duplicate keys caveat](/docs/child-loggers.md#duplicate-keys-caveat):
+
+```js
+const logger = pino({
+  mixin (obj, num, logger) {
+    return {
+      tags: logger.tags
+    }
+  }
+})
+logger.tags = {}
+
+logger.addTag = function (key, value) {
+  logger.tags[key] = value
+}
+
+function createChild (parent, ...context) {
+  const newChild = parent.child(...context)
+  newChild.tags = { ...parent.tags }
+  newChild.addTag = function (key, value) {
+    newChild.tags[key] = value
+  }
+  return newChild
+}
+
+logger.addTag('foo', 1)
+const child = createChild(logger, {})
+child.addTag('bar', 2)
+logger.info('this will only have `foo: 1`')
+child.info('this will have both `foo: 1` and `bar: 2`')
+logger.info('this will still only have `foo: 1`')
+```
+
+As of pino 7.x, when `mixin` is used with the [`nestedKey` option](#opt-nestedkey),
+the object returned from the `mixin` method will also be nested. Prior versions would mix
+this object into the root.
+
+```js
+const logger = pino({
+  nestedKey: 'payload',
+  mixin() {
+    return { requestId: requestId.currentId() }
+  }
+})
+
+logger.info({
+  description: 'Ok'
+}, 'Message 1')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","payload":{"requestId":"dfe9a9014b","description":"Ok"},"msg":"Message 1"}
+```
+
+
+#### `mixinMergeStrategy` (Function):
+
+Default: `undefined`
+
+If provided, the `mixinMergeStrategy` function is called each time one of the active
+logging methods is called. The first parameter is the `mergingObject` value or an empty object,
+the second parameter is the value resulting from `mixin()` (see the [`mixin` option](#opt-mixin)) or an empty object.
+The function must synchronously return an object.
+
```js
+// Default strategy, `mergingObject` has priority
+const logger = pino({
+  mixin() {
+    return { tag: 'docker' }
+  },
+  // mixinMergeStrategy(mergeObject, mixinObject) {
+  //   return Object.assign(mixinObject, mergeObject)
+  // }
+})
+
+logger.info({
+  tag: 'local'
+}, 'Message')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","tag":"local","msg":"Message"}
+```
+
+```js
+// Custom mutable strategy, `mixin` has priority
+const logger = pino({
+  mixin() {
+    return { tag: 'k8s' }
+  },
+  mixinMergeStrategy(mergeObject, mixinObject) {
+    return Object.assign(mergeObject, mixinObject)
+  }
+})
+
+logger.info({
+  tag: 'local'
+}, 'Message')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","tag":"k8s","msg":"Message"}
+```
+
+```js
+// Custom immutable strategy, `mixin` has priority
+const logger = pino({
+  mixin() {
+    return { tag: 'k8s' }
+  },
+  mixinMergeStrategy(mergeObject, mixinObject) {
+    return Object.assign({}, mergeObject, mixinObject)
+  }
+})
+
+logger.info({
+  tag: 'local'
+}, 'Message')
+// {"level":30,"time":1591195061437,"pid":16012,"hostname":"x","tag":"k8s","msg":"Message"}
+```
+
+
+#### `redact` (Array | Object):
+
+Default: `undefined`
+
+As an array, the `redact` option specifies paths that should
+have their values redacted from any log output.
+
+Each path must be a string using a syntax that corresponds to JavaScript dot and bracket notation.
+
+If an object is supplied, three options can be specified:
+ * `paths` (array): Required. An array of paths. See [redaction - Path Syntax ⇗](/docs/redaction.md#paths) for specifics.
+ * `censor` (String|Function|Undefined): Optional. When supplied as a String the `censor` option will overwrite keys that are to be redacted. When set to `undefined` the key will be removed entirely from the object.
+ The `censor` option may also be a mapping function. The (synchronous) mapping function has the signature `(value, path) => redactedValue` and is called with the unredacted `value` and the `path` to the key being redacted, as an array. For example, given a redaction path of `a.b.c` the `path` argument would be `['a', 'b', 'c']`. The value returned from the mapping function becomes the applied censor value. Default: `'[Redacted]'`
+ * `remove` (Boolean): Optional. Instead of censoring the value, remove both the key and the value. Default: `false`
+
+**WARNING**: Never allow user input to define redacted paths.
+
+* See the [redaction ⇗](/docs/redaction.md) documentation.
+* See [fast-redact#caveat ⇗](https://github.com/davidmarkclements/fast-redact#caveat)
+
+
+#### `hooks` (Object)
+
+An object mapping to hook functions. Hook functions allow for customizing
+internal logger operations. Hook functions ***must*** be synchronous functions.
+
+
+##### `logMethod`
+
+Allows for manipulating the parameters passed to logger methods. The signature
+for this hook is `logMethod (args, method, level) {}`, where `args` is an array
+of the arguments that were passed to the log method, `method` is the log
+method itself, and `level` is the log level. This hook ***must*** invoke the
+`method` function by using apply, like so: `method.apply(this, newArgumentsArray)`.
+
+For example, Pino expects a binding object to be the first parameter with an
+optional string message as the second parameter. 
Using this hook the parameters
+can be flipped:
+
+```js
+const hooks = {
+  logMethod (inputArgs, method, level) {
+    if (inputArgs.length >= 2) {
+      const arg1 = inputArgs.shift()
+      const arg2 = inputArgs.shift()
+      return method.apply(this, [arg2, arg1, ...inputArgs])
+    }
+    return method.apply(this, inputArgs)
+  }
+}
+```
+
+
+
+##### `streamWrite`
+
+Allows for manipulating the _stringified_ JSON log data just before writing to various transports.
+
+The method receives the stringified JSON and must return valid stringified JSON.
+
+For example:
+```js
+const hooks = {
+  streamWrite (s) {
+    return s.replaceAll('sensitive-api-key', 'XXX')
+  }
+}
+```
+
+
+#### `formatters` (Object)
+
+An object containing functions for formatting the shape of the log lines.
+These functions should return a JSONifiable object and
+should never throw. These functions allow for full customization of
+the resulting log lines. For example, they can be used to change
+the level key name or to enrich the default metadata.
+
+##### `level`
+
+Changes the shape of the log level. The default shape is `{ level: number }`.
+The function takes two arguments, the label of the level (e.g. `'info'`)
+and the numeric value (e.g. `30`).
+
+Note: the log level cannot be customized when using multiple transports.
+
+```js
+const formatters = {
+  level (label, number) {
+    return { level: number }
+  }
+}
+```
+
+##### `bindings`
+
+Changes the shape of the bindings. The default shape is `{ pid, hostname }`.
+The function takes a single argument, the bindings object, which can be configured
+using the [`base` option](#opt-base). Called once when creating the logger.
+
+```js
+const formatters = {
+  bindings (bindings) {
+    return { pid: bindings.pid, hostname: bindings.hostname }
+  }
+}
+```
+
+##### `log`
+
+Changes the shape of the log object. This function will be called every time
+one of the log methods (such as `.info`) is called. All arguments passed to the
+log method, except the message, will be passed to this function. By default, it does
+not change the shape of the log object.
+
+```js
+const formatters = {
+  log (object) {
+    return object
+  }
+}
+```
+
+
+#### `serializers` (Object)
+
+Default: `{err: pino.stdSerializers.err}`
+
+An object containing functions for custom serialization of objects.
+These functions should return a JSONifiable object and they
+should never throw. When logging an object, each top-level property
+matching the exact key of a serializer will be serialized using the defined serializer.
+
+The serializers are applied when a property in the logged object matches a property
+in the serializers. The only exception is the `err` serializer, as it is also applied in case
+the object is an instance of `Error`, e.g. `logger.info(new Error('kaboom'))`.
+See the `errorKey` option to change the `err` namespace.
+
+* See [pino.stdSerializers](#pino-stdserializers)
+
+#### `msgPrefix` (String)
+
+Default: `undefined`
+
+The `msgPrefix` property allows you to specify a prefix for every message of the logger and its children.
+
+```js
+const logger = pino({
+  msgPrefix: '[HTTP] '
+})
+logger.info('got new request!')
+// > [HTTP] got new request!
+
+const child = logger.child({})
+child.info('User authenticated!')
+// > [HTTP] User authenticated!
+```
+
+
+#### `base` (Object)
+
+Default: `{pid: process.pid, hostname: os.hostname()}`
+
+Key-value pairs added to each log line, as if they were bindings of a child logger.
+
+Set to `undefined` to avoid adding the `pid` and `hostname` properties to each log.
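+
+For example, a minimal sketch of the `undefined` case:
+
+```js
+const logger = pino({ base: undefined })
+logger.info('hello')
+// {"level":30,"time":1531171074631,"msg":"hello"} (no pid or hostname)
+```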
+
+#### `enabled` (Boolean)
+
+Default: `true`
+
+Set to `false` to disable logging.
+
+#### `crlf` (Boolean)
+
+Default: `false`
+
+Set to `true` to log newline delimited JSON with `\r\n` instead of `\n`.
+
+
+#### `timestamp` (Boolean | Function)
+
+Default: `true`
+
+Enables or disables the inclusion of a timestamp in the
+log message. If a function is supplied, it must synchronously return a partial JSON string
+representation of the time, e.g. `,"time":1493426328206` (which is the default).
+
+If set to `false`, no timestamp will be included in the output.
+
+See [stdTimeFunctions](#pino-stdtimefunctions) for a set of available functions
+for passing in as a value for this option.
+
+Example:
+```js
+timestamp: () => `,"time":"${new Date(Date.now()).toISOString()}"`
+// which is equivalent to:
+// timestamp: stdTimeFunctions.isoTime
+```
+
+**Caution**: attempting to format time in-process will significantly impact logging performance.
+
+
+#### `messageKey` (String)
+
+Default: `'msg'`
+
+The string key for the 'message' in the JSON object.
+
+
+#### `errorKey` (String)
+
+Default: `'err'`
+
+The string key for the 'error' in the JSON object.
+
+
+#### `nestedKey` (String)
+
+Default: `null`
+
+If there's a chance that objects being logged have properties that conflict with those from pino itself (`level`, `timestamp`, `pid`, etc.)
+and duplicate keys in your log records are undesirable, pino can be configured with a `nestedKey` option that causes any `object`s that are logged
+to be placed under a key whose name is the value of `nestedKey`.
+
+This way, when searching something like Kibana for values, one can consistently search under the configured `nestedKey` value instead of the root log record keys.
+
+For example,
+```js
+const logger = require('pino')({
+  nestedKey: 'payload'
+})
+
+const thing = { level: 'hi', time: 'never', foo: 'bar'} // has pino-conflicting properties!
+logger.info(thing)
+
+// logs the following:
+// {"level":30,"time":1578357790020,"pid":91736,"hostname":"x","payload":{"level":"hi","time":"never","foo":"bar"}}
+```
+In this way, logged objects' properties don't conflict with pino's standard logging properties,
+and searching for logged objects can start from a consistent path.
+
+#### `browser` (Object)
+
+Browser only, may have `asObject` and `write` keys. This option is separately
+documented in the [Browser API ⇗](/docs/browser.md) documentation.
+
+* See [Browser API ⇗](/docs/browser.md)
+
+#### `transport` (Object)
+
+The `transport` option is a shorthand for the [pino.transport()](#pino-transport) function.
+It supports the same input options:
+```js
+require('pino')({
+  transport: {
+    target: '/absolute/path/to/my-transport.mjs'
+  }
+})
+
+// or multiple transports
+require('pino')({
+  transport: {
+    targets: [
+      { target: '/absolute/path/to/my-transport.mjs', level: 'error' },
+      { target: 'some-file-transport', options: { destination: '/dev/null' } }
+    ]
+  }
+})
+```
+
+If the `transport` option is supplied to `pino`, a [`destination`](#destination) parameter may not also be passed as a separate argument to `pino`:
+
+```js
+pino({ transport: {}}, '/path/to/somewhere') // THIS WILL NOT WORK, DO NOT DO THIS
+pino({ transport: {}}, process.stderr) // THIS WILL NOT WORK, DO NOT DO THIS
+```
+
+In this case, an `Error` will be thrown.
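+
+To attach a destination when using `transport`, route it through the transport itself instead. A minimal sketch using the built-in `pino/file` target:
+
+```js
+const logger = require('pino')({
+  transport: {
+    target: 'pino/file', // ships with pino; writes to a file path or file descriptor
+    options: { destination: '/log/path' }
+  }
+})
+```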
+
+* See [pino.transport()](#pino-transport)
+
+#### `onChild` (Function)
+
+The `onChild` function is a synchronous callback that will be called on each creation of a new child, passing the child instance as its first argument.
+Any error thrown inside the callback will be uncaught and should be handled inside the callback.
+```js
+const parent = require('pino')({ onChild: (instance) => {
+  // Execute callback code for each newly created child.
+}})
+// `onChild` will now be executed with the new child.
+parent.child(bindings)
+```
+
+
+
+### `destination` (Number | String | Object | DestinationStream | SonicBoomOpts | WritableStream)
+
+Default: `pino.destination(1)` (STDOUT)
+
+The `destination` parameter can be a file descriptor, a file path, or an
+object with a `dest` property pointing to a fd or path.
+An ordinary Node.js writable `stream` can be passed as the
+destination (such as the result
+of `fs.createWriteStream`) but for peak log writing performance, it is strongly
+recommended to use `pino.destination` to create the destination stream.
+Note that the `destination` parameter can be the result of `pino.transport()`.
+
+```js
+// pino.destination(1) by default
+const stdoutLogger = require('pino')()
+
+// destination param may be in first position when no options:
+const fileLogger = require('pino')( pino.destination('/log/path'))
+
+// use the stderr file handle to log to stderr:
+const opts = {name: 'my-logger'}
+const stderrLogger = require('pino')(opts, pino.destination(2))
+
+// automatic wrapping in pino.destination
+const autoWrappedLogger = require('pino')('/log/path')
+
+// Asynchronous logging
+const asyncFileLogger = pino(pino.destination({ dest: '/log/path', sync: false }))
+```
+
+However, there are some special instances where `pino.destination` is not used as the default:
+
++ When something, e.g. a process manager, has monkey-patched `process.stdout.write`.
+
+In these cases `process.stdout` is used instead.
+
+Note: If the parameter is a string integer, e.g. `'1'`, it will be coerced to
+a number and used as a file descriptor. If this is not desired, provide a full
+path, e.g. `/tmp/1`.
+
+* See [`pino.destination`](#pino-destination)
+
+
+#### `destination[Symbol.for('pino.metadata')]`
+
+Default: `false`
+
+Using the global symbol `Symbol.for('pino.metadata')` as a key on the `destination` parameter and
+setting the key to `true`, indicates that the following properties should be
+set on the `destination` object after each log line is written:
+
+* the last logging level as `destination.lastLevel`
+* the last logging message as `destination.lastMsg`
+* the last logging object as `destination.lastObj`
+* the last time as `destination.lastTime`, which will be the partial string returned
+  by the time function.
+* the last logger instance as `destination.lastLogger` (to support child
+  loggers)
+
+The following is a succinct usage example:
+
+```js
+const dest = pino.destination('/dev/null')
+dest[Symbol.for('pino.metadata')] = true
+const logger = pino(dest)
+logger.info({a: 1}, 'hi')
+const { lastMsg, lastLevel, lastObj, lastTime} = dest
+console.log(
+  'Logged message "%s" at level %d with object %o at time %s',
+  lastMsg, lastLevel, lastObj, lastTime
+) // Logged message "hi" at level 30 with object { a: 1 } at time 1531590545089
+```
+
+
+## Logger Instance
+
+The logger instance is the object returned by the main exported
+[`pino`](#export) function.
+
+The primary purpose of the logger instance is to provide logging methods.
The default logging methods are `trace`, `debug`, `info`, `warn`, `error`, and `fatal`.

Each logging method has the following signature:
`([mergingObject], [message], [...interpolationValues])`.

The parameters are explained below using the `logger.info` method, but the same applies to all logging methods.

### Logging Method Parameters


#### `mergingObject` (Object)

An object can optionally be supplied as the first parameter. Each enumerable key and value
of the `mergingObject` is copied into the JSON log line.

```js
logger.info({MIX: {IN: true}})
// {"level":30,"time":1531254555820,"pid":55956,"hostname":"x","MIX":{"IN":true}}
```

If the object is of type Error, it is wrapped in an object containing a property `err` (`{ err: mergingObject }`).
This allows for a unified error handling flow.

The `serializers` and `errorKey` options can be used at instantiation time to change the namespace
from `err` to another string as preferred.


#### `message` (String)

A `message` string can optionally be supplied as the first parameter, or
as the second parameter after supplying a `mergingObject`.

By default, the contents of the `message` parameter will be merged into the
JSON log line under the `msg` key:

```js
logger.info('hello world')
// {"level":30,"time":1531257112193,"msg":"hello world","pid":55956,"hostname":"x"}
```

The `message` parameter takes precedence over the `mergingObject`.
That is, if a `mergingObject` contains a `msg` property, and a `message` parameter
is supplied in addition, the `msg` property in the output log will be the value of
the `message` parameter, not the value of the `msg` property on the `mergingObject`.
See [Avoid Message Conflict](/docs/help.md#avoid-message-conflict) for information
on how to overcome this limitation.

If no `message` parameter is provided, and the `mergingObject` is of type `Error` or has a property named `err`, the
`message` parameter is set to the `message` value of the error. See the `errorKey` option if you want to change the namespace.

The `messageKey` option can be used at instantiation time to change the namespace
from `msg` to another string as preferred.

The `message` string may contain a printf-style string with support for
the following placeholders:

* `%s` – string placeholder; every non-string value passed in will have `.toString()` called
* `%d` – digit placeholder
* `%O`, `%o`, and `%j` – object placeholder

Values supplied as additional arguments to the logger method will
then be interpolated accordingly.

* See [`messageKey` pino option](#opt-messagekey)
* See [`...interpolationValues` log method parameter](#interpolationvalues)


#### `...interpolationValues` (Any)

All arguments supplied after `message` are serialized and interpolated according
to any supplied printf-style placeholders (`%s`, `%d`, `%o`|`%O`|`%j`) to form
the final output `msg` value for the JSON log line.
```js
logger.info('%o hello %s', {worldly: 1}, 'world')
// {"level":30,"time":1531257826880,"msg":"{\"worldly\":1} hello world","pid":55956,"hostname":"x"}
```

Since pino v6, consecutive parameters are no longer automatically concatenated
and cast to strings:

```js
logger.info('hello', 'world')
// {"level":30,"time":1531257618044,"msg":"hello","pid":55956,"hostname":"x"}
// world is missing
```

However, it's possible to inject a hook to modify this behavior:

```js
const pinoOptions = {
  hooks: { logMethod }
}

function logMethod (args, method) {
  if (args.length === 2) {
    args[0] = `${args[0]} %j`
  }
  method.apply(this, args)
}

const logger = pino(pinoOptions)
```

* See [`message` log method parameter](#message)
* See [`logMethod` hook](#logmethod)


#### Errors

Errors can be supplied either as the first parameter or, if a `mergingObject` is already in use, as the `err` property on that object.

The `serializers` and `errorKey` options can be used at instantiation time to change the namespace
from `err` to another string as preferred.

> ## Note
> This section describes the default configuration. The error serializer can be
> mapped to a different key using the [`serializers`](#opt-serializers) option.
```js
logger.info(new Error("test"))
// {"level":30,"time":1531257618044,"msg":"test","stack":"...","type":"Error","pid":55956,"hostname":"x"}

logger.info({ err: new Error("test"), otherkey: 123 }, "some text")
// {"level":30,"time":1531257618044,"err":{"message":"test","stack":"...","type":"Error"},"msg":"some text","pid":55956,"hostname":"x","otherkey":123}
```


### `logger.trace([mergingObject], [message], [...interpolationValues])`

Write a `'trace'` level log, if the configured [`level`](#level) allows for it.

* See [`mergingObject` log method parameter](#mergingobject)
* See [`message` log method parameter](#message)
* See [`...interpolationValues` log method parameter](#interpolationvalues)


### `logger.debug([mergingObject], [message], [...interpolationValues])`

Write a `'debug'` level log, if the configured `level` allows for it.

* See [`mergingObject` log method parameter](#mergingobject)
* See [`message` log method parameter](#message)
* See [`...interpolationValues` log method parameter](#interpolationvalues)


### `logger.info([mergingObject], [message], [...interpolationValues])`

Write an `'info'` level log, if the configured `level` allows for it.

* See [`mergingObject` log method parameter](#mergingobject)
* See [`message` log method parameter](#message)
* See [`...interpolationValues` log method parameter](#interpolationvalues)


### `logger.warn([mergingObject], [message], [...interpolationValues])`

Write a `'warn'` level log, if the configured `level` allows for it.

* See [`mergingObject` log method parameter](#mergingobject)
* See [`message` log method parameter](#message)
* See [`...interpolationValues` log method parameter](#interpolationvalues)


### `logger.error([mergingObject], [message], [...interpolationValues])`

Write an `'error'` level log, if the configured `level` allows for it.

* See [`mergingObject` log method parameter](#mergingobject)
* See [`message` log method parameter](#message)
* See [`...interpolationValues` log method parameter](#interpolationvalues)


### `logger.fatal([mergingObject], [message], [...interpolationValues])`

Write a `'fatal'` level log, if the configured `level` allows for it.
Since `'fatal'` level messages are intended to be logged just before the process
exits, the `fatal` method will always synchronously flush the destination.
It is therefore important not to misuse `fatal`: the extra flush adds
performance overhead if it is used for anything other than writing final
log messages before the process crashes or exits.

* See [`mergingObject` log method parameter](#mergingobject)
* See [`message` log method parameter](#message)
* See [`...interpolationValues` log method parameter](#interpolationvalues)


### `logger.silent()`

Noop function.


### `logger.child(bindings, [options]) => logger`

The `logger.child` method allows for the creation of stateful loggers,
where key-value pairs can be pinned to a logger, causing them to be output
on every log line.

Child loggers use the same output stream as the parent and inherit
the current log level of the parent at the time they are spawned.

The log level of a child is mutable. It can be set independently
of the parent either by setting the [`level`](#level) accessor after creating
the child logger or by using the [`options.level`](#optionslevel-string) key.


#### `bindings` (Object)

An object of key-value pairs to include in every log line output
via the returned child logger.

```js
const child = logger.child({ MIX: {IN: 'always'} })
child.info('hello')
// {"level":30,"time":1531258616689,"msg":"hello","pid":64849,"hostname":"x","MIX":{"IN":"always"}}
child.info('child!')
// {"level":30,"time":1531258617401,"msg":"child!","pid":64849,"hostname":"x","MIX":{"IN":"always"}}
```

The `bindings` object may contain any key except the reserved configuration keys `level` and `serializers`.

##### `bindings.serializers` (Object) - DEPRECATED

Use `options.serializers` instead.

#### `options` (Object)

Options for the child logger. These options will override the parent logger options.

##### `options.level` (String)

The `level` property overrides the log level of the child logger.
By default, the parent log level is inherited.
After the creation of the child logger, it is also accessible using the [`logger.level`](#logger-level) key.

```js
const logger = pino()
logger.debug('nope') // will not log, since default level is info
const child = logger.child({foo: 'bar'}, {level: 'debug'})
child.debug('debug!') // will log, as the `level` property set the level to debug
```


##### `options.msgPrefix` (String)

Default: `undefined`

The `msgPrefix` property allows you to specify a prefix for every message of the child logger.
By default, the parent prefix is inherited.
If the parent already has a prefix, the parent's prefix is displayed first, followed by the child's.

```js
const logger = pino({
  msgPrefix: '[HTTP] '
})
logger.info('got new request!')
// > [HTTP] got new request!

const child = logger.child({avengers: 'assemble'}, {msgPrefix: '[Proxy] '})
child.info('message proxied!')
// > [HTTP] [Proxy] message proxied!
```

##### `options.redact` (Array | Object)

Setting `options.redact` to an array or object will override the parent `redact` options. To remove `redact` options inherited from the parent logger, set this value to an empty array (`[]`).
```js
const logger = require('pino')({ redact: ['hello'] })
logger.info({ hello: 'world' })
// {"level":30,"time":1625794363403,"pid":67930,"hostname":"x","hello":"[Redacted]"}
const child = logger.child({ foo: 'bar' }, { redact: ['foo'] })
child.info({ hello: 'world' })
// {"level":30,"time":1625794553558,"pid":67930,"hostname":"x","hello":"world","foo":"[Redacted]"}
```

* See [`redact` option](#opt-redact)

##### `options.serializers` (Object)

Child loggers inherit the [serializers](#opt-serializers) from the parent logger.

Setting the `serializers` key of the `options` object will override
any configured parent serializers.

```js
const logger = require('pino')()
logger.info({test: 'will appear'})
// {"level":30,"time":1531259759482,"pid":67930,"hostname":"x","test":"will appear"}
const child = logger.child({}, {serializers: {test: () => `child-only serializer`}})
child.info({test: 'will be overwritten'})
// {"level":30,"time":1531259784008,"pid":67930,"hostname":"x","test":"child-only serializer"}
```

* See [`serializers` option](#opt-serializers)
* See [pino.stdSerializers](#pino-stdSerializers)


### `logger.bindings()`

Returns an object containing all the current bindings, cloned from the ones passed in via `logger.child()`.
```js
const child = logger.child({ foo: 'bar' })
console.log(child.bindings())
// { foo: 'bar' }
const anotherChild = child.child({ MIX: { IN: 'always' } })
console.log(anotherChild.bindings())
// { foo: 'bar', MIX: { IN: 'always' } }
```


### `logger.setBindings(bindings)`

Adds to the bindings of this logger instance.

**Note:** Does not overwrite bindings. Can potentially result in duplicate keys in
log lines.

* See [`bindings` parameter in `logger.child`](#logger-child-bindings)


### `logger.flush([cb])`

Flushes the content of the buffer when using `pino.destination({ sync: false })`.

This is an asynchronous operation, best used as fire-and-forget.

The use case is primarily for asynchronous logging, which may buffer
log lines while others are being written. The `logger.flush` method can be
used to flush the logs
on a long interval, say ten seconds. Such a strategy can provide an
optimal balance between extremely efficient logging during high-demand periods
and safer logging during low-demand periods.

If there is a need to wait for the logs to be flushed, a callback should be used.

* See [`destination` parameter](#destination)
* See [Asynchronous Logging ⇗](/docs/asynchronous.md)


### `logger.level` (String) [Getter/Setter]

Set this property to the desired logging level.

The core levels and their values are as follows:

|            |       |       |      |      |       |       |          |
|:-----------|-------|-------|------|------|-------|-------|---------:|
| **Level:** | trace | debug | info | warn | error | fatal |   silent |
| **Value:** | 10    | 20    | 30   | 40   | 50    | 60    | Infinity |

The logging level is a *minimum* level based on the associated value of that level.

For instance, if `logger.level` is `info` *(30)*, then the `info` *(30)*, `warn` *(40)*, `error` *(50)*, and `fatal` *(60)* log methods will be enabled, but the `trace` *(10)* and `debug` *(20)* methods, being less than 30, will not.

The `silent` logging level is a specialized level that disables all logging;
the `silent` log method is a noop function.


### `logger.isLevelEnabled(level)`

A utility method for determining if a given log level will write to the destination.
#### `level` (String)

The given level to check against:

```js
if (logger.isLevelEnabled('debug')) logger.debug('conditional log')
```

#### `levelLabel` (String)

Defines the method name of the new level.

* See [`logger.level`](#level)

#### `levelValue` (Number)

Defines the associated minimum threshold value for the level, and
therefore where it sits in order of priority among other levels.

* See [`logger.level`](#level)


### `logger.levelVal` (Number)

Supplies the integer value for the current logging level.

```js
if (logger.levelVal === 30) {
  console.log('logger level is `info`')
}
```


### `logger.levels` (Object)

Levels are mapped to values to determine the minimum threshold that a
logging method should be enabled at (see [`logger.level`](#level)).

The `logger.levels` property holds the mappings between levels and values,
and vice versa.

```sh
$ node -p "require('pino')().levels"
```

```js
{ labels:
   { '10': 'trace',
     '20': 'debug',
     '30': 'info',
     '40': 'warn',
     '50': 'error',
     '60': 'fatal' },
  values:
   { fatal: 60, error: 50, warn: 40, info: 30, debug: 20, trace: 10 } }
```

* See [`logger.level`](#level)


### logger\[Symbol.for('pino.serializers')\]

Returns the serializers as applied to the current logger instance. If a child logger did not
register its own serializer upon instantiation, the serializers of the parent will be returned.


### Event: 'level-change'

The logger instance is also an [`EventEmitter ⇗`](https://nodejs.org/dist/latest/docs/api/events.html#events_class_eventemitter).

A listener function can be attached to a logger via the `level-change` event.

The listener is passed five arguments:

* `levelLabel` – the new level string, e.g. `trace`
* `levelValue` – the new level number, e.g. `10`
* `previousLevelLabel` – the prior level string, e.g. `info`
* `previousLevelValue` – the prior level number, e.g. `30`
* `logger` – the logger instance from which the event originated

```js
const logger = require('pino')()
logger.on('level-change', (lvl, val, prevLvl, prevVal) => {
  console.log('%s (%d) was changed to %s (%d)', prevLvl, prevVal, lvl, val)
})
logger.level = 'trace' // trigger event
```

Please note that due to a [known bug](https://github.com/pinojs/pino/issues/1006), every `logger.child()` call will
fire a `level-change` event. These events can be ignored by writing an event handler like:

```js
const logger = require('pino')()
logger.on('level-change', function (lvl, val, prevLvl, prevVal, instance) {
  if (logger !== instance) {
    return
  }
  console.log('%s (%d) was changed to %s (%d)', prevLvl, prevVal, lvl, val)
})
logger.child({}); // trigger an event by creating a child instance, notice no console.log
logger.level = 'trace' // trigger event using actual value change, notice console.log
```


### `logger.version` (String)

Exposes the Pino package version. Also available on the exported `pino` function.

* See [`pino.version`](#pino-version)


### `logger.msgPrefix` (String|Undefined)

Exposes the cumulative `msgPrefix` of the logger.

* See [`options.msgPrefix`](#options-msgPrefix)

## Statics


### `pino.destination([opts]) => SonicBoom`

Create a Pino Destination instance: a stream-like object with
significantly more throughput than a standard Node.js stream.
+ +```js +const pino = require('pino') +const logger = pino(pino.destination('./my-file')) +const logger2 = pino(pino.destination()) +const logger3 = pino(pino.destination({ + dest: './my-file', + minLength: 4096, // Buffer before writing + sync: false // Asynchronous logging, the default +})) +const logger4 = pino(pino.destination({ + dest: './my-file2', + sync: true // Synchronous logging +})) +``` + +The `pino.destination` method may be passed a file path or a numerical file descriptor. +By default, `pino.destination` will use `process.stdout.fd` (1) as the file descriptor. + +`pino.destination` is implemented on [`sonic-boom` ⇗](https://github.com/mcollina/sonic-boom). + +A `pino.destination` instance can also be used to reopen closed files +(for example, for some log rotation scenarios), see [Reopening log files](/docs/help.md#reopening). + +* See [`destination` parameter](#destination) +* See [`sonic-boom` ⇗](https://github.com/mcollina/sonic-boom) +* See [Reopening log files](/docs/help.md#reopening) +* See [Asynchronous Logging ⇗](/docs/asynchronous.md) + + +### `pino.transport(options) => ThreadStream` + +Create a stream that routes logs to a worker thread that +wraps around a [Pino Transport](/docs/transports.md). + +```js +const pino = require('pino') +const transport = pino.transport({ + target: 'some-transport', + options: { some: 'options for', the: 'transport' } +}) +pino(transport) +``` + +Multiple transports may also be defined, and specific levels can be logged to each transport: + +```js +const pino = require('pino') +const transport = pino.transport({ + targets: [{ + level: 'info', + target: 'pino-pretty' // must be installed separately + }, { + level: 'trace', + target: 'pino/file', + options: { destination: '/path/to/store/logs' } + }] +}) +pino(transport) +``` + +A pipeline could also be created to transform log lines _before_ sending them: + +```js +const pino = require('pino') +const transport = pino.transport({ + pipeline: [{ + target: 'pino-syslog' // must be installed separately + }, { + target: 'pino-socket' // must be installed separately + }] +}) +pino(transport) +``` + +Multiple transports can now be defined to include pipelines: + +```js +const pino = require('pino') +const transport = pino.transport({ + targets: [{ + level: 'info', + target: 'pino-pretty' // must be installed separately + }, { + level: 'trace', + target: 'pino/file', + options: { destination: '/path/to/store/logs' } + }, { + pipeline: [{ + target: 'pino-syslog' // must be installed separately + }, { + target: 'pino-socket' // must be installed separately + }] + } + ] +}) +pino(transport) +``` + +If `WeakRef`, `WeakMap`, and `FinalizationRegistry` are available in the current runtime (v14.5.0+), then the thread +will be automatically terminated in case the stream or logger goes out of scope. +The `transport()` function adds a listener to `process.on('beforeExit')` and `process.on('exit')` to ensure the worker +is flushed and all data synced before the process exits. + +Note that calling `process.exit()` on the main thread will stop the event loop on the main thread from turning. As a result, +using `console.log` and `process.stdout` after the main thread called `process.exit()` will not produce any output. 
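Where an explicit `process.exit()` is unavoidable, one approach is to end the transport first and only exit once it has closed. A minimal sketch (that the returned stream emits `'close'` after `end()` is an assumption about `thread-stream` behavior, and the log file path is illustrative):

```js
const pino = require('pino')

const transport = pino.transport({
  target: 'pino/file',
  options: { destination: '/tmp/app.log' } // illustrative path
})
const logger = pino(transport)

logger.fatal('unrecoverable state, shutting down')

// flush and terminate the worker, then exit explicitly
transport.end()
transport.once('close', () => process.exit(1))
```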
If you are embedding/integrating pino within your framework, you will need to make pino aware of the script that is calling it,
like so:

```js
const pino = require('pino')
const getCaller = require('get-caller-file')

module.exports = function build () {
  const logger = pino({
    transport: {
      caller: getCaller(),
      target: 'transport',
      options: { destination: './destination' }
    }
  })
  return logger
}
```

Note that _any `'error'`_ event emitted by the transport must be considered a fatal error and the process must be terminated.
Error events are not recoverable.

For more on transports, how they work, and how to create them, see the [`Transports documentation`](/docs/transports.md).

* See [`Transports`](/docs/transports.md)
* See [`thread-stream` ⇗](https://github.com/mcollina/thread-stream)

#### Options

* `target`: The transport to pass logs through. This may be an installed module name or an absolute path.
* `options`: An options object which is serialized (see [Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm)), passed to the worker thread, parsed, and then passed to the exported transport function.
* `worker`: [Worker thread](https://nodejs.org/api/worker_threads.html#worker_threads_new_worker_filename_options) configuration options. Additionally, the `worker` option supports `worker.autoEnd`. If this is set to `false`, logs will not be flushed on process exit. It is then up to the developer to call `transport.end()` to flush logs.
* `targets`: May be specified instead of `target`. Must be an array of transport configurations and/or pipelines. Transport configurations include the aforementioned `options` and `target` options plus a `level` option, which will send only logs at or above the specified level to a transport.
* `pipeline`: May be specified instead of `target`. Must be an array of transport configurations. Transport configurations include the aforementioned `options` and `target` options. All intermediate steps in the pipeline _must_ be `Transform` streams and not `Writable`.
* `dedupe`: See [pino.multistream options](#pino-multistream)



### `pino.multistream(streamsArray, opts) => MultiStreamRes`

Create a stream composed of multiple destination streams. Returns an
object implementing the [MultiStreamRes](#multistreamres) interface.

```js
var fs = require('node:fs')
var pino = require('pino')
var pretty = require('pino-pretty')
var streams = [
  {stream: fs.createWriteStream('/tmp/info.stream.out')},
  {stream: pretty()},
  {level: 'debug', stream: fs.createWriteStream('/tmp/debug.stream.out')},
  {level: 'fatal', stream: fs.createWriteStream('/tmp/fatal.stream.out')}
]

var log = pino({
  level: 'debug' // this MUST be set at the lowest level of the
                 // destinations
}, pino.multistream(streams))

log.debug('this will be written to /tmp/debug.stream.out')
log.info('this will be written to /tmp/debug.stream.out and /tmp/info.stream.out')
log.fatal('this will be written to /tmp/debug.stream.out, /tmp/info.stream.out and /tmp/fatal.stream.out')
```

In order for `multistream` to work, the log level __must__ be set to the lowest level used in the streams array. The default is `info`.

#### Options

* `levels`: Pass custom log level definitions to the instance as an object.

* `dedupe`: Set this to `true` to send logs only to the stream with the highest matching level.
Default: `false`

  The `dedupe` flag can be useful, for example, when using `pino.multistream` to redirect `error` logs to `process.stderr` and all others to `process.stdout`:

  ```js
  var pino = require('pino')
  var multistream = pino.multistream
  var streams = [
    {level: 'debug', stream: process.stdout},
    {level: 'error', stream: process.stderr},
  ]

  var opts = {
    levels: {
      silent: Infinity,
      fatal: 60,
      error: 50,
      warn: 50, // remapped to 50 so that, with dedupe, warnings go to process.stderr
      info: 30,
      debug: 20,
      trace: 10
    },
    dedupe: true,
  }

  var log = pino({
    level: 'debug' // this MUST be set at the lowest level of the
                   // destinations
  }, multistream(streams, opts))

  log.debug('this will be written ONLY to process.stdout')
  log.info('this will be written ONLY to process.stdout')
  log.error('this will be written ONLY to process.stderr')
  log.fatal('this will be written ONLY to process.stderr')
  ```


### `pino.stdSerializers` (Object)

The `pino.stdSerializers` object provides functions for serializing objects common to many projects. The standard serializers are directly imported from [pino-std-serializers](https://github.com/pinojs/pino-std-serializers).

* See [pino-std-serializers ⇗](https://github.com/pinojs/pino-std-serializers)


### `pino.stdTimeFunctions` (Object)

The [`timestamp`](#opt-timestamp) option can accept a function that determines the
`timestamp` value in a log line.

The `pino.stdTimeFunctions` object provides a very small set of common functions for generating the
`timestamp` property. These consist of the following:

* `pino.stdTimeFunctions.epochTime`: Milliseconds since Unix epoch (Default)
* `pino.stdTimeFunctions.unixTime`: Seconds since Unix epoch
* `pino.stdTimeFunctions.nullTime`: Clears timestamp property (Used when `timestamp: false`)
* `pino.stdTimeFunctions.isoTime`: ISO 8601-formatted time in UTC
* `pino.stdTimeFunctions.isoTimeNano`: RFC 3339-formatted time in UTC with nanosecond precision

* See [`timestamp` option](#opt-timestamp)


### `pino.symbols` (Object)

For integration purposes with ecosystem and third-party libraries, `pino.symbols`
exposes the symbols used to hold non-public state and methods on the logger instance.

Access to the symbols allows logger state to be adjusted, and methods to be overridden or
proxied for performant integration where necessary.

The `pino.symbols` object is intended for library implementers and shouldn't be used
in general application code.


### `pino.version` (String)

Exposes the Pino package version. Also available on the logger instance.

* See [`logger.version`](#version)

## Interfaces


### `MultiStreamRes`
 Properties:

 * `write(data)`
   - `data` Object | string
   - Returns: void

   Write `data` onto the streams held by the current instance.
 * `add(dest)`
   - `dest` [StreamEntry](#streamentry) | [DestinationStream](#destinationstream)
   - Returns: [MultiStreamRes](#multistreamres)

   Add `dest` stream to the array of streams of the current instance.
 * `flushSync()`
   - Returns: `undefined`

   Call `flushSync` on each stream held by the current instance.

 * `lastId`
   - number

   The ID assigned to the last stream assigned to the current instance.
 * `minLevel`
   - number

   The minimum level amongst all the streams held by the current instance.

 * `remove(id)`
   - `id` [number]

   Removes a stream from the array of streams of the current instance using its assigned ID.
 * `streams`
   - Returns: [StreamEntry[]](#streamentry)

   The array of streams currently held by the current instance.
 * `clone(level)`
   - `level` [Level](#level-1)
   - Returns: [MultiStreamRes](#multistreamres)

   Returns a clone of the current instance with the provided `level`.

### `StreamEntry`
 Properties:

 * `stream`
   - DestinationStream
 * `level`
   - Optional: [Level](#level-1)

### `DestinationStream`
 Properties:

 * `write(msg)`
   - `msg` string

## Types
### `Level`

 * Values: `"fatal"` | `"error"` | `"warn"` | `"info"` | `"debug"` | `"trace"`

## TypeScript

### Module Augmentation

Pino supports TypeScript module augmentation to extend its type definitions. This allows you to customize the logging behavior to fit your application's specific requirements.

#### `LogFnFields` Interface

The `LogFnFields` interface can be augmented to control what fields are allowed in logging method objects. This is particularly useful for:

- Preventing certain fields from being logged (for security or compliance reasons)
- Enforcing specific field types across your application
- Enforcing consistent structured logging

##### Banning Fields

You can ban specific fields from being passed to logging methods by setting them to `never`. This helps prevent users from unintentionally overriding fields that are already set in the logger's `base` option, or clarifies that these fields are predefined.

```typescript
declare module "pino" {
  interface LogFnFields {
    service?: never;
    version?: never;
  }
}

// The banned field will now cause a TypeScript error
logger.info({ service: 'other-api', message: 'success' }) // ❌
logger.info({ message: 'success' }) // ✅
```

##### Enforcing Field Types

You can also enforce specific types for certain fields:

```typescript
declare module "pino" {
  interface LogFnFields {
    userId?: string;
    requestId?: string;
  }
}

// These will cause TypeScript errors
logger.info({ userId: 123 }) // ❌ Error: userId must be string
logger.info({ requestId: null }) // ❌ Error: requestId must be string

// This works fine
logger.info({ userId: '123' }) // ✅ Works fine
```

##### Enforcing Structured Logging

Required fields (non-optional) enforce consistent structured logging by requiring specific fields in all log objects:

```typescript
declare module "pino" {
  interface LogFnFields {
    userId: string
  }
}

logger.info({ userId: '123' }) // ✅ Works fine
logger.info({}) // ❌ Property 'userId' is missing in type '{}'
```

**Note**: Required fields will cause TypeScript errors when logging certain types like `Error` objects that don't contain the required properties:

```typescript
logger.error(new Error('test')) // ❌ Property 'userId' is missing in type 'Error'
```

This ensures that all log entries include required context fields, promoting consistent logging practices.
diff --git a/node_modules/pino/docs/asynchronous.md b/node_modules/pino/docs/asynchronous.md
new file mode 100644
index 0000000..ec8af84
--- /dev/null
+++ b/node_modules/pino/docs/asynchronous.md
@@ -0,0 +1,40 @@
+# Asynchronous Logging

Asynchronous logging keeps Pino's overhead to a minimum.
It works by buffering log messages and writing them in larger chunks.
```js
const pino = require('pino')
const logger = pino(pino.destination({
  dest: './my-file', // omit for stdout
  minLength: 4096, // Buffer before writing
  sync: false // Asynchronous logging
}))
```

It's always possible to turn on synchronous logging by passing `sync: true`.
In this mode of operation, log messages are directly written to the
output stream as the messages are generated with a _blocking_ operation.

* See [`pino.destination`](/docs/api.md#pino-destination)
* `pino.destination` is implemented on [`sonic-boom` ⇗](https://github.com/mcollina/sonic-boom).

### AWS Lambda

Asynchronous logging is disabled by default on AWS Lambda, or any other environment
that modifies `process.stdout`. If forcefully turned on, we recommend calling `dest.flushSync()` at the end
of each function execution to avoid losing data.

## Caveats

Asynchronous logging has a couple of important caveats:

* As opposed to the synchronous mode, there is not a one-to-one relationship between
  calls to logging methods (e.g. `logger.info`) and writes to a log file.
* There is a possibility of the most recently buffered log messages being lost
  in case of a system failure, e.g. a power cut.

See also:

* [`pino.destination` API](/docs/api.md#pino-destination)
* [`destination` parameter](/docs/api.md#destination)
diff --git a/node_modules/pino/docs/benchmarks.md b/node_modules/pino/docs/benchmarks.md
new file mode 100644
index 0000000..6b6e769
--- /dev/null
+++ b/node_modules/pino/docs/benchmarks.md
@@ -0,0 +1,55 @@
+
# Benchmarks

`pino.info('hello world')`:

```

BASIC benchmark averages
Bunyan average: 377.434ms
Winston average: 270.249ms
Bole average: 172.690ms
Debug average: 220.527ms
LogLevel average: 222.802ms
Pino average: 114.801ms
PinoMinLength average: 70.968ms
PinoNodeStream average: 159.192ms

```

`pino.info({'hello': 'world'})`:

```

OBJECT benchmark averages
BunyanObj average: 410.379ms
WinstonObj average: 273.120ms
BoleObj average: 185.069ms
LogLevelObject average: 433.425ms
PinoObj average: 119.315ms
PinoMinLengthObj average: 76.968ms
PinoNodeStreamObj average: 164.268ms

```

`pino.info(aBigDeeplyNestedObject)`:

```

DEEP-OBJECT benchmark averages
BunyanDeepObj average: 1.839ms
WinstonDeepObj average: 5.604ms
BoleDeepObj average: 3.422ms
LogLevelDeepObj average: 11.716ms
PinoDeepObj average: 2.256ms
PinoMinLengthDeepObj average: 2.240ms
PinoNodeStreamDeepObj average: 2.595ms

```

`pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})`:

For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended
to include a timestamp and [bole](http://npm.im/bole) had
`fastTime` mode switched on.

diff --git a/node_modules/pino/docs/browser.md b/node_modules/pino/docs/browser.md
new file mode 100644
index 0000000..360c993
--- /dev/null
+++ b/node_modules/pino/docs/browser.md
@@ -0,0 +1,242 @@
+# Browser API

Pino is compatible with [`browserify`](https://npm.im/browserify) for browser-side usage.
This can be useful with isomorphic/universal JavaScript code.

By default, in the browser,
`pino` uses corresponding [Log4j](https://en.wikipedia.org/wiki/Log4j) `console` methods (`console.error`, `console.warn`, `console.info`, `console.debug`, `console.trace`) and uses `console.error` for any `fatal` level logs.
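For instance, the same logging code can ship to the browser unchanged; a minimal sketch of the default behavior described above:

```js
// after bundling (e.g. with browserify), this module runs unchanged in the browser
const pino = require('pino')
const logger = pino({ level: 'info' })

logger.info('hello from the page') // forwarded to console.info
logger.fatal('unrecoverable')      // fatal has no console method, so console.error is used
```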
## Options

Pino can be passed a `browser` object in the options object,
which can have the following properties:

### `asObject` (Boolean)

```js
const pino = require('pino')({browser: {asObject: true}})
```

The `asObject` option will create a pino-like log object instead of
passing all arguments to a console method, for instance:

```js
pino.info('hi') // creates and logs {msg: 'hi', level: 30, time: <ts>}
```

When `write` is set, `asObject` will always be `true`.

### `asObjectBindingsOnly` (Boolean)

```js
const pino = require('pino')({browser: {asObjectBindingsOnly: true}})
```

The `asObjectBindingsOnly` option is similar to `asObject` but will keep the message
and arguments unformatted, deferring the formatting of the message to the
actual call to the `console` methods, where browser devtools offer richer
formatting than if pino first formatted the message to a string.

```js
pino.info('hello %s', 'world') // creates and logs {level: 30, time: <ts>}, 'hello %s', 'world'
```

### `formatters` (Object)

An object containing functions for formatting the shape of the log lines. When provided, it enables the logger to produce a pino-like log object with customized formatting. Currently, it supports formatting for the `level` object only.

##### `level`

Changes the shape of the log level. The default shape is `{ level: number }`.
The function takes two arguments, the label of the level (e.g. `'info'`)
and the numeric value (e.g. `30`).

```js
const formatters = {
  level (label, number) {
    return { level: number }
  }
}
```


### `write` (Function | Object)

Instead of passing log messages to `console.log` they can be passed to
a supplied function.

If `write` is set to a single function, all logging objects are passed
to this function.

```js
const pino = require('pino')({
  browser: {
    write: (o) => {
      // do something with o
    }
  }
})
```

If `write` is an object, it can have methods that correspond to the
levels. When a message is logged at a given level, the corresponding
method is called. If a method isn't present, the logging falls back
to using the `console`.


```js
const pino = require('pino')({
  browser: {
    write: {
      info: function (o) {
        //process info log object
      },
      error: function (o) {
        //process error log object
      }
    }
  }
})
```

### `serialize` (Boolean | Array)

The serializers provided to `pino` are ignored by default in the browser, including
the standard serializers provided with Pino. Since the default destination for log
messages is the console, values such as `Error` objects are enhanced for inspection,
which they otherwise wouldn't be if the Error serializer was enabled.

We can turn all serializers on,

```js
const pino = require('pino')({
  browser: {
    serialize: true
  }
})
```

Or we can selectively enable them via an array:

```js
const pino = require('pino')({
  serializers: {
    custom: myCustomSerializer,
    another: anotherSerializer
  },
  browser: {
    serialize: ['custom']
  }
})
// following will apply myCustomSerializer to the custom property,
// but will not apply anotherSerializer to the another key
pino.info({custom: 'a', another: 'b'})
```

When `serialize` is `true`, the standard error serializer is also enabled (see https://github.com/pinojs/pino/blob/master/docs/api.md#stdSerializers).
This is a global serializer, which will apply to any `Error` objects passed to the logger methods.
If `serialize` is an array, the standard error serializer is also automatically enabled. It can
be explicitly disabled by including the string `!stdSerializers.err` in the serialize array, like so:

```js
const pino = require('pino')({
  serializers: {
    custom: myCustomSerializer,
    another: anotherSerializer
  },
  browser: {
    serialize: ['!stdSerializers.err', 'custom'] //will not serialize Errors, will serialize `custom` keys
  }
})
```

The `serialize` array also applies to any child logger serializers (see https://github.com/pinojs/pino/blob/master/docs/api.md#discussion-2
for how to set child-bound serializers).

Unlike server pino, the serializers apply to every object passed to the logger method;
if the `asObject` option is `true`, the serializers apply to the
first object (as in server pino).

For more info on serializers see https://github.com/pinojs/pino/blob/master/docs/api.md#mergingobject.

### `transmit` (Object)

An object with `send` and `level` properties.

The `transmit.level` property specifies the minimum level (inclusive) at which the `send` function
should be called. If not supplied, the `send` function will be called based on the main logging `level`
(set via `options.level`, defaulting to `info`).

The `transmit` object must have a `send` function which will be called after
writing the log message. The `send` function is passed the level of the log
message and a `logEvent` object.

The `logEvent` object is a data structure representing a log message: the arguments
passed to a logger statement, the level
at which they were logged, and the hierarchy of child bindings.

The `logEvent` format is structured like so:

```js
{
  ts = Number,
  messages = Array,
  bindings = Array,
  level: { label = String, value = Number}
}
```

The `ts` property is a Unix epoch timestamp in milliseconds, the time is taken from the moment the
logger method is called.

The `messages` array holds all arguments passed to the logger method (for instance, `logger.info('a', 'b', 'c')`
would result in a `messages` array of `['a', 'b', 'c']`).

The `bindings` array represents each child logger (if any), and the relevant bindings.
For instance, given `logger.child({a: 1}).child({b: 2}).info({c: 3})`, the bindings array
would hold `[{a: 1}, {b: 2}]` and the `messages` array would be `[{c: 3}]`. The `bindings`
are ordered according to their position in the child logger hierarchy, with the lowest index
being the top of the hierarchy.

By default, serializers are not applied to log output in the browser, but they will *always* be
applied to `messages` and `bindings` in the `logEvent` object. This allows us to ensure a consistent
format for all values between server and client.

The `level` holds the label (for instance `info`) and the corresponding numerical value
(for instance `30`). This could be important in cases where client-side level values and
labels differ from server-side.
The point of the `send` function is to remotely record log messages:

```js
const pino = require('pino')({
  browser: {
    transmit: {
      level: 'warn',
      send: function (level, logEvent) {
        if (level === 'warn') {
          // maybe send the logEvent to a separate endpoint
          // or maybe analyze the messages further before sending
        }
        // we could also use the `logEvent.level.value` property to determine
        // numerical value
        if (logEvent.level.value >= 50) { // covers error and fatal
          // send the logEvent somewhere
        }
      }
    }
  }
})
```

### `disabled` (Boolean)

```js
const pino = require('pino')({browser: {disabled: true}})
```

The `disabled` option will disable logging in the browser if set
to `true`. By default, it is set to `false`.
diff --git a/node_modules/pino/docs/bundling.md b/node_modules/pino/docs/bundling.md
new file mode 100644
index 0000000..c2aee8f
--- /dev/null
+++ b/node_modules/pino/docs/bundling.md
@@ -0,0 +1,40 @@
+# Bundling

Due to its internal architecture based on Worker Threads, it is not possible to bundle Pino *without* generating additional files.

In particular, a bundler must ensure that the following files are also bundled separately:

* `lib/worker.js` from the `thread-stream` dependency
* `file.js`
* `lib/worker.js`
* Any transport used by the user (like `pino-pretty`)

Once the files above have been generated, the bundler must also register them by injecting code that sets `__bundlerPathsOverrides` on the `globalThis` object.

The variable is an object whose keys are identifiers for the files and whose values are the paths of the files relative to the currently bundled files.

Example:

```javascript
// Inject this using your bundle plugin
globalThis.__bundlerPathsOverrides = {
  'thread-stream-worker': pinoWebpackAbsolutePath('./thread-stream-worker.js'),
  'pino/file': pinoWebpackAbsolutePath('./pino-file.js'),
  'pino-worker': pinoWebpackAbsolutePath('./pino-worker.js'),
  'pino-pretty': pinoWebpackAbsolutePath('./pino-pretty.js'),
};
```

Note that `pino/file`, `pino-worker` and `thread-stream-worker` are required identifiers. Other identifiers are possible based on the user configuration.

## Webpack Plugin

If you are a Webpack user, you can achieve this with [pino-webpack-plugin](https://github.com/pinojs/pino-webpack-plugin) without manual configuration of `__bundlerPathsOverrides`; however, you still need to configure it manually if you are using other bundlers.

## Esbuild Plugin

[esbuild-plugin-pino](https://github.com/davipon/esbuild-plugin-pino) is the esbuild plugin to generate extra pino files for bundling.

## Bun Plugin

[bun-plugin-pino](https://github.com/vktrl/bun-plugin-pino) is the Bun plugin to generate extra pino files for bundling.
\ No newline at end of file
diff --git a/node_modules/pino/docs/child-loggers.md b/node_modules/pino/docs/child-loggers.md
new file mode 100644
index 0000000..13b6ebc
--- /dev/null
+++ b/node_modules/pino/docs/child-loggers.md
@@ -0,0 +1,95 @@
+# Child loggers

Let's assume we want to have `"module":"foo"` added to every log within a
module `foo.js`.
To accomplish this, simply use a child logger:

```js
'use strict'
// import a pino logger instance created with `require('pino')()`
const parentLogger = require('./lib/logger')
const log = parentLogger.child({module: 'foo'})

function doSomething () {
  log.info('doSomething invoked')
}

module.exports = {
  doSomething
}
```

## Cost of child logging

Child logger creation is fast:

```
benchBunyanCreation*10000: 564.514ms
benchBoleCreation*10000: 283.276ms
benchPinoCreation*10000: 258.745ms
benchPinoExtremeCreation*10000: 150.506ms
```

Logging through a child logger has little performance penalty:

```
benchBunyanChild*10000: 556.275ms
benchBoleChild*10000: 288.124ms
benchPinoChild*10000: 231.695ms
benchPinoExtremeChild*10000: 122.117ms
```

Logging via the child logger of a child logger also has negligible overhead:

```
benchBunyanChildChild*10000: 559.082ms
benchPinoChildChild*10000: 229.264ms
benchPinoExtremeChildChild*10000: 127.753ms
```

## Duplicate keys caveat

Naming conflicts can arise between child loggers and
children of child loggers.

This isn't as bad as it sounds: even if the same keys are used in
parent and child loggers, Pino resolves the conflict in the sanest way possible.

For example, consider the following:

```js
const pino = require('pino')
pino(pino.destination('./my-log'))
  .child({a: 'property'})
  .child({a: 'prop'})
  .info('howdy')
```

```sh
$ cat my-log
{"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":1459534114473,"a":"property","a":"prop"}
```

Notice how there are two keys named `a` in the JSON output. The sub-child's properties
appear after the parent child's properties.

At some point, the logs will most likely be processed (for instance with a [transport](transports.md)),
and this generally involves parsing. `JSON.parse` will return an object where the conflicting
namespace holds the final value assigned to it:

```sh
$ cat my-log | node -e "process.stdin.once('data', (line) => console.log(JSON.stringify(JSON.parse(line))))"
{"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":"2016-04-01T18:08:34.473Z","a":"prop"}
```

Ultimately the conflict is resolved by taking the last value, which aligns with Bunyan's child logging
behavior.

There may be cases where this edge case becomes problematic if a JSON parser with alternative behavior
is used to process the logs. It's recommended to be conscious of namespace conflicts with child loggers,
in light of an expected log processing approach.

One of Pino's performance tricks is to avoid building objects and stringifying
them, so we're building strings instead. This is why duplicate keys between
parents and children will end up in the log output.
diff --git a/node_modules/pino/docs/diagnostics.md b/node_modules/pino/docs/diagnostics.md
new file mode 100644
index 0000000..c4c370d
--- /dev/null
+++ b/node_modules/pino/docs/diagnostics.md
@@ -0,0 +1,16 @@
+# Diagnostics

Pino provides [tracing channel][tc] events that allow insight into the
internal workings of the library. The currently supported events are:

+ `tracing:pino_asJson:start`: emitted when the final serialization process
  of logs is started. The emitted event payload has the following fields:
  - `instance`: the Pino instance associated with the function
  - `arguments`: the arguments passed to the function
+ `tracing:pino_asJson:end`: emitted at the end of the final serialization
  process.
  The emitted event payload has the following fields:
  - `instance`: the Pino instance associated with the function
  - `arguments`: the arguments passed to the function
  - `result`: the finalized, newline-delimited log line as a string

[tc]: https://nodejs.org/docs/latest/api/diagnostics_channel.html#tracingchannel-channels
diff --git a/node_modules/pino/docs/ecosystem.md b/node_modules/pino/docs/ecosystem.md
new file mode 100644
index 0000000..a0e6e06
--- /dev/null
+++ b/node_modules/pino/docs/ecosystem.md
@@ -0,0 +1,86 @@
+# Pino Ecosystem

This is a list of ecosystem modules that integrate with `pino`.

Modules listed under [Core](#core) are maintained by the Pino team. Modules
listed under [Community](#community) are maintained by independent community
members.

Please send a PR to add new modules!


## Core

### Frameworks
+ [`express-pino-logger`](https://github.com/pinojs/express-pino-logger): use
Pino to log requests within [express](https://expressjs.com/).
+ [`koa-pino-logger`](https://github.com/pinojs/koa-pino-logger): use Pino to
log requests within [Koa](https://koajs.com/).
+ [`restify-pino-logger`](https://github.com/pinojs/restify-pino-logger): use
Pino to log requests within [restify](http://restify.com/).
+ [`rill-pino-logger`](https://github.com/pinojs/rill-pino-logger): use Pino as
the logger for the [Rill framework](https://rill.site/).

### Utilities
+ [`pino-arborsculpture`](https://github.com/pinojs/pino-arborsculpture): change
log levels at runtime.
+ [`pino-caller`](https://github.com/pinojs/pino-caller): add callsite to the log line.
+ [`pino-clf`](https://github.com/pinojs/pino-clf): reformat Pino logs into
Common Log Format.
+ [`pino-console`](https://github.com/pinojs/pino-console): adapter for the [WHATWG Console](https://console.spec.whatwg.org/) spec.
+ [`pino-debug`](https://github.com/pinojs/pino-debug): use Pino to interpret
[`debug`](https://npm.im/debug) logs.
+ [`pino-elasticsearch`](https://github.com/pinojs/pino-elasticsearch): send
Pino logs to an Elasticsearch instance.
+ [`pino-eventhub`](https://github.com/pinojs/pino-eventhub): send Pino logs
to an [Event Hub](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-what-is-event-hubs).
+ [`pino-filter`](https://github.com/pinojs/pino-filter): filter Pino logs in
the same fashion as the [`debug`](https://npm.im/debug) module.
+ [`pino-gelf`](https://github.com/pinojs/pino-gelf): reformat Pino logs into
GELF format for Graylog.
+ [`pino-hapi`](https://github.com/pinojs/hapi-pino): use Pino as the logger
for [Hapi](https://hapijs.com/).
+ [`pino-http`](https://github.com/pinojs/pino-http): easily use Pino to log
requests with the core `http` module.
+ [`pino-http-print`](https://github.com/pinojs/pino-http-print): reformat Pino
logs into traditional [HTTPD](https://httpd.apache.org/) style request logs.
+ [`pino-mongodb`](https://github.com/pinojs/pino-mongodb): store Pino logs
in a MongoDB database.
+ [`pino-multi-stream`](https://github.com/pinojs/pino-multi-stream): send
logs to multiple destination streams (slow!).
+ [`pino-noir`](https://github.com/pinojs/pino-noir): redact sensitive information
in logs.
+ [`pino-pretty`](https://github.com/pinojs/pino-pretty): basic prettifier to
make log lines human-readable.
+ [`pino-socket`](https://github.com/pinojs/pino-socket): send logs to TCP or UDP
destinations.
+ [`pino-std-serializers`](https://github.com/pinojs/pino-std-serializers): the
core object serializers used within Pino.
+ [`pino-syslog`](https://github.com/pinojs/pino-syslog): reformat Pino logs
to standard syslog format.
+ [`pino-tee`](https://github.com/pinojs/pino-tee): pipe Pino logs into files
based upon log levels.
+ [`pino-test`](https://github.com/pinojs/pino-test): a set of utilities for
verifying logs generated by the Pino logger.
+ [`pino-toke`](https://github.com/pinojs/pino-toke): reformat Pino logs
according to a given format string.



## Community

+ [`@google-cloud/pino-logging-gcp-config`](https://www.npmjs.com/package/@google-cloud/pino-logging-gcp-config): config helper and formatter to output [Google Cloud Platform Structured Logging](https://cloud.google.com/logging/docs/structured-logging).
+ [`@newrelic/pino-enricher`](https://github.com/newrelic/newrelic-node-log-extensions/blob/main/packages/pino-log-enricher): a log customization to add New Relic context to use [Logs In Context](https://docs.newrelic.com/docs/logs/logs-context/logs-in-context/).
+ [`cloud-pine`](https://github.com/metcoder95/cloud-pine): transport that provides abstraction and compatibility with [`@google-cloud/logging`](https://www.npmjs.com/package/@google-cloud/logging).
+ [`cls-proxify`](https://github.com/keenondrums/cls-proxify): integration of pino and [CLS](https://github.com/jeff-lewis/cls-hooked). Useful for creating dynamically configured child loggers (e.g. with added trace ID) for each request.
+ [`crawlee-pino`](https://github.com/imyelo/crawlee-pino): use Pino to log within Crawlee.
+ [`eslint-plugin-pino`](https://github.com/orzarchi/eslint-plugin-pino): linting rules for pino usage, primarily for preventing missing context in logs due to incorrect argument order.
+ [`pino-colada`](https://github.com/lrlna/pino-colada): cute ndjson formatter for pino.
+ [`pino-dev`](https://github.com/dnjstrom/pino-dev): simple prettifier for pino with built-in support for common ecosystem packages.
+ [`pino-fluentd`](https://github.com/davidedantonio/pino-fluentd): send Pino logs to Elasticsearch,
MongoDB, and many [others](https://www.fluentd.org/dataoutputs) via Fluentd.
+ [`pino-lambda`](https://github.com/FormidableLabs/pino-lambda): log transport for CloudWatch support inside AWS Lambda.
+ [`pino-pretty-min`](https://github.com/unjello/pino-pretty-min): a minimal
prettifier inspired by the [logrus](https://github.com/sirupsen/logrus) logger.
+ [`pino-rotating-file`](https://github.com/homeaway/pino-rotating-file): a hapi-pino log transport for splitting logs into separate, automatically rotating files.
+ [`pino-tiny`](https://github.com/holmok/pino-tiny): a tiny (and extensible?) little log formatter for pino.
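As a brief illustration of how these modules integrate, a minimal sketch using [`pino-http`](https://github.com/pinojs/pino-http) from the Core list above (consult its README for the authoritative API):

```js
const http = require('node:http')
const httpLogger = require('pino-http')() // must be installed separately

http.createServer((req, res) => {
  httpLogger(req, res) // attaches req.log and logs the request/response pair
  req.log.info('handling request')
  res.end('ok')
}).listen(3000)
```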
diff --git a/node_modules/pino/docs/help.md b/node_modules/pino/docs/help.md
new file mode 100644
index 0000000..623d0a2
--- /dev/null
+++ b/node_modules/pino/docs/help.md
@@ -0,0 +1,345 @@
+# Help

* [Log rotation](#rotate)
* [Reopening log files](#reopening)
* [Saving to multiple files](#multiple)
* [Log filtering](#filter-logs)
* [Transports and systemd](#transport-systemd)
* [Log to different streams](#multi-stream)
* [Duplicate keys](#dupe-keys)
* [Log levels as labels instead of numbers](#level-string)
* [Pino with `debug`](#debug)
* [Unicode and Windows terminal](#windows)
* [Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels](#stackdriver)
* [Using Grafana Loki to evaluate pino logs in a kubernetes cluster](#grafana-loki)
* [Avoid Message Conflict](#avoid-message-conflict)
* [Best performance for logging to `stdout`](#best-performance-for-stdout)
* [Testing](#testing)


## Log rotation

Use a separate tool for log rotation; we recommend [logrotate](https://github.com/logrotate/logrotate).
Suppose we output our logs to `/var/log/myapp.log` like so:

```
$ node server.js > /var/log/myapp.log
```

We would then rotate our log files with logrotate by adding the following to `/etc/logrotate.d/myapp`:

```
/var/log/myapp.log {
  su root
  daily
  rotate 7
  delaycompress
  compress
  notifempty
  missingok
  copytruncate
}
```

The `copytruncate` configuration has a very slight possibility of lost log lines due
to a gap between copying and truncating: the truncate may occur after additional lines
have been written. To perform log rotation without `copytruncate`, see the [Reopening log files](#reopening)
help.


## Reopening log files

In cases where a log rotation tool doesn't offer copy-truncate capabilities,
or where using them is deemed inappropriate, `pino.destination`
can reopen file paths after a file has been moved away.

One way to use this is to set up a `SIGUSR2` or `SIGHUP` signal handler that
reopens the log file destination, making sure to write the process PID out
somewhere so the log rotation tool knows where to send the signal.

```js
const pino = require('pino')
const fs = require('node:fs')

// write the process pid to a well-known location for later
fs.writeFileSync('/var/run/myapp.pid', String(process.pid))

const dest = pino.destination('/log/file')
const logger = pino(dest)
process.on('SIGHUP', () => dest.reopen())
```

The log rotation tool can then be configured to send this signal to the process
after a log rotation event has occurred.

Given a similar scenario as in the [Log rotation](#rotate) section, a basic
`logrotate` config that aligns with this strategy would look similar to the following:

```
/var/log/myapp.log {
  su root
  daily
  rotate 7
  delaycompress
  compress
  notifempty
  missingok
  postrotate
    kill -HUP `cat /var/run/myapp.pid`
  endscript
}
```


## Saving to multiple files

See [`pino.multistream`](/docs/api.md#pino-multistream).


## Log Filtering

The Pino philosophy advocates common, preexisting system utilities.

Some recommendations in line with this philosophy are:

1. Use [`grep`](https://linux.die.net/man/1/grep):
   ```sh
   $ # View all "INFO" level logs
   $ node app.js | grep '"level":30'
   ```
1. Use [`jq`](https://stedolan.github.io/jq/):
   ```sh
   $ # View all "ERROR" level logs
   $ node app.js | jq 'select(.level == 50)'
   ```


## Transports and systemd

`systemd` makes it complicated to use pipes in services.
One method for overcoming
this challenge is to use a subshell:

```
ExecStart=/bin/sh -c '/path/to/node app.js | pino-transport'
```


## Log to different streams

Pino's default log destination is the singular destination of `stdout`. While
not recommended for performance reasons, multiple destinations can be targeted
by using [`pino.multistream`](/docs/api.md#pino-multistream).

In this example, we use `stderr` for `error` level logs and `stdout` as default
for all other levels (e.g. `debug`, `info`, and `warn`).

```js
const pino = require('pino')
var streams = [
  {level: 'debug', stream: process.stdout},
  {level: 'error', stream: process.stderr},
  {level: 'fatal', stream: process.stderr}
]

const logger = pino({
  name: 'my-app',
  level: 'debug', // must be the lowest level of all streams
}, pino.multistream(streams))
```


## How Pino handles duplicate keys

Duplicate keys are possible when a child logger logs an object with a key that
collides with a key in the child logger's bindings.

See the [child logger duplicate keys caveat](/docs/child-loggers.md#duplicate-keys-caveat)
for information on how this is handled.


## Log levels as labels instead of numbers

Pino log lines are meant to be parsable. Thus, Pino's default mode of operation
is to print the level value instead of the string name.
However, you can use the [`formatters`](/docs/api.md#formatters-object) option
with a [`level`](/docs/api.md#level) function to print the string name instead of the level value:

```js
const pino = require('pino')

const log = pino({
  formatters: {
    level: (label) => {
      return {
        level: label
      }
    }
  }
})

log.info('message')

// {"level":"info","time":1661632832200,"pid":18188,"hostname":"foo","msg":"message"}
```

Although it works, we recommend using one of these options instead if you are able:

1. If the only change desired is the name, then a transport can be used. One such
transport is [`pino-text-level-transport`](https://npm.im/pino-text-level-transport).
1. Use a prettifier like [`pino-pretty`](https://npm.im/pino-pretty) to make
the logs human friendly.


## Pino with `debug`

The popular [`debug`](https://npm.im/debug) package is used in many modules across the ecosystem.

The [`pino-debug`](https://github.com/pinojs/pino-debug) module
can capture calls to `debug` loggers and run them
through `pino` instead. This results in a 10x (20x in asynchronous mode)
performance improvement, even though `pino-debug` is logging additional
data and wrapping it in JSON.

To quickly enable this, install [`pino-debug`](https://github.com/pinojs/pino-debug)
and preload it with the `-r` flag, enabling any `debug` logs with the
`DEBUG` environment variable:

```sh
$ npm i pino-debug
$ DEBUG=* node -r pino-debug app.js
```

[`pino-debug`](https://github.com/pinojs/pino-debug) also offers fine-grained control to map specific `debug`
namespaces to `pino` log levels. See [`pino-debug`](https://github.com/pinojs/pino-debug)
for more.


## Unicode and Windows terminal

Pino uses [sonic-boom](https://github.com/mcollina/sonic-boom) to speed
up logging. Internally, it uses [`fs.write`](https://nodejs.org/dist/latest-v10.x/docs/api/fs.html#fs_fs_write_fd_string_position_encoding_callback) to write log lines directly to a file
descriptor.
+On Windows, Unicode output is not handled properly in the
+terminal (both `cmd.exe` and PowerShell), and as such the output could
+be visualized incorrectly if the log lines include utf8 characters. It
+is possible to configure the terminal to visualize those characters
+correctly by running [`chcp 65001`](https://ss64.com/nt/chcp.html) in
+the terminal. This is a known limitation of Node.js.
+
+
+## Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels
+
+Google Cloud Logging uses `severity` levels instead of log levels. As a result, all logs may show as INFO
+level logs while completely ignoring the level set in the pino log. Google Cloud Logging also prefers that
+log data is present inside a `message` key instead of the default `msg` key that Pino uses. Use a technique
+similar to the one below to retain log levels in Google Cloud Logging.
+
+```js
+const pino = require('pino')
+
+// https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity
+const PinoLevelToSeverityLookup = {
+  trace: 'DEBUG',
+  debug: 'DEBUG',
+  info: 'INFO',
+  warn: 'WARNING',
+  error: 'ERROR',
+  fatal: 'CRITICAL',
+};
+
+const defaultPinoConf = {
+  messageKey: 'message',
+  formatters: {
+    level(label, number) {
+      return {
+        severity: PinoLevelToSeverityLookup[label] || PinoLevelToSeverityLookup['info'],
+        level: number,
+      }
+    }
+  },
+}
+
+module.exports = function createLogger(options) {
+  return pino(Object.assign({}, options, defaultPinoConf))
+}
+```
+
+A library that configures Pino for
+[Google Cloud Structured Logging](https://cloud.google.com/logging/docs/structured-logging)
+is available at:
+[@google-cloud/pino-logging-gcp-config](https://www.npmjs.com/package/@google-cloud/pino-logging-gcp-config)
+
+This library has the following features:
+
++ Converts Pino log levels to Google Cloud Logging log levels, as above
++ Uses `message` instead of `msg` for the message key, as above
++ Adds a millisecond-granularity timestamp in the
+  [structure](https://cloud.google.com/logging/docs/agent/logging/configuration#timestamp-processing)
+  recognised by Google Cloud Logging, e.g.: \
+  `"timestamp":{"seconds":1445470140,"nanos":123000000}`
++ Adds a sequential
+  [`insertId`](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#FIELDS.insert_id)
+  to ensure log messages with identical timestamps are ordered correctly.
++ Logs including an `Error` object have the
+  [`stack_trace`](https://cloud.google.com/error-reporting/docs/formatting-error-messages#log-error)
+  property set so that the error is forwarded to Google Cloud Error Reporting.
++ Includes a
+  [`ServiceContext`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/ServiceContext)
+  object in the logs for Google Cloud Error Reporting, auto-detected from the
+  environment if not specified
++ Maps the OpenTelemetry properties `span_id`, `trace_id`, and `trace_flags`
+  to the equivalent Google Cloud Logging fields.
+
+
+## Using Grafana Loki to evaluate pino logs in a kubernetes cluster
+
+To get pino logs into Grafana Loki there are two options:
+
+1. **Push:** Use [pino-loki](https://github.com/Julien-R44/pino-loki) to send logs directly to Loki (see the sketch after this list).
+1. **Pull:** Configure Grafana Promtail to read and properly parse the logs before sending them to Loki.
+   Similar to Google Cloud logging, this involves remapping the log levels. See this [article](https://medium.com/@janpaepke/structured-logging-in-the-grafana-monitoring-stack-8aff0a5af2f5) for details.
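+
+For the push option, a minimal sketch of wiring up [pino-loki](https://github.com/Julien-R44/pino-loki)
+as a v7+ transport (the `host` value is an assumption for a locally running Loki):
+
+```js
+const pino = require('pino')
+
+// Ship logs to Loki from a worker thread (push model)
+const transport = pino.transport({
+  target: 'pino-loki',
+  options: { host: 'localhost:3100' } // assumed address of a local Loki instance
+})
+
+const logger = pino(transport)
+logger.info('shipped to Loki')
+```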
+
+
+## Avoid Message Conflict
+
+As described in the [`message` documentation](/docs/api.md#message), when a log
+is written like `log.info({ msg: 'a message' }, 'another message')`, the
+final output JSON will have `"msg":"another message"` and the `'a message'`
+string will be lost. To overcome this, the [`logMethod` hook](/docs/api.md#logmethod)
+can be used:
+
+```js
+'use strict'
+
+const log = require('pino')({
+  level: 'debug',
+  hooks: {
+    logMethod (inputArgs, method) {
+      if (inputArgs.length === 2 && inputArgs[0].msg) {
+        inputArgs[0].originalMsg = inputArgs[0].msg
+      }
+      return method.apply(this, inputArgs)
+    }
+  }
+})
+
+log.info('no original message')
+log.info({ msg: 'mapped to originalMsg' }, 'a message')
+
+// {"level":30,"time":1596313323106,"pid":63739,"hostname":"foo","msg":"no original message"}
+// {"level":30,"time":1596313323107,"pid":63739,"hostname":"foo","msg":"a message","originalMsg":"mapped to originalMsg"}
+```
+
+
+## Best performance for logging to `stdout`
+
+The best performance for logging directly to stdout is _usually_ achieved by using the
+default configuration:
+
+```js
+const log = require('pino')();
+```
+
+You should only have to configure custom transports or other settings
+if you have broader logging requirements.
+
+
+## Testing
+
+See [`pino-test`](https://github.com/pinojs/pino-test).
diff --git a/node_modules/pino/docs/lts.md b/node_modules/pino/docs/lts.md
new file mode 100644
index 0000000..2c880cb
--- /dev/null
+++ b/node_modules/pino/docs/lts.md
@@ -0,0 +1,64 @@
+## Long Term Support
+
+Pino's Long Term Support (LTS) is provided according to the schedule laid
+out in this document:
+
+1. Major releases, the "X" in [semantic versioning][semver] X.Y.Z release
+   versions, are supported for a minimum period of six months from their release
+   date. The release date of any specific version can be found at
+   [https://github.com/pinojs/pino/releases](https://github.com/pinojs/pino/releases).
+
+1. Major releases will receive security updates for an additional six months
+   from the release of the next major release. After this period
+   we will still review and release security fixes as long as they are
+   provided by the community and they do not violate other constraints,
+   e.g. the minimum supported Node.js version.
+
+1. Major releases will be tested and verified against all Node.js
+   release lines that are supported by the
+   [Node.js LTS policy](https://github.com/nodejs/Release) within the
+   LTS period of that given Pino release line. This implies that only
+   the latest Node.js release of a given line is supported.
+
+A "month" is defined as 30 consecutive days.
+
+> ## Security Releases and Semver
+>
+> As a consequence of providing long-term support for major releases, there
+> are occasions where we need to release breaking changes as a _minor_
+> version release. Such changes will _always_ be noted in the
+> [release notes](https://github.com/pinojs/pino/releases).
+>
+> To avoid automatically receiving breaking security updates, it is possible to use
+> the tilde (`~`) range qualifier. For example, to get patches for the 6.1
+> release while avoiding an automatic update to the 6.2 release, specify
+> the dependency as `"pino": "~6.1.x"`. Pinning in this way can leave your
+> application vulnerable to fixes shipped in later releases, so please use it with caution.
+
+[semver]: https://semver.org/
+
+
+
+### Schedule
+
+| Version | Release Date | End Of LTS Date | Node.js              |
+| :------ | :----------- | :-------------- | :------------------- |
+| 9.x     | 2024-04-26   | TBD             | 18, 20, 22           |
+| 8.x     | 2022-06-01   | 2024-10-26      | 14, 16, 18, 20       |
+| 7.x     | 2021-10-14   | 2023-06-01      | 12, 14, 16           |
+| 6.x     | 2020-03-07   | 2022-04-14      | 10, 12, 14, 16       |
+
+
+
+### CI tested operating systems
+
+Pino uses GitHub Actions for CI testing. Please refer to
+[GitHub's documentation regarding workflow runners](https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources)
+for details on which virtual environment each of the YAML workflow labels
+below currently maps to:
+
+| OS      | YAML Workflow Label    | Node.js      |
+|---------|------------------------|--------------|
+| Linux   | `ubuntu-latest`        | 18, 20, 22   |
+| Windows | `windows-latest`       | 18, 20, 22   |
+| macOS   | `macos-latest`         | 18, 20, 22   |
diff --git a/node_modules/pino/docs/pretty.md b/node_modules/pino/docs/pretty.md
new file mode 100644
index 0000000..a1a7a92
--- /dev/null
+++ b/node_modules/pino/docs/pretty.md
@@ -0,0 +1,35 @@
+# Pretty Printing
+
+By default, Pino log lines are newline delimited JSON (NDJSON). This is perfect
+for production usage and long-term storage. It's not so great for development
+environments. Thus, Pino logs can be prettified by using a Pino prettifier
+module like [`pino-pretty`][pp]:
+
+1. Install a prettifier module as a separate dependency, e.g. `npm install pino-pretty`.
+2. Instantiate the logger with the `transport.target` option set to `'pino-pretty'`:
+   ```js
+   const pino = require('pino')
+   const logger = pino({
+     transport: {
+       target: 'pino-pretty'
+     },
+   })
+
+   logger.info('hi')
+   ```
+3. The transport option can also have an options object containing `pino-pretty` options:
+   ```js
+   const pino = require('pino')
+   const logger = pino({
+     transport: {
+       target: 'pino-pretty',
+       options: {
+         colorize: true
+       }
+     }
+   })
+
+   logger.info('hi')
+   ```
+
+[pp]: https://github.com/pinojs/pino-pretty
diff --git a/node_modules/pino/docs/redaction.md b/node_modules/pino/docs/redaction.md
new file mode 100644
index 0000000..9b7e4ff
--- /dev/null
+++ b/node_modules/pino/docs/redaction.md
@@ -0,0 +1,135 @@
+# Redaction
+
+> Redaction is not supported in the browser [#670](https://github.com/pinojs/pino/issues/670)
+
+To redact sensitive information, supply paths to keys that hold sensitive data
+using the `redact` option. Note that paths that contain hyphens need to use
+brackets to access the hyphenated property:
+
+```js
+const logger = require('.')({
+  redact: ['key', 'path.to.key', 'stuff.thats[*].secret', 'path["with-hyphen"]']
+})
+
+logger.info({
+  key: 'will be redacted',
+  path: {
+    to: {key: 'sensitive', another: 'thing'}
+  },
+  stuff: {
+    thats: [
+      {secret: 'will be redacted', logme: 'will be logged'},
+      {secret: 'as will this', logme: 'as will this'}
+    ]
+  }
+})
+```
+
+This will output:
+
+```JSON
+{"level":30,"time":1527777350011,"pid":3186,"hostname":"Davids-MacBook-Pro-3.local","key":"[Redacted]","path":{"to":{"key":"[Redacted]","another":"thing"}},"stuff":{"thats":[{"secret":"[Redacted]","logme":"will be logged"},{"secret":"[Redacted]","logme":"as will this"}]}}
+```
+
+The `redact` option can take an array (as shown in the above example) or
+an object. This allows control over *how* information is redacted.
+
+For instance, setting the censor:
+
+```js
+const logger = require('.')({
+  redact: {
+    paths: ['key', 'path.to.key', 'stuff.thats[*].secret'],
+    censor: '**GDPR COMPLIANT**'
+  }
+})
+
+logger.info({
+  key: 'will be redacted',
+  path: {
+    to: {key: 'sensitive', another: 'thing'}
+  },
+  stuff: {
+    thats: [
+      {secret: 'will be redacted', logme: 'will be logged'},
+      {secret: 'as will this', logme: 'as will this'}
+    ]
+  }
+})
+```
+
+This will output:
+
+```JSON
+{"level":30,"time":1527778563934,"pid":3847,"hostname":"Davids-MacBook-Pro-3.local","key":"**GDPR COMPLIANT**","path":{"to":{"key":"**GDPR COMPLIANT**","another":"thing"}},"stuff":{"thats":[{"secret":"**GDPR COMPLIANT**","logme":"will be logged"},{"secret":"**GDPR COMPLIANT**","logme":"as will this"}]}}
```
+
+The `redact.remove` option also allows for the key and value to be removed from output:
+
+```js
+const logger = require('.')({
+  redact: {
+    paths: ['key', 'path.to.key', 'stuff.thats[*].secret'],
+    remove: true
+  }
+})
+
+logger.info({
+  key: 'will be redacted',
+  path: {
+    to: {key: 'sensitive', another: 'thing'}
+  },
+  stuff: {
+    thats: [
+      {secret: 'will be redacted', logme: 'will be logged'},
+      {secret: 'as will this', logme: 'as will this'}
+    ]
+  }
+})
+```
+
+This will output:
+
+```JSON
+{"level":30,"time":1527782356751,"pid":5758,"hostname":"Davids-MacBook-Pro-3.local","path":{"to":{"another":"thing"}},"stuff":{"thats":[{"logme":"will be logged"},{"logme":"as will this"}]}}
+```
+
+See [pino options in API](/docs/api.md#redact-array-object) for `redact` API details.
+
+
+## Path Syntax
+
+The syntax for paths supplied to the `redact` option conforms to the syntax of path
+lookups in standard ECMAScript, with two additions:
+
+* paths may start with bracket notation
+* paths may contain the asterisk `*` to denote a wildcard
+
+Note that paths are **case sensitive**.
+
+By way of example, the following are all valid paths:
+
+* `a.b.c`
+* `a["b-c"].d`
+* `["a-b"].c`
+* `a.b.*`
+* `a[*].b`
+
+## Overhead
+
+Pino's redaction functionality is built on top of [`fast-redact`](https://github.com/davidmarkclements/fast-redact),
+which adds about 2% overhead to `JSON.stringify` when using paths without wildcards.
+
+When used with a pino logger and a single redacted path, any overhead is within noise -
+no way to deterministically measure its effect has been found, because redaction is
+simply not a bottleneck in that case.
+
+However, wildcard redaction does carry a non-trivial cost relative to explicitly declaring the keys
+(50% in a case where four keys are redacted across two objects). See
+the [`fast-redact` benchmarks](https://github.com/davidmarkclements/fast-redact#benchmarks) for details.
+
+## Safety
+
+The `redact` option is intended as an initialization-time configuration option.
+Path strings must not originate from user input.
+The `fast-redact` module uses a VM context to syntax-check the paths; user input
+should never be combined with such an approach. See the [`fast-redact` Caveat](https://github.com/davidmarkclements/fast-redact#caveat)
+and the [`fast-redact` Approach](https://github.com/davidmarkclements/fast-redact#approach) for in-depth information.
diff --git a/node_modules/pino/docs/transports.md b/node_modules/pino/docs/transports.md
new file mode 100644
index 0000000..5c4ae64
--- /dev/null
+++ b/node_modules/pino/docs/transports.md
@@ -0,0 +1,1263 @@
+# Transports
+
+Pino transports can be used for both transmitting and transforming log output.
+
+The way Pino generates logs:
+
+1. Reduces the impact of logging on an application to the absolute minimum.
+2. Gives greater flexibility in how logs are processed and stored.
+
+It is recommended that any log transformation or transmission is performed either
+in a separate thread or a separate process.
+
+Before Pino v7, transports would ideally operate in a separate process - these are
+now referred to as [Legacy Transports](#legacy-transports).
+
+From Pino v7 upwards, transports can also operate inside a [Worker Thread][worker-thread]
+and can be used or configured via the options object passed to `pino` on initialization.
+In this case the transports always operate asynchronously (unless `options.sync` is set
+to `true` in the transport options), and logs are flushed as quickly as possible, with
+nothing further for the main thread to do.
+
+[worker-thread]: https://nodejs.org/dist/latest-v14.x/docs/api/worker_threads.html
+
+## v7+ Transports
+
+A transport is a module that exports a default function that returns a writable stream:
+
+```js
+import { createWriteStream } from 'node:fs'
+
+export default (options) => {
+  return createWriteStream(options.destination)
+}
+```
+
+Let's imagine the above defines our "transport" as the file `my-transport.mjs`
+(ESM files are supported even if the project is written in CJS).
+
+We would set up our transport by creating a transport stream with `pino.transport`
+and passing it to the `pino` function:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: '/absolute/path/to/my-transport.mjs'
+})
+pino(transport)
+```
+
+The transport code will be executed in a separate worker thread. The main thread
+will write logs to the worker thread, which will write them to the stream returned
+from the function exported from the transport file/module.
+
+The exported function can also be async. If we use an async function, we can throw early
+if the transport could not be opened. As an example:
+
+```js
+import fs from 'node:fs'
+import { once } from 'events'
+export default async (options) => {
+  const stream = fs.createWriteStream(options.destination)
+  await once(stream, 'open')
+  return stream
+}
+```
+
+While initializing the stream, we're able to use `await` to perform asynchronous operations. In this
+case, we wait for the write stream's `open` event.
+
+Let's imagine the above was published to npm with the module name `some-file-transport`.
+
+The `options.destination` value can be set when creating the transport stream with `pino.transport` like so:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'some-file-transport',
+  options: { destination: '/dev/null' }
+})
+pino(transport)
+```
+
+Note that here we've specified the module by package name rather than by relative path. The options object we provide
+is serialized and injected into the transport worker thread, then passed to the module's exported function.
+This means that the options object can only contain types that are supported by the
+[Structured Clone Algorithm][sca], which is used to (de)serialize objects between threads.
+
+What if we wanted to use both transports, but send only error logs to `my-transport.mjs` while
+sending all logs to `some-file-transport`?
+We can use the `pino.transport` function's `level` option:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  targets: [
+    { target: '/absolute/path/to/my-transport.mjs', level: 'error' },
+    { target: 'some-file-transport', options: { destination: '/dev/null' } }
+  ]
+})
+pino(transport)
+```
+
+If we're using custom levels, they should be passed in when using more than one transport.
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  targets: [
+    { target: '/absolute/path/to/my-transport.mjs', level: 'error' },
+    { target: 'some-file-transport', options: { destination: '/dev/null' } }
+  ],
+  levels: { foo: 35 }
+})
+pino(transport)
+```
+
+It is also possible to use the `dedupe` option to send logs only to the stream with the highest level.
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  targets: [
+    { target: '/absolute/path/to/my-transport.mjs', level: 'error' },
+    { target: 'some-file-transport', options: { destination: '/dev/null' } }
+  ],
+  dedupe: true
+})
+pino(transport)
+```
+
+To make pino log synchronously, pass `sync: true` in the transport options.
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  targets: [
+    { target: '/absolute/path/to/my-transport.mjs', level: 'error' },
+  ],
+  dedupe: true,
+  sync: true,
+});
+pino(transport);
+```
+
+For more details on `pino.transport` see the [API docs for `pino.transport`][pino-transport].
+
+[pino-transport]: /docs/api.md#pino-transport
+[sca]: https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
+
+
+### Writing a Transport
+
+The module [pino-abstract-transport](https://github.com/pinojs/pino-abstract-transport) provides
+a simple utility to parse each line. Its usage is highly recommended.
+
+You can see an example using an async iterator with ESM:
+
+```js
+import build from 'pino-abstract-transport'
+import SonicBoom from 'sonic-boom'
+import { once } from 'events'
+
+export default async function (opts) {
+  // SonicBoom is necessary to avoid loops with the main thread.
+  // It is the same as pino.destination().
+  const destination = new SonicBoom({ dest: opts.destination || 1, sync: false })
+  await once(destination, 'ready')
+
+  return build(async function (source) {
+    for await (let obj of source) {
+      const toDrain = !destination.write(obj.msg.toUpperCase() + '\n')
+      // This block will handle backpressure
+      if (toDrain) {
+        await once(destination, 'drain')
+      }
+    }
+  }, {
+    async close (err) {
+      destination.end()
+      await once(destination, 'close')
+    }
+  })
+}
+```
+
+or using Node.js streams and CommonJS:
+
+```js
+'use strict'
+
+const build = require('pino-abstract-transport')
+const SonicBoom = require('sonic-boom')
+
+module.exports = function (opts) {
+  const destination = new SonicBoom({ dest: opts.destination || 1, sync: false })
+  return build(function (source) {
+    source.pipe(destination)
+  }, {
+    close (err, cb) {
+      destination.end()
+      destination.on('close', cb.bind(null, err))
+    }
+  })
+}
```
+
+(It is also possible to use async iterators with CommonJS, and streams with ESM.)
+
+To consume async iterators in batches, consider using the [hwp](https://github.com/mcollina/hwp) library.
+
+The `close()` function is needed to make sure that the stream is closed and flushed when its
+callback is called or the returned promise resolves. Otherwise, log lines will be lost.
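+
+To try a transport written this way end to end, point `pino.transport` at the file.
+A minimal sketch, assuming the async-iterator example above was saved at the absolute
+path shown (both the path and the `destination` value are illustrative):
+
+```js
+const pino = require('pino')
+
+// Boot the custom transport in a worker thread; the options object is
+// structured-clone-serialized and handed to the transport's exported function.
+const transport = pino.transport({
+  target: '/absolute/path/to/my-transport.mjs', // the ESM example above
+  options: { destination: './app.log' }         // consumed by the transport itself
+})
+
+const logger = pino(transport)
+logger.info('hello world') // the example transport writes "HELLO WORLD" to app.log
+```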
+
+### Writing to a custom transport & stdout
+
+If you want to use a custom transport and also output the log entries with default
+processing to STDOUT, you can use the `'pino/file'` transport configured with `destination: 1`:
+
+```js
+  const transports = [
+    {
+      target: 'pino/file',
+      options: { destination: 1 } // this writes to STDOUT
+    },
+    {
+      target: 'my-custom-transport',
+      options: { someParameter: true }
+    }
+  ]
+
+  const logger = pino(pino.transport({ targets: transports }))
+```
+
+### Creating a transport pipeline
+
+As an example, the following transport returns a `Transform` stream:
+
+```js
+import build from 'pino-abstract-transport'
+import { pipeline, Transform } from 'node:stream'
+export default async function (options) {
+  return build(function (source) {
+    const myTransportStream = new Transform({
+      // Make sure autoDestroy is set,
+      // this is needed in Node v12 or when using the
+      // readable-stream module.
+      autoDestroy: true,
+
+      objectMode: true,
+      transform (chunk, enc, cb) {
+
+        // modifies the payload somehow
+        chunk.service = 'pino'
+
+        // stringify the payload again
+        this.push(`${JSON.stringify(chunk)}\n`)
+        cb()
+      }
+    })
+    pipeline(source, myTransportStream, () => {})
+    return myTransportStream
+  }, {
+    // This is needed to be able to pipeline transports.
+    enablePipelining: true
+  })
+}
+```
+
+Then you can pipeline them with:
+
+```js
+import pino from 'pino'
+
+const logger = pino({
+  transport: {
+    pipeline: [{
+      target: './my-transform.js'
+    }, {
+      // Use target: 'pino/file' with STDOUT descriptor 1 to write
+      // logs without any change.
+      target: 'pino/file',
+      options: { destination: 1 }
+    }]
+  }
+})
+
+logger.info('hello world')
+```
+
+__NOTE: a pipeline has no "default" destination; it must end with a terminating
+target, i.e. a `Writable` stream.__
+
+### TypeScript compatibility
+
+Pino provides basic support for transports written in TypeScript.
+
+Ideally, they should be transpiled to ensure maximum compatibility, but sometimes
+you might want to use tools such as TS-Node to execute your TypeScript
+code without having to go through an explicit transpilation step.
+
+You can use your TypeScript code without explicit transpilation, but there are
+some known caveats:
+- For "pure" TypeScript code, ES imports are still not supported (ES imports are
+  supported once the code is transpiled).
+- Only TS-Node is supported for now; there's no TSM support.
+- Running transport TypeScript code via TS-Node seems to be problematic on
+  Windows systems; there's no official support for that yet.
+
+### Notable transports
+
+#### `pino/file`
+
+The `pino/file` transport routes logs to a file (or file descriptor).
+
+The `options.destination` property may be set to specify the desired file destination.
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino/file',
+  options: { destination: '/path/to/file' }
+})
+pino(transport)
+```
+
+By default, the `pino/file` transport assumes the directory of the destination file exists. If it does not exist, the transport will throw an error when it attempts to open the file for writing. The `mkdir` option may be set to `true` to configure the transport to create the directory, if it does not exist, before opening the file for writing.
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino/file',
+  options: { destination: '/path/to/file', mkdir: true }
+})
+pino(transport)
+```
+
+By default, the `pino/file` transport appends to the destination file if it exists. The `append` option may be set to `false` to configure the transport to truncate the file upon opening it for writing.
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino/file',
+  options: { destination: '/path/to/file', append: false }
+})
+pino(transport)
+```
+
+The `options.destination` property may also be a number to represent a file descriptor. Typically this would be `1` to write to STDOUT or `2` to write to STDERR. If `options.destination` is not set, it defaults to `1`, which means logs will be written to STDOUT. If `options.destination` is a string integer, e.g. `'1'`, it will be coerced to a number and used as a file descriptor. If this is not desired, provide a full path, e.g. `/tmp/1`.
+
+The difference between using the `pino/file` transport builtin and using `pino.destination` is that `pino.destination` runs in the main thread, whereas `pino/file` sets up `pino.destination` in a worker thread.
+
+#### `pino-pretty`
+
+The [`pino-pretty`][pino-pretty] transport prettifies logs.
+
+By default the `pino-pretty` builtin logs to STDOUT.
+
+The `options.destination` property may be set to send the pretty logs to a file or another file descriptor. The following writes the prettified logs to STDOUT (file descriptor `1`); use `2` to send them to STDERR instead:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino-pretty',
+  options: { destination: 1 } // use 2 for stderr
+})
+pino(transport)
+```
+
+### Asynchronous startup
+
+The new transports boot asynchronously, and calling `process.exit()` before the transport
+is ready will cause logs to not be delivered.
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  targets: [
+    { target: '/absolute/path/to/my-transport.mjs', level: 'error' },
+    { target: 'some-file-transport', options: { destination: '/dev/null' } }
+  ]
+})
+const logger = pino(transport)
+
+logger.info('hello')
+
+// If logs are printed before the transport is ready when process.exit(0) is called,
+// they will be lost.
+transport.on('ready', function () {
+  process.exit(0)
+})
+```
+
+## Legacy Transports
+
+A legacy Pino "transport" is a supplementary tool that consumes Pino logs.
+
+Consider the following example for creating a transport:
+
+```js
+const { pipeline, Writable } = require('node:stream')
+const split = require('split2')
+
+const myTransportStream = new Writable({
+  write (chunk, enc, cb) {
+    // apply a transform and send to STDOUT
+    console.log(chunk.toString().toUpperCase())
+    cb()
+  }
+})
+
+pipeline(process.stdin, split(JSON.parse), myTransportStream)
+```
+
+The above defines our "transport" as the file `my-transport-process.js`.
+
+Logs can now be consumed using shell piping:
+
+```sh
+node my-app-which-logs-stuff-to-stdout.js | node my-transport-process.js
+```
+
+Ideally, a transport should consume logs in a separate process from the application.
+Using transports in the same process causes unnecessary load and slows down
+Node's single-threaded event loop.
+
+## Known Transports
+
+PRs to this document are welcome for any new transports!
+
+### Pino v7+ Compatible
+
++ [@axiomhq/pino](#@axiomhq/pino)
++ [@logtail/pino](#@logtail/pino)
++ [@macfja/pino-fingers-crossed](#macfja-pino-fingers-crossed)
++ [@openobserve/pino-openobserve](#pino-openobserve)
++ [pino-airbrake-transport](#pino-airbrake-transport)
++ [pino-axiom](#pino-axiom)
++ [pino-datadog-transport](#pino-datadog-transport)
++ [pino-discord-webhook](#pino-discord-webhook)
++ [pino-elasticsearch](#pino-elasticsearch)
++ [pino-hana](#pino-hana)
++ [pino-logflare](#pino-logflare)
++ [pino-logfmt](#pino-logfmt)
++ [pino-loki](#pino-loki)
++ [pino-opentelemetry-transport](#pino-opentelemetry-transport)
++ [pino-pretty](#pino-pretty)
++ [pino-roll](#pino-roll)
++ [pino-seq-transport](#pino-seq-transport)
++ [pino-sentry-transport](#pino-sentry-transport)
++ [pino-slack-webhook](#pino-slack-webhook)
++ [pino-telegram-webhook](#pino-telegram-webhook)
++ [pino-yc-transport](#pino-yc-transport)
+
+### Legacy
+
++ [pino-applicationinsights](#pino-applicationinsights)
++ [pino-azuretable](#pino-azuretable)
++ [pino-cloudwatch](#pino-cloudwatch)
++ [pino-couch](#pino-couch)
++ [pino-datadog](#pino-datadog)
++ [pino-gelf](#pino-gelf)
++ [pino-http-send](#pino-http-send)
++ [pino-kafka](#pino-kafka)
++ [pino-logdna](#pino-logdna)
++ [pino-loki](#pino-loki)
++ [pino-mq](#pino-mq)
++ [pino-mysql](#pino-mysql)
++ [pino-papertrail](#pino-papertrail)
++ [pino-pg](#pino-pg)
++ [pino-redis](#pino-redis)
++ [pino-sentry](#pino-sentry)
++ [pino-seq](#pino-seq)
++ [pino-socket](#pino-socket)
++ [pino-stackdriver](#pino-stackdriver)
++ [pino-syslog](#pino-syslog)
++ [pino-websocket](#pino-websocket)
+
+
+
+### @axiomhq/pino
+
+[@axiomhq/pino](https://www.npmjs.com/package/@axiomhq/pino) is the official [Axiom](https://axiom.co/) transport for Pino, using [axiom-js](https://github.com/axiomhq/axiom-js).
+
+```javascript
+import pino from 'pino';
+
+const logger = pino(
+  { level: 'info' },
+  pino.transport({
+    target: '@axiomhq/pino',
+    options: {
+      dataset: process.env.AXIOM_DATASET,
+      token: process.env.AXIOM_TOKEN,
+    },
+  }),
+);
+```
+
+then you can use the logger as usual:
+
+```js
+logger.info('Hello from pino!');
+```
+
+For further examples, head over to the [examples](https://github.com/axiomhq/axiom-js/tree/main/examples/pino) directory.
+
+
+### @logtail/pino
+
+The [@logtail/pino](https://www.npmjs.com/package/@logtail/pino) NPM package is a transport that forwards logs to [Logtail](https://logtail.com) by [Better Stack](https://betterstack.com).
+
+[Quick start guide ⇗](https://betterstack.com/docs/logs/javascript/pino)
+
+
+### @macfja/pino-fingers-crossed
+
+[@macfja/pino-fingers-crossed](https://github.com/MacFJA/js-pino-fingers-crossed) is a Pino v7+ transport that holds logs until a log level is reached, so that logs are emitted only when it matters.
+
+```js
+const pino = require('pino');
+const { default: fingersCrossed, enable } = require('@macfja/pino-fingers-crossed')
+
+const logger = pino(fingersCrossed());
+
+logger.info('Will appear immediately')
+logger.error('Will appear immediately')
+
+logger.setBindings({ [enable]: 50 })
+logger.info('Will NOT appear immediately')
+logger.info('Will NOT appear immediately')
+logger.error('Will appear immediately, as well as the 2 previous messages') // error logs are level 50
+logger.info('Will NOT appear')
+logger.info({ [enable]: false }, 'Will appear immediately')
+logger.info('Will NOT appear')
+```
+
+### @openobserve/pino-openobserve
+
+[@openobserve/pino-openobserve](https://github.com/openobserve/pino-openobserve) is a
+Pino v7+ transport that will send logs to an
+[OpenObserve](https://openobserve.ai) instance.
+
+```js
+const pino = require('pino');
+const OpenobserveTransport = require('@openobserve/pino-openobserve');
+
+const logger = pino({
+  level: 'info',
+  transport: {
+    target: OpenobserveTransport,
+    options: {
+      url: 'https://your-openobserve-server.com',
+      organization: 'your-organization',
+      streamName: 'your-stream',
+      auth: {
+        username: 'your-username',
+        password: 'your-password',
+      },
+    },
+  },
+});
+```
+
+For full documentation check the [README](https://github.com/openobserve/pino-openobserve).
+
+
+### pino-airbrake-transport
+
+[pino-airbrake-transport][pino-airbrake-transport] is a Pino v7+ compatible transport to forward log events to [Airbrake][Airbrake]
+from a dedicated worker:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino-airbrake-transport',
+  options: {
+    airbrake: {
+      projectId: 1,
+      projectKey: "REPLACE_ME",
+      environment: "production",
+      // additional options for airbrake
+      performanceStats: false,
+    },
+  },
+  level: "error", // minimum log level that should be sent to airbrake
+})
+pino(transport)
+```
+
+[pino-airbrake-transport]: https://github.com/enricodeleo/pino-airbrake-transport
+[Airbrake]: https://airbrake.io/
+
+
+### pino-applicationinsights
+The [pino-applicationinsights](https://www.npmjs.com/package/pino-applicationinsights) module is a transport that will forward logs to [Azure Application Insights](https://docs.microsoft.com/en-us/azure/azure-monitor/app/app-insights-overview).
+
+Given an application `foo` that logs via pino, you would use `pino-applicationinsights` like so:
+
+``` sh
+$ node foo | pino-applicationinsights --key blablabla
+```
+
+For full documentation of command line switches read the [README](https://github.com/ovhemert/pino-applicationinsights#readme).
+
+
+### pino-axiom
+
+[pino-axiom](https://www.npmjs.com/package/pino-axiom) is a transport that will forward logs to [Axiom](https://axiom.co).
+
+```javascript
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino-axiom',
+  options: {
+    orgId: 'YOUR-ORG-ID',
+    token: 'YOUR-TOKEN',
+    dataset: 'YOUR-DATASET',
+  },
+})
+pino(transport)
+```
+
+
+### pino-azuretable
+The [pino-azuretable](https://www.npmjs.com/package/pino-azuretable) module is a transport that will forward logs to [Azure Table Storage](https://azure.microsoft.com/en-us/services/storage/tables/).
+
+Given an application `foo` that logs via pino, you would use `pino-azuretable` like so:
+
+``` sh
+$ node foo | pino-azuretable --account storageaccount --key blablabla
+```
+
+For full documentation of command line switches read the [README](https://github.com/ovhemert/pino-azuretable#readme).
+
+
+### pino-cloudwatch
+
+[pino-cloudwatch][pino-cloudwatch] is a transport that buffers and forwards logs to [Amazon CloudWatch][].
+
+```sh
+$ node app.js | pino-cloudwatch --group my-log-group
+```
+
+[pino-cloudwatch]: https://github.com/dbhowell/pino-cloudwatch
+[Amazon CloudWatch]: https://aws.amazon.com/cloudwatch/
+
+
+### pino-couch
+
+[pino-couch][pino-couch] uploads each log line as a [CouchDB][CouchDB] document.
+
+```sh
+$ node app.js | pino-couch -U https://couch-server -d mylogs
+```
+
+[pino-couch]: https://github.com/IBM/pino-couch
+[CouchDB]: https://couchdb.apache.org
+
+
+### pino-datadog
+The [pino-datadog](https://www.npmjs.com/package/pino-datadog) module is a transport that will forward logs to [DataDog](https://www.datadoghq.com/) through its API.
+
+Given an application `foo` that logs via pino, you would use `pino-datadog` like so:
+
+``` sh
+$ node foo | pino-datadog --key blablabla
+```
+
+For full documentation of command line switches read the [README](https://github.com/ovhemert/pino-datadog#readme).
+
+
+### pino-datadog-transport
+
+[pino-datadog-transport][pino-datadog-transport] is a Pino v7+ compatible transport to forward log events to [Datadog][Datadog]
+from a dedicated worker:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino-datadog-transport',
+  options: {
+    ddClientConf: {
+      authMethods: {
+        apiKeyAuth: '<your API key>' // placeholder: substitute your Datadog API key
+      }
+    },
+  },
+  level: "error", // minimum log level that should be sent to datadog
+})
+pino(transport)
+```
+
+[pino-datadog-transport]: https://github.com/theogravity/datadog-transports
+[Datadog]: https://www.datadoghq.com/
+
+#### Logstash
+
+The [pino-socket][pino-socket] module can also be used to upload logs to
+[Logstash][logstash] via:
+
+```
+$ node app.js | pino-socket -a 127.0.0.1 -p 5000 -m tcp
+```
+
+Assuming logstash is running on the same host and configured as
+follows:
+
+```
+input {
+  tcp {
+    port => 5000
+  }
+}
+
+filter {
+  json {
+    source => "message"
+  }
+}
+
+output {
+  elasticsearch {
+    hosts => "127.0.0.1:9200"
+  }
+}
+```
+
+See the [Kibana][kibana] documentation to learn how to set it up.
+
+For Docker users, see
+https://github.com/deviantony/docker-elk to set up an ELK stack.
+
+
+### pino-discord-webhook
+
+[pino-discord-webhook](https://github.com/fabulousgk/pino-discord-webhook) is a Pino v7+ compatible transport to forward log events to a [Discord](http://discord.com) webhook from a dedicated worker.
+
+```js
+import pino from 'pino'
+
+const logger = pino({
+  transport: {
+    target: 'pino-discord-webhook',
+    options: {
+      webhookUrl: 'https://discord.com/api/webhooks/xxxx/xxxx',
+    }
+  }
+})
+```
+
+
+### pino-elasticsearch
+
+[pino-elasticsearch][pino-elasticsearch] uploads the log lines in bulk
+to [Elasticsearch][elasticsearch], to be displayed in [Kibana][kibana].
+
+It is extremely simple to use and set up:
+
+```sh
+$ node app.js | pino-elasticsearch
+```
+
+Assuming Elasticsearch is running on localhost.
+
+To connect to an external Elasticsearch instance (recommended for production):
+
+* Check that `network.host` is defined in the `elasticsearch.yml` configuration file.
+  See the [Elasticsearch Network Settings documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-network.html#common-network-settings) for more details.
+* Launch:
+
+```sh
+$ node app.js | pino-elasticsearch --node http://192.168.1.42:9200
+```
+
+Assuming Elasticsearch is running on `192.168.1.42`.
+
+To connect to AWS Elasticsearch:
+
+```sh
+$ node app.js | pino-elasticsearch --node https://es-url.us-east-1.es.amazonaws.com --es-version 6
+```
+
+Then [create an index pattern](https://www.elastic.co/guide/en/kibana/current/setup.html) on `'pino'` (the default index key for `pino-elasticsearch`) on the Kibana instance.
+
+[pino-elasticsearch]: https://github.com/pinojs/pino-elasticsearch
+[elasticsearch]: https://www.elastic.co/products/elasticsearch
+[kibana]: https://www.elastic.co/products/kibana
+
+
+### pino-gelf
+
+Pino GELF ([pino-gelf]) is a transport for the Pino logger. Pino GELF receives Pino logs from stdin and transforms them into [GELF format][gelf] before sending them to a remote [Graylog server][graylog] via UDP.
+
+```sh
+$ node your-app.js | pino-gelf log
+```
+
+[pino-gelf]: https://github.com/pinojs/pino-gelf
+[gelf]: https://docs.graylog.org/en/2.1/pages/gelf.html
+[graylog]: https://www.graylog.org/
+
+
+### pino-hana
+[pino-hana](https://github.com/HiImGiovi/pino-hana) is a Pino v7+ transport that saves pino logs to an SAP HANA database.
+```js
+const pino = require('pino')
+const logger = pino({
+  transport: {
+    target: 'pino-hana',
+    options: {
+      connectionOptions: {
+        host: '<hana db host>', // placeholders: substitute your connection details
+        port: '<hana db port>',
+        user: '<hana db user>',
+        password: '<hana db password>',
+      },
+      schema: '<schema of the target table>',
+      table: '<name of the target table>',
+    },
+  },
+})
+
+logger.info('hi') // this log will be saved into SAP HANA
+```
+For more detailed information about its usage please check the official [documentation](https://github.com/HiImGiovi/pino-hana#readme).
+
+
+### pino-http-send
+
+[pino-http-send](https://npmjs.com/package/pino-http-send) is a configurable and low overhead
+transport that will batch logs and send them to a specified URL.
+
+```console
+$ node app.js | pino-http-send -u http://localhost:8080/logs
+```
+
+
+### pino-kafka
+
+[pino-kafka](https://github.com/ayZagen/pino-kafka) is a transport to send logs to [Apache Kafka](https://kafka.apache.org/).
+
+```sh
+$ node index.js | pino-kafka -b 10.10.10.5:9200 -d mytopic
+```
+
+
+### pino-logdna
+
+[pino-logdna](https://github.com/logdna/pino-logdna) is a transport to send logs to [LogDNA](https://logdna.com).
+
+```sh
+$ node index.js | pino-logdna --key YOUR_INGESTION_KEY
+```
+
+Tags and other metadata can be included using the available command line options. See the [pino-logdna README](https://github.com/logdna/pino-logdna#options) for a full list.
+
+
+### pino-logflare
+
+[pino-logflare](https://github.com/Logflare/pino-logflare) is a transport to send logs to a [Logflare](https://logflare.app) `source`.
+
+```sh
+$ node index.js | pino-logflare --key YOUR_KEY --source YOUR_SOURCE
+```
+
+
+### pino-logfmt
+
+[pino-logfmt](https://github.com/botflux/pino-logfmt) is a Pino v7+ transport that formats logs into [logfmt](https://brandur.org/logfmt). This transport can output the formatted logs to stdout or to a file.
+
+```js
+import pino from 'pino'
+
+const logger = pino({
+  transport: {
+    target: 'pino-logfmt'
+  }
+})
+```
+
+
+### pino-loki
+pino-loki is a transport that forwards logs to [Grafana Loki](https://grafana.com/oss/loki/).
+It can be used as a CLI in a separate process or as a dedicated worker:
+
+CLI:
+```console
+node app.js | pino-loki --hostname localhost:3100 --labels='{ "application": "my-application"}' --user my-username --password my-password
+```
+
+Worker:
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino-loki',
+  options: { host: 'localhost:3100' }
+})
+pino(transport)
+```
+
+For full documentation and configuration, see the [README](https://github.com/Julien-R44/pino-loki).
+
+
+### pino-mq
+
+The `pino-mq` transport will take all messages received on `process.stdin` and send them over a message bus using JSON serialization.
+
+This is useful for:
+
+* moving backpressure from the application to the broker
+* moving the work of transforming messages to another component
+
+```
+node app.js | pino-mq -u "amqp://guest:guest@localhost/" -q "pino-logs"
+```
+
+Alternatively, a configuration file can be used:
+
+```
+node app.js | pino-mq -c pino-mq.json
+```
+
+A base configuration file can be initialized with:
+
+```
+pino-mq -g
+```
+
+For full documentation of command line switches and configuration see [the `pino-mq` README](https://github.com/itavy/pino-mq#readme).
+
+
+### pino-mysql
+
+[pino-mysql][pino-mysql] loads pino logs into [MySQL][MySQL] and [MariaDB][MariaDB].
+
+```sh
+$ node app.js | pino-mysql -c db-configuration.json
+```
+
+`pino-mysql` can extract and save log fields into corresponding database fields
+and/or save the entire log stream as a [JSON Data Type][JSONDT].
+
+For full documentation and command line switches read the [README][pino-mysql].
+
+[pino-mysql]: https://www.npmjs.com/package/pino-mysql
+[MySQL]: https://www.mysql.com/
+[MariaDB]: https://mariadb.org/
+[JSONDT]: https://dev.mysql.com/doc/refman/8.0/en/json.html
+
+
+### pino-opentelemetry-transport
+
+[pino-opentelemetry-transport](https://www.npmjs.com/package/pino-opentelemetry-transport) is a transport that will forward logs to an [OpenTelemetry log collector](https://opentelemetry.io/docs/collector/) using [OpenTelemetry JS instrumentation](https://opentelemetry.io/docs/instrumentation/js/).
+
+```javascript
+const pino = require('pino')
+
+const transport = pino.transport({
+  target: 'pino-opentelemetry-transport',
+  options: {
+    resourceAttributes: {
+      'service.name': 'test-service',
+      'service.version': '1.0.0'
+    }
+  }
+})
+
+pino(transport)
+```
+
+Documentation on running a minimal example is available in the [README](https://github.com/Vunovati/pino-opentelemetry-transport#minimalistic-example).
+
+
+### pino-papertrail
+pino-papertrail is a transport that will forward logs to the [papertrail](https://papertrailapp.com) log service through a UDPv4 socket.
+
+Given an application `foo` that logs via pino, and a papertrail destination that collects logs on port UDP `12345` on address `bar.papertrailapp.com`, you would use `pino-papertrail`
+like so:
+
+```
+node yourapp.js | pino-papertrail --host bar.papertrailapp.com --port 12345 --appname foo
+```
+
+For full documentation of command line switches read the [README](https://github.com/ovhemert/pino-papertrail#readme).
+
+
+### pino-pg
+[pino-pg](https://www.npmjs.com/package/pino-pg) stores logs into PostgreSQL.
+Full documentation is in the [README](https://github.com/Xstoudi/pino-pg).
+
+
+### pino-redis
+
+[pino-redis][pino-redis] loads pino logs into [Redis][Redis].
+
+```sh
+$ node app.js | pino-redis -U redis://username:password@localhost:6379
+```
+
+[pino-redis]: https://github.com/buianhthang/pino-redis
+[Redis]: https://redis.io/
+
+
+### pino-roll
+
+`pino-roll` is a Pino transport that automatically rolls your log files based on size or time frequency.
+
+```js
+import { join } from 'path';
+import pino from 'pino';
+
+const transport = pino.transport({
+  target: 'pino-roll',
+  options: { file: join('logs', 'log'), frequency: 'daily', mkdir: true }
+});
+
+const logger = pino(transport);
+```
+
+then you can use the logger as usual:
+
+```js
+logger.info('Hello from pino-roll!');
+```
+For full documentation check the [README](https://github.com/mcollina/pino-roll?tab=readme-ov-file#pino-roll).
+
+
+### pino-sentry
+
+[pino-sentry][pino-sentry] loads pino logs into [Sentry][Sentry].
+
+```sh
+$ node app.js | pino-sentry --dsn=https://******@sentry.io/12345
+```
+
+For full documentation of command line switches see the [pino-sentry README](https://github.com/aandrewww/pino-sentry/blob/master/README.md).
+
+[pino-sentry]: https://www.npmjs.com/package/pino-sentry
+[Sentry]: https://sentry.io/
+
+
+### pino-sentry-transport
+
+[pino-sentry-transport][pino-sentry-transport] is a Pino v7+ compatible transport to forward log events to [Sentry][Sentry]
+from a dedicated worker:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: 'pino-sentry-transport',
+  options: {
+    sentry: {
+      dsn: 'https://******@sentry.io/12345',
+    }
+  }
+})
+pino(transport)
+```
+
+[pino-sentry-transport]: https://github.com/tomer-yechiel/pino-sentry-transport
+[Sentry]: https://sentry.io/
+
+
+### pino-seq
+
+[pino-seq][pino-seq] supports both out-of-process and in-process log forwarding to [Seq][Seq].
+
+```sh
+$ node app.js | pino-seq --serverUrl http://localhost:5341 --apiKey 1234567890 --property applicationName=MyNodeApp
+```
+
+[pino-seq]: https://www.npmjs.com/package/pino-seq
+[Seq]: https://datalust.co/seq
+
+
+### pino-seq-transport
+
+[pino-seq-transport][pino-seq-transport] is a Pino v7+ compatible transport to forward log events to [Seq][Seq]
+from a dedicated worker:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: '@autotelic/pino-seq-transport',
+  options: { serverUrl: 'http://localhost:5341' }
+})
+pino(transport)
+```
+
+[pino-seq-transport]: https://github.com/autotelic/pino-seq-transport
+[Seq]: https://datalust.co/seq
+
+
+### pino-slack-webhook
+
+[pino-slack-webhook][pino-slack-webhook] is a Pino v7+ compatible transport to forward log events to [Slack][Slack]
+from a dedicated worker:
+
+```js
+const pino = require('pino')
+const transport = pino.transport({
+  target: '@youngkiu/pino-slack-webhook',
+  options: {
+    webhookUrl: 'https://hooks.slack.com/services/xxx/xxx/xxx',
+    channel: '#pino-log',
+    username: 'webhookbot',
+    icon_emoji: ':ghost:'
+  }
+})
+pino(transport)
+```
+
+[pino-slack-webhook]: https://github.com/youngkiu/pino-slack-webhook
+[Slack]: https://slack.com/
+
+[pino-pretty]: https://github.com/pinojs/pino-pretty
+
+
+### pino-socket
+
+[pino-socket][pino-socket] is a transport that will forward logs to an IPv4
+UDP or TCP socket.
+
+As an example, use `socat` to fake a listener:
+
+```sh
+$ socat -v udp4-recvfrom:6000,fork exec:'/bin/cat'
+```
+
+Then run an application that uses `pino` for logging:
+
+```sh
+$ node app.js | pino-socket -p 6000
+```
+
+Logs from the application should be observed on both consoles.
+
+[pino-socket]: https://www.npmjs.com/package/pino-socket
+
+
+### pino-stackdriver
+The [pino-stackdriver](https://www.npmjs.com/package/pino-stackdriver) module is a transport that will forward logs to the [Google Stackdriver](https://cloud.google.com/logging/) log service through its API.
+
+Given an application `foo` that logs via pino, a stackdriver log project `bar`, and credentials in the file `/credentials.json`, you would use `pino-stackdriver`
+like so:
+
+``` sh
+$ node foo | pino-stackdriver --project bar --credentials /credentials.json
+```
+
+For full documentation of command line switches read the [README](https://github.com/ovhemert/pino-stackdriver#readme).
+
+
+### pino-syslog
+
+[pino-syslog][pino-syslog] is a transforming transport that converts
+`pino` NDJSON logs to [RFC3164][rfc3164] compatible log messages. The `pino-syslog` module does not
+forward the logs anywhere; it merely re-writes the messages to `stdout`. But
+when used in combination with `pino-socket`, the log messages can be relayed to a syslog server:
+
+```sh
+$ node app.js | pino-syslog | pino-socket -a syslog.example.com
+```
+
+Example output for the "hello world" log:
+
+```
+<134>Apr 1 16:44:58 MacBook-Pro-3 none[94473]: {"pid":94473,"hostname":"MacBook-Pro-3","level":30,"msg":"hello world","time":1459529098958}
+```
+
+[pino-syslog]: https://www.npmjs.com/package/pino-syslog
+[rfc3164]: https://tools.ietf.org/html/rfc3164
+[logstash]: https://www.elastic.co/products/logstash
+
+
+### pino-telegram-webhook
+
+[pino-telegram-webhook](https://github.com/Jhon-Mosk/pino-telegram-webhook) is a Pino v7+ transport for sending messages to [Telegram](https://telegram.org/).
+
+```js
+const pino = require('pino');
+
+const logger = pino({
+  transport: {
+    target: 'pino-telegram-webhook',
+    level: 'error',
+    options: {
+      chatId: -1234567890,
+      botToken: "123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11",
+      extra: {
+        parse_mode: "HTML",
+      },
+    },
+  },
+})
+
+logger.error('test log!');
+```
+
+The `extra` parameter is optional. Parameters that the [`sendMessage`](https://core.telegram.org/bots/api#sendmessage) method supports can be passed to it.
+
+
+### pino-websocket
+
+[pino-websocket](https://www.npmjs.com/package/@abeai/pino-websocket) is a transport that will forward each log line to a websocket server.
+
+```sh
+$ node app.js | pino-websocket -a my-websocket-server.example.com -p 3004
+```
+
+For full documentation of command line switches read the [README](https://github.com/abeai/pino-websocket#readme).
+
+
+### pino-yc-transport
+
+[pino-yc-transport](https://github.com/Jhon-Mosk/pino-yc-transport) is a Pino v7+ transport for writing to [Yandex Cloud Logging](https://yandex.cloud/ru/services/logging) from serverless functions or containers.
+
+```js
+const pino = require("pino");
+
+const config = {
+  level: "debug",
+  transport: {
+    target: "pino-yc-transport",
+  },
+};
+
+const logger = pino(config);
+
+logger.debug("some message")
+logger.debug({ foo: "bar" });
+logger.debug("some message %o, %s", { foo: "bar" }, "baz");
+logger.info("info");
+logger.warn("warn");
+logger.error("error");
+logger.error(new Error("error"));
+logger.fatal("fatal");
+```
+
+
+## Communication between Pino and Transports
+Here we discuss some technical details of how Pino communicates with its [worker threads](https://nodejs.org/api/worker_threads.html).
+
+Pino uses [`thread-stream`](https://github.com/pinojs/thread-stream) to create a stream for transports.
+When we create a stream with `thread-stream`, `thread-stream` spawns a [worker](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/index.js#L50-L60) (an independent JavaScript execution thread).
+
+### Error messages
+How are error messages propagated from a transport worker to Pino?
+
+Let's assume we have a transport with an error listener:
+```js
+// index.js
+const transport = pino.transport({
+  target: './transport.js'
+})
+
+transport.on('error', err => {
+  console.error('error caught', err)
+})
+
+const log = pino(transport)
+```
+
+When our worker emits an error event, the worker has listeners for it: [error](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/lib/worker.js#L59-L70) and [unhandledRejection](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/lib/worker.js#L135-L141). These listeners send the error message to the main thread where Pino is present.
+
+When Pino receives the error message, it further [emits](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/index.js#L349) the error message. Finally, the error message arrives at our `index.js` and is caught by our error listener.
diff --git a/node_modules/pino/docs/web.md b/node_modules/pino/docs/web.md
new file mode 100644
index 0000000..45de8ad
--- /dev/null
+++ b/node_modules/pino/docs/web.md
@@ -0,0 +1,309 @@
+# Web Frameworks
+
+Since HTTP logging is a primary use case, Pino has first-class support for the Node.js
+web framework ecosystem.
+
+- [Web Frameworks](#web-frameworks)
+  - [Pino with Fastify](#pino-with-fastify)
+  - [Pino with Express](#pino-with-express)
+  - [Pino with Hapi](#pino-with-hapi)
+  - [Pino with Restify](#pino-with-restify)
+  - [Pino with Koa](#pino-with-koa)
+  - [Pino with Node core `http`](#pino-with-node-core-http)
+  - [Pino with Nest](#pino-with-nest)
+  - [Pino with H3](#pino-with-h3)
+  - [Pino with Hono](#pino-with-hono)
+
+
+## Pino with Fastify
+
+The Fastify web framework comes bundled with Pino by default. Simply set Fastify's
+`logger` option to `true`, and use `request.log` or `reply.log` for log messages that correspond
+to each request:
+
+```js
+const fastify = require('fastify')({
+  logger: true
+})
+
+fastify.get('/', async (request, reply) => {
+  request.log.info('something')
+  return { hello: 'world' }
+})
+
+fastify.listen({ port: 3000 }, (err) => {
+  if (err) {
+    fastify.log.error(err)
+    process.exit(1)
+  }
+})
+```
+
+The `logger` option can also be set to an object, which will be passed through directly
+as the [`pino` options object](/docs/api.md#options-object) - see the sketch below.
+
+See the [fastify documentation](https://www.fastify.io/docs/latest/Reference/Logging/) for more information.
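+
+Because the object form is passed straight through to pino, features such as levels
+and transports can be configured inline. A small sketch (assumes `pino-pretty` is
+installed as a dependency):
+
+```js
+const fastify = require('fastify')({
+  logger: {
+    level: 'warn', // suppress info-level request logging
+    transport: { target: 'pino-pretty' } // assumes pino-pretty is installed
+  }
+})
+
+fastify.get('/', async (request, reply) => {
+  request.log.warn('only warn and above is emitted')
+  return { hello: 'world' }
+})
+
+fastify.listen({ port: 3000 })
+```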
+ + +## Pino with Express + +```sh +npm install pino-http +``` + +```js +const app = require('express')() +const pino = require('pino-http')() + +app.use(pino) + +app.get('/', function (req, res) { + req.log.info('something') + res.send('hello world') +}) + +app.listen(3000) +``` + +See the [pino-http README](https://npm.im/pino-http) for more info. + + +## Pino with Hapi + +```sh +npm install hapi-pino +``` + +```js +'use strict' + +const Hapi = require('@hapi/hapi') +const Pino = require('hapi-pino'); + +async function start () { + // Create a server with a host and port + const server = Hapi.server({ + host: 'localhost', + port: 3000 + }) + + // Add the route + server.route({ + method: 'GET', + path: '/', + handler: async function (request, h) { + // request.log is HAPI's standard way of logging + request.log(['a', 'b'], 'Request into hello world') + + // a pino instance can also be used, which will be faster + request.logger.info('In handler %s', request.path) + + return 'hello world' + } + }) + + await server.register(Pino) + + // also as a decorated API + server.logger.info('another way for accessing it') + + // and through Hapi standard logging system + server.log(['subsystem'], 'third way for accessing it') + + await server.start() + + return server +} + +start().catch((err) => { + console.log(err) + process.exit(1) +}) +``` + +See the [hapi-pino README](https://npm.im/hapi-pino) for more info. + + +## Pino with Restify + +```sh +npm install restify-pino-logger +``` + +```js +const server = require('restify').createServer({name: 'server'}) +const pino = require('restify-pino-logger')() + +server.use(pino) + +server.get('/', function (req, res) { + req.log.info('something') + res.send('hello world') +}) + +server.listen(3000) +``` + +See the [restify-pino-logger README](https://npm.im/restify-pino-logger) for more info. + + +## Pino with Koa + +```sh +npm install koa-pino-logger +``` + +```js +const Koa = require('koa') +const app = new Koa() +const pino = require('koa-pino-logger')() + +app.use(pino) + +app.use((ctx) => { + ctx.log.info('something else') + ctx.body = 'hello world' +}) + +app.listen(3000) +``` + +See the [koa-pino-logger README](https://github.com/pinojs/koa-pino-logger) for more info. + + +## Pino with Node core `http` + +```sh +npm install pino-http +``` + +```js +const http = require('http') +const server = http.createServer(handle) +const logger = require('pino-http')() + +function handle (req, res) { + logger(req, res) + req.log.info('something else') + res.end('hello world') +} + +server.listen(3000) +``` + +See the [pino-http README](https://npm.im/pino-http) for more info. + + + +## Pino with Nest + +```sh +npm install nestjs-pino +``` + +```ts +import { NestFactory } from '@nestjs/core' +import { Controller, Get, Module } from '@nestjs/common' +import { LoggerModule, Logger } from 'nestjs-pino' + +@Controller() +export class AppController { + constructor(private readonly logger: Logger) {} + + @Get() + getHello() { + this.logger.log('something') + return `Hello world` + } +} + +@Module({ + controllers: [AppController], + imports: [LoggerModule.forRoot()] +}) +class MyModule {} + +async function bootstrap() { + const app = await NestFactory.create(MyModule) + await app.listen(3000) +} +bootstrap() +``` + +See the [nestjs-pino README](https://npm.im/nestjs-pino) for more info. 
+ + + +## Pino with H3 + +```sh +npm install pino-http h3 +``` + +Save as `server.mjs`: + +```js +import { createApp, createRouter, eventHandler, fromNodeMiddleware } from "h3"; +import pino from 'pino-http' + +export const app = createApp(); + +const router = createRouter(); +app.use(router); +app.use(fromNodeMiddleware(pino())) + +app.use(eventHandler((event) => { + event.node.req.log.info('something') + return 'hello world' +})) + +router.get( + "/", + eventHandler((event) => { + return { path: event.path, message: "Hello World!" }; + }), +); +``` + +Execute `npx --yes listhen -w --open ./server.mjs`. + +See the [pino-http README](https://npm.im/pino-http) for more info. + + + +## Pino with Hono + +```sh +npm install pino pino-http hono +``` + +```js +import { serve } from '@hono/node-server'; +import { Hono } from 'hono'; +import { requestId } from 'hono/request-id'; +import { pinoHttp } from 'pino-http'; + +const app = new Hono(); +app.use(requestId()); +app.use(async (c, next) => { + // pass hono's request-id to pino-http + c.env.incoming.id = c.var.requestId; + + // map express style middleware to hono + await new Promise((resolve) => pinoHttp()(c.env.incoming, c.env.outgoing, () => resolve())); + + c.set('logger', c.env.incoming.log); + + await next(); +}); + +app.get('/', (c) => { + c.var.logger.info('something'); + + return c.text('Hello Node.js!'); +}); + +serve(app); +``` + +See the [pino-http README](https://npm.im/pino-http) for more info. diff --git a/node_modules/pino/eslint.config.js b/node_modules/pino/eslint.config.js new file mode 100644 index 0000000..9bed7d4 --- /dev/null +++ b/node_modules/pino/eslint.config.js @@ -0,0 +1,19 @@ +'use strict' + +const { defineConfig, globalIgnores } = require('eslint/config') +const neostandard = require('neostandard') + +module.exports = defineConfig([ + neostandard({}), + globalIgnores([ + 'pino.d.ts', + 'test/types/pino.test-d.ts', + 'test/fixtures/syntax-error-esm.mjs', + 'test/fixtures/ts/*cjs', + ]), + { + rules: { + 'no-var': 'off', + }, + }, +]) diff --git a/node_modules/pino/examples/basic.js b/node_modules/pino/examples/basic.js new file mode 100644 index 0000000..bab079a --- /dev/null +++ b/node_modules/pino/examples/basic.js @@ -0,0 +1,43 @@ +'use strict' + +// Pino's primary usage writes ndjson to `stdout`: +const pino = require('..')() + +// However, if "human readable" output is desired, +// `pino-pretty` can be provided as the destination +// stream by uncommenting the following line in place +// of the previous declaration: +// const pino = require('..')(require('pino-pretty')()) + +pino.info('hello world') +pino.error('this is at error level') +pino.info('the answer is %d', 42) +pino.info({ obj: 42 }, 'hello world') +pino.info({ obj: 42, b: 2 }, 'hello world') +pino.info({ nested: { obj: 42 } }, 'nested') +setImmediate(() => { + pino.info('after setImmediate') +}) +pino.error(new Error('an error')) + +const child = pino.child({ a: 'property' }) +child.info('hello child!') + +const childsChild = child.child({ another: 'property' }) +childsChild.info('hello baby..') + +pino.debug('this should be mute') + +pino.level = 'trace' + +pino.debug('this is a debug statement') + +pino.child({ another: 'property' }).debug('this is a debug statement via child') +pino.trace('this is a trace statement') + +pino.debug('this is a "debug" statement with "') + +pino.info(new Error('kaboom')) +pino.info(null) + +pino.info(new Error('kaboom'), 'with', 'a', 'message') diff --git a/node_modules/pino/examples/transport.js 
b/node_modules/pino/examples/transport.js new file mode 100644 index 0000000..7ffab98 --- /dev/null +++ b/node_modules/pino/examples/transport.js @@ -0,0 +1,68 @@ +'use strict' + +const pino = require('..') +const { tmpdir } = require('node:os') +const { join } = require('node:path') + +const file = join(tmpdir(), `pino-${process.pid}-example`) + +const transport = pino.transport({ + targets: [{ + level: 'warn', + target: 'pino/file', + options: { + destination: file + } + /* + }, { + level: 'info', + target: 'pino-elasticsearch', + options: { + node: 'http://localhost:9200' + } + */ + }, { + level: 'info', + target: 'pino-pretty' + }] +}) + +const logger = pino(transport) + +logger.info({ + file +}, 'logging destination') + +logger.info('hello world') +logger.error('this is at error level') +logger.info('the answer is %d', 42) +logger.info({ obj: 42 }, 'hello world') +logger.info({ obj: 42, b: 2 }, 'hello world') +logger.info({ nested: { obj: 42 } }, 'nested') +logger.warn('WARNING!') +setImmediate(() => { + logger.info('after setImmediate') +}) +logger.error(new Error('an error')) + +const child = logger.child({ a: 'property' }) +child.info('hello child!') + +const childsChild = child.child({ another: 'property' }) +childsChild.info('hello baby..') + +logger.debug('this should be mute') + +logger.level = 'trace' + +logger.debug('this is a debug statement') + +logger.child({ another: 'property' }).debug('this is a debug statement via child') +logger.trace('this is a trace statement') + +logger.debug('this is a "debug" statement with "') + +logger.info(new Error('kaboom')) +logger.info(null) + +logger.info(new Error('kaboom'), 'with', 'a', 'message') diff --git a/node_modules/pino/favicon.ico b/node_modules/pino/favicon.ico new file mode 100644 index 0000000..1d7c6ce Binary files /dev/null and b/node_modules/pino/favicon.ico differ diff --git a/node_modules/pino/file.js b/node_modules/pino/file.js new file mode 100644 index 0000000..7e1192b --- /dev/null +++ b/node_modules/pino/file.js @@ -0,0 +1,12 @@ +'use strict' + +const pino = require('./pino') +const { once } = require('node:events') + +module.exports = async function (opts = {}) { + const destOpts = Object.assign({}, opts, { dest: opts.destination || 1, sync: false }) + delete destOpts.destination + const destination = pino.destination(destOpts) + await once(destination, 'ready') + return destination +} diff --git a/node_modules/pino/inc-version.sh b/node_modules/pino/inc-version.sh new file mode 100644 index 0000000..2ad7f52 --- /dev/null +++ b/node_modules/pino/inc-version.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +set -e + +PATH=./node_modules/.bin:${PATH} +CURRENT_VERSION=$(jq -r .version package.json) + +case ${1} in + Major | MAJOR | major) + LEVEL=major + ;; + + Minor | MINOR | minor) + LEVEL=minor + ;; + + Patch | PATCH | patch) + LEVEL=patch + ;; + + *) + LEVEL=patch + ;; +esac + +NEW_VERSION=$(semver -i ${LEVEL} ${CURRENT_VERSION}) +echo "${CURRENT_VERSION} => ${NEW_VERSION}" +read -n 1 -s -r -p "Press any key to continue (ctrl+c to abort)..." +echo "" + +echo "Patching package.json..." +cat package.json | \ + jq --arg vers "${NEW_VERSION}" '.version = $vers' | \ + tee package.json 1>/dev/null + +echo "Patching lib/meta.js ..." +SED_SCRIPT=$(printf 's/%s/%s/' ${CURRENT_VERSION//\./\\.} ${NEW_VERSION//\./\\.}) +cat ./lib/meta.js | \ + sed -e ${SED_SCRIPT} | \ + tee ./lib/meta.js 1>/dev/null + +echo "Done." 
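+
+# Usage: ./inc-version.sh [major|minor|patch]
+# Any other (or missing) argument falls back to a patch bump.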
diff --git a/node_modules/pino/index.html b/node_modules/pino/index.html new file mode 100644 index 0000000..aff1917 --- /dev/null +++ b/node_modules/pino/index.html @@ -0,0 +1,55 @@
[55 added lines of HTML; the markup was lost in extraction and only the page title survives: "Pino - Super fast, all natural JSON logger for Node.js"]
+ + + + + + + + + diff --git a/node_modules/pino/lib/caller.js b/node_modules/pino/lib/caller.js new file mode 100644 index 0000000..f39e087 --- /dev/null +++ b/node_modules/pino/lib/caller.js @@ -0,0 +1,30 @@ +'use strict' + +function noOpPrepareStackTrace (_, stack) { + return stack +} + +module.exports = function getCallers () { + const originalPrepare = Error.prepareStackTrace + Error.prepareStackTrace = noOpPrepareStackTrace + const stack = new Error().stack + Error.prepareStackTrace = originalPrepare + + if (!Array.isArray(stack)) { + return undefined + } + + const entries = stack.slice(2) + + const fileNames = [] + + for (const entry of entries) { + if (!entry) { + continue + } + + fileNames.push(entry.getFileName()) + } + + return fileNames +} diff --git a/node_modules/pino/lib/constants.js b/node_modules/pino/lib/constants.js new file mode 100644 index 0000000..f91f731 --- /dev/null +++ b/node_modules/pino/lib/constants.js @@ -0,0 +1,28 @@ +/** + * Represents default log level values + * + * @enum {number} + */ +const DEFAULT_LEVELS = { + trace: 10, + debug: 20, + info: 30, + warn: 40, + error: 50, + fatal: 60 +} + +/** + * Represents sort order direction: `ascending` or `descending` + * + * @enum {string} + */ +const SORTING_ORDER = { + ASC: 'ASC', + DESC: 'DESC' +} + +module.exports = { + DEFAULT_LEVELS, + SORTING_ORDER +} diff --git a/node_modules/pino/lib/deprecations.js b/node_modules/pino/lib/deprecations.js new file mode 100644 index 0000000..806c536 --- /dev/null +++ b/node_modules/pino/lib/deprecations.js @@ -0,0 +1,8 @@ +'use strict' + +const warning = require('process-warning')() +module.exports = warning + +// const warnName = 'PinoWarning' + +// warning.create(warnName, 'PINODEP010', 'A new deprecation') diff --git a/node_modules/pino/lib/levels.js b/node_modules/pino/lib/levels.js new file mode 100644 index 0000000..67e6a99 --- /dev/null +++ b/node_modules/pino/lib/levels.js @@ -0,0 +1,241 @@ +'use strict' +/* eslint no-prototype-builtins: 0 */ +const { + lsCacheSym, + levelValSym, + useOnlyCustomLevelsSym, + streamSym, + formattersSym, + hooksSym, + levelCompSym +} = require('./symbols') +const { noop, genLog } = require('./tools') +const { DEFAULT_LEVELS, SORTING_ORDER } = require('./constants') + +const levelMethods = { + fatal: (hook) => { + const logFatal = genLog(DEFAULT_LEVELS.fatal, hook) + return function (...args) { + const stream = this[streamSym] + logFatal.call(this, ...args) + if (typeof stream.flushSync === 'function') { + try { + stream.flushSync() + } catch (e) { + // https://github.com/pinojs/pino/pull/740#discussion_r346788313 + } + } + } + }, + error: (hook) => genLog(DEFAULT_LEVELS.error, hook), + warn: (hook) => genLog(DEFAULT_LEVELS.warn, hook), + info: (hook) => genLog(DEFAULT_LEVELS.info, hook), + debug: (hook) => genLog(DEFAULT_LEVELS.debug, hook), + trace: (hook) => genLog(DEFAULT_LEVELS.trace, hook) +} + +const nums = Object.keys(DEFAULT_LEVELS).reduce((o, k) => { + o[DEFAULT_LEVELS[k]] = k + return o +}, {}) + +const initialLsCache = Object.keys(nums).reduce((o, k) => { + o[k] = '{"level":' + Number(k) + return o +}, {}) + +function genLsCache (instance) { + const formatter = instance[formattersSym].level + const { labels } = instance.levels + const cache = {} + for (const label in labels) { + const level = formatter(labels[label], Number(label)) + cache[label] = JSON.stringify(level).slice(0, -1) + } + instance[lsCacheSym] = cache + return instance +} + +function isStandardLevel (level, useOnlyCustomLevels) { + if (useOnlyCustomLevels) { 
+ return false + } + + switch (level) { + case 'fatal': + case 'error': + case 'warn': + case 'info': + case 'debug': + case 'trace': + return true + default: + return false + } +} + +function setLevel (level) { + const { labels, values } = this.levels + if (typeof level === 'number') { + if (labels[level] === undefined) throw Error('unknown level value' + level) + level = labels[level] + } + if (values[level] === undefined) throw Error('unknown level ' + level) + const preLevelVal = this[levelValSym] + const levelVal = this[levelValSym] = values[level] + const useOnlyCustomLevelsVal = this[useOnlyCustomLevelsSym] + const levelComparison = this[levelCompSym] + const hook = this[hooksSym].logMethod + + for (const key in values) { + if (levelComparison(values[key], levelVal) === false) { + this[key] = noop + continue + } + this[key] = isStandardLevel(key, useOnlyCustomLevelsVal) ? levelMethods[key](hook) : genLog(values[key], hook) + } + + this.emit( + 'level-change', + level, + levelVal, + labels[preLevelVal], + preLevelVal, + this + ) +} + +function getLevel (level) { + const { levels, levelVal } = this + // protection against potential loss of Pino scope from serializers (edge case with circular refs - https://github.com/pinojs/pino/issues/833) + return (levels && levels.labels) ? levels.labels[levelVal] : '' +} + +function isLevelEnabled (logLevel) { + const { values } = this.levels + const logLevelVal = values[logLevel] + return logLevelVal !== undefined && this[levelCompSym](logLevelVal, this[levelValSym]) +} + +/** + * Determine if the given `current` level is enabled by comparing it + * against the current threshold (`expected`). + * + * @param {SORTING_ORDER} direction comparison direction "ASC" or "DESC" + * @param {number} current current log level number representation + * @param {number} expected threshold value to compare with + * @returns {boolean} + */ +function compareLevel (direction, current, expected) { + if (direction === SORTING_ORDER.DESC) { + return current <= expected + } + + return current >= expected +} + +/** + * Create a level comparison function based on `levelComparison` + * it could a default function which compares levels either in "ascending" or "descending" order or custom comparison function + * + * @param {SORTING_ORDER | Function} levelComparison sort levels order direction or custom comparison function + * @returns Function + */ +function genLevelComparison (levelComparison) { + if (typeof levelComparison === 'string') { + return compareLevel.bind(null, levelComparison) + } + + return levelComparison +} + +function mappings (customLevels = null, useOnlyCustomLevels = false) { + const customNums = customLevels + /* eslint-disable */ + ? Object.keys(customLevels).reduce((o, k) => { + o[customLevels[k]] = k + return o + }, {}) + : null + /* eslint-enable */ + + const labels = Object.assign( + Object.create(Object.prototype, { Infinity: { value: 'silent' } }), + useOnlyCustomLevels ? null : nums, + customNums + ) + const values = Object.assign( + Object.create(Object.prototype, { silent: { value: Infinity } }), + useOnlyCustomLevels ? null : DEFAULT_LEVELS, + customLevels + ) + return { labels, values } +} + +function assertDefaultLevelFound (defaultLevel, customLevels, useOnlyCustomLevels) { + if (typeof defaultLevel === 'number') { + const values = [].concat( + Object.keys(customLevels || {}).map(key => customLevels[key]), + useOnlyCustomLevels ? 
[] : Object.keys(nums).map(level => +level), + Infinity + ) + if (!values.includes(defaultLevel)) { + throw Error(`default level:${defaultLevel} must be included in custom levels`) + } + return + } + + const labels = Object.assign( + Object.create(Object.prototype, { silent: { value: Infinity } }), + useOnlyCustomLevels ? null : DEFAULT_LEVELS, + customLevels + ) + if (!(defaultLevel in labels)) { + throw Error(`default level:${defaultLevel} must be included in custom levels`) + } +} + +function assertNoLevelCollisions (levels, customLevels) { + const { labels, values } = levels + for (const k in customLevels) { + if (k in values) { + throw Error('levels cannot be overridden') + } + if (customLevels[k] in labels) { + throw Error('pre-existing level values cannot be used for new levels') + } + } +} + +/** + * Validates whether `levelComparison` is correct + * + * @throws Error + * @param {SORTING_ORDER | Function} levelComparison - value to validate + * @returns + */ +function assertLevelComparison (levelComparison) { + if (typeof levelComparison === 'function') { + return + } + + if (typeof levelComparison === 'string' && Object.values(SORTING_ORDER).includes(levelComparison)) { + return + } + + throw new Error('Levels comparison should be one of "ASC", "DESC" or "function" type') +} + +module.exports = { + initialLsCache, + genLsCache, + levelMethods, + getLevel, + setLevel, + isLevelEnabled, + mappings, + assertNoLevelCollisions, + assertDefaultLevelFound, + genLevelComparison, + assertLevelComparison +} diff --git a/node_modules/pino/lib/meta.js b/node_modules/pino/lib/meta.js new file mode 100644 index 0000000..1d41ed8 --- /dev/null +++ b/node_modules/pino/lib/meta.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = { version: '10.1.0' } diff --git a/node_modules/pino/lib/multistream.js b/node_modules/pino/lib/multistream.js new file mode 100644 index 0000000..42cdbfb --- /dev/null +++ b/node_modules/pino/lib/multistream.js @@ -0,0 +1,203 @@ +'use strict' + +const metadata = Symbol.for('pino.metadata') +const { DEFAULT_LEVELS } = require('./constants') + +const DEFAULT_INFO_LEVEL = DEFAULT_LEVELS.info + +function multistream (streamsArray, opts) { + streamsArray = streamsArray || [] + opts = opts || { dedupe: false } + + const streamLevels = Object.create(DEFAULT_LEVELS) + streamLevels.silent = Infinity + if (opts.levels && typeof opts.levels === 'object') { + Object.keys(opts.levels).forEach(i => { + streamLevels[i] = opts.levels[i] + }) + } + + const res = { + write, + add, + remove, + emit, + flushSync, + end, + minLevel: 0, + lastId: 0, + streams: [], + clone, + [metadata]: true, + streamLevels + } + + if (Array.isArray(streamsArray)) { + streamsArray.forEach(add, res) + } else { + add.call(res, streamsArray) + } + + // clean this object up + // or it will stay allocated forever + // as it is closed on the following closures + streamsArray = null + + return res + + // we can exit early because the streams are ordered by level + function write (data) { + let dest + const level = this.lastLevel + const { streams } = this + // for handling situation when several streams has the same level + let recordedLevel = 0 + let stream + + // if dedupe set to true we send logs to the stream with the highest level + // therefore, we have to change sorting order + for (let i = initLoopVar(streams.length, opts.dedupe); checkLoopVar(i, streams.length, opts.dedupe); i = adjustLoopVar(i, opts.dedupe)) { + dest = streams[i] + if (dest.level <= level) { + if (recordedLevel !== 0 && recordedLevel !== 
dest.level) { + break + } + stream = dest.stream + if (stream[metadata]) { + const { lastTime, lastMsg, lastObj, lastLogger } = this + stream.lastLevel = level + stream.lastTime = lastTime + stream.lastMsg = lastMsg + stream.lastObj = lastObj + stream.lastLogger = lastLogger + } + stream.write(data) + if (opts.dedupe) { + recordedLevel = dest.level + } + } else if (!opts.dedupe) { + break + } + } + } + + function emit (...args) { + for (const { stream } of this.streams) { + if (typeof stream.emit === 'function') { + stream.emit(...args) + } + } + } + + function flushSync () { + for (const { stream } of this.streams) { + if (typeof stream.flushSync === 'function') { + stream.flushSync() + } + } + } + + function add (dest) { + if (!dest) { + return res + } + + // Check that dest implements either StreamEntry or DestinationStream + const isStream = typeof dest.write === 'function' || dest.stream + const stream_ = dest.write ? dest : dest.stream + // This is necessary to provide a meaningful error message, otherwise it throws somewhere inside write() + if (!isStream) { + throw Error('stream object needs to implement either StreamEntry or DestinationStream interface') + } + + const { streams, streamLevels } = this + + let level + if (typeof dest.levelVal === 'number') { + level = dest.levelVal + } else if (typeof dest.level === 'string') { + level = streamLevels[dest.level] + } else if (typeof dest.level === 'number') { + level = dest.level + } else { + level = DEFAULT_INFO_LEVEL + } + + const dest_ = { + stream: stream_, + level, + levelVal: undefined, + id: ++res.lastId + } + + streams.unshift(dest_) + streams.sort(compareByLevel) + + this.minLevel = streams[0].level + + return res + } + + function remove (id) { + const { streams } = this + const index = streams.findIndex(s => s.id === id) + + if (index >= 0) { + streams.splice(index, 1) + streams.sort(compareByLevel) + this.minLevel = streams.length > 0 ? streams[0].level : -1 + } + + return res + } + + function end () { + for (const { stream } of this.streams) { + if (typeof stream.flushSync === 'function') { + stream.flushSync() + } + stream.end() + } + } + + function clone (level) { + const streams = new Array(this.streams.length) + + for (let i = 0; i < streams.length; i++) { + streams[i] = { + level, + stream: this.streams[i].stream + } + } + + return { + write, + add, + remove, + minLevel: level, + streams, + clone, + emit, + flushSync, + [metadata]: true + } + } +} + +function compareByLevel (a, b) { + return a.level - b.level +} + +function initLoopVar (length, dedupe) { + return dedupe ? length - 1 : 0 +} + +function adjustLoopVar (i, dedupe) { + return dedupe ? i - 1 : i + 1 +} + +function checkLoopVar (i, length, dedupe) { + return dedupe ? 
i >= 0 : i < length +} + +module.exports = multistream diff --git a/node_modules/pino/lib/proto.js b/node_modules/pino/lib/proto.js new file mode 100644 index 0000000..a6ba722 --- /dev/null +++ b/node_modules/pino/lib/proto.js @@ -0,0 +1,256 @@ +'use strict' + +/* eslint no-prototype-builtins: 0 */ + +const { EventEmitter } = require('node:events') +const { + lsCacheSym, + levelValSym, + setLevelSym, + getLevelSym, + chindingsSym, + mixinSym, + asJsonSym, + writeSym, + mixinMergeStrategySym, + timeSym, + timeSliceIndexSym, + streamSym, + serializersSym, + formattersSym, + errorKeySym, + messageKeySym, + useOnlyCustomLevelsSym, + needsMetadataGsym, + redactFmtSym, + stringifySym, + formatOptsSym, + stringifiersSym, + msgPrefixSym, + hooksSym +} = require('./symbols') +const { + getLevel, + setLevel, + isLevelEnabled, + mappings, + initialLsCache, + genLsCache, + assertNoLevelCollisions +} = require('./levels') +const { + asChindings, + asJson, + buildFormatters, + stringify, + noop +} = require('./tools') +const { + version +} = require('./meta') +const redaction = require('./redaction') + +// note: use of class is satirical +// https://github.com/pinojs/pino/pull/433#pullrequestreview-127703127 +const constructor = class Pino {} +const prototype = { + constructor, + child, + bindings, + setBindings, + flush, + isLevelEnabled, + version, + get level () { return this[getLevelSym]() }, + set level (lvl) { this[setLevelSym](lvl) }, + get levelVal () { return this[levelValSym] }, + set levelVal (n) { throw Error('levelVal is read-only') }, + get msgPrefix () { return this[msgPrefixSym] }, + get [Symbol.toStringTag] () { return 'Pino' }, + [lsCacheSym]: initialLsCache, + [writeSym]: write, + [asJsonSym]: asJson, + [getLevelSym]: getLevel, + [setLevelSym]: setLevel +} + +Object.setPrototypeOf(prototype, EventEmitter.prototype) + +// exporting and consuming the prototype object using factory pattern fixes scoping issues with getters when serializing +module.exports = function () { + return Object.create(prototype) +} + +const resetChildingsFormatter = bindings => bindings +function child (bindings, options) { + if (!bindings) { + throw Error('missing bindings for child Pino') + } + const serializers = this[serializersSym] + const formatters = this[formattersSym] + const instance = Object.create(this) + + // If an `options` object was not supplied, we can improve + // the performance of child creation by skipping + // the checks for set options and simply return + // a baseline instance. 
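+  // (Children created here share the parent's stream and level through the
+  // prototype chain set up by Object.create(this); only the bindings differ.)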
+ if (options == null) { + if (instance[formattersSym].bindings !== resetChildingsFormatter) { + instance[formattersSym] = buildFormatters( + formatters.level, + resetChildingsFormatter, + formatters.log + ) + } + + instance[chindingsSym] = asChindings(instance, bindings) + + if (this.onChild !== noop) { + this.onChild(instance) + } + + return instance + } + + if (options.hasOwnProperty('serializers') === true) { + instance[serializersSym] = Object.create(null) + + for (const k in serializers) { + instance[serializersSym][k] = serializers[k] + } + const parentSymbols = Object.getOwnPropertySymbols(serializers) + /* eslint no-var: off */ + for (var i = 0; i < parentSymbols.length; i++) { + const ks = parentSymbols[i] + instance[serializersSym][ks] = serializers[ks] + } + + for (const bk in options.serializers) { + instance[serializersSym][bk] = options.serializers[bk] + } + const bindingsSymbols = Object.getOwnPropertySymbols(options.serializers) + for (var bi = 0; bi < bindingsSymbols.length; bi++) { + const bks = bindingsSymbols[bi] + instance[serializersSym][bks] = options.serializers[bks] + } + } else instance[serializersSym] = serializers + if (options.hasOwnProperty('formatters')) { + const { level, bindings: chindings, log } = options.formatters + instance[formattersSym] = buildFormatters( + level || formatters.level, + chindings || resetChildingsFormatter, + log || formatters.log + ) + } else { + instance[formattersSym] = buildFormatters( + formatters.level, + resetChildingsFormatter, + formatters.log + ) + } + if (options.hasOwnProperty('customLevels') === true) { + assertNoLevelCollisions(this.levels, options.customLevels) + instance.levels = mappings(options.customLevels, instance[useOnlyCustomLevelsSym]) + genLsCache(instance) + } + + // redact must place before asChindings and only replace if exist + if ((typeof options.redact === 'object' && options.redact !== null) || Array.isArray(options.redact)) { + instance.redact = options.redact // replace redact directly + const stringifiers = redaction(instance.redact, stringify) + const formatOpts = { stringify: stringifiers[redactFmtSym] } + instance[stringifySym] = stringify + instance[stringifiersSym] = stringifiers + instance[formatOptsSym] = formatOpts + } + + if (typeof options.msgPrefix === 'string') { + instance[msgPrefixSym] = (this[msgPrefixSym] || '') + options.msgPrefix + } + + instance[chindingsSym] = asChindings(instance, bindings) + if ((options.level !== undefined && options.level !== this.level) || options.hasOwnProperty('customLevels')) { + const childLevel = options.level || this.level + instance[setLevelSym](childLevel) + } + this.onChild(instance) + return instance +} + +function bindings () { + const chindings = this[chindingsSym] + const chindingsJson = `{${chindings.substr(1)}}` // at least contains ,"pid":7068,"hostname":"myMac" + const bindingsFromJson = JSON.parse(chindingsJson) + delete bindingsFromJson.pid + delete bindingsFromJson.hostname + return bindingsFromJson +} + +function setBindings (newBindings) { + const chindings = asChindings(this, newBindings) + this[chindingsSym] = chindings +} + +/** + * Default strategy for creating `mergeObject` from arguments and the result from `mixin()`. + * Fields from `mergeObject` have higher priority in this strategy. + * + * @param {Object} mergeObject The object a user has supplied to the logging function. + * @param {Object} mixinObject The result of the `mixin` method. 
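+ * @example
+ * // fields from `mergeObject` win on key collisions:
+ * // defaultMixinMergeStrategy({ a: 1 }, { a: 0, b: 2 }) // => { a: 1, b: 2 }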
+ * @return {Object} + */ +function defaultMixinMergeStrategy (mergeObject, mixinObject) { + return Object.assign(mixinObject, mergeObject) +} + +function write (_obj, msg, num) { + const t = this[timeSym]() + const mixin = this[mixinSym] + const errorKey = this[errorKeySym] + const messageKey = this[messageKeySym] + const mixinMergeStrategy = this[mixinMergeStrategySym] || defaultMixinMergeStrategy + let obj + const streamWriteHook = this[hooksSym].streamWrite + + if (_obj === undefined || _obj === null) { + obj = {} + } else if (_obj instanceof Error) { + obj = { [errorKey]: _obj } + if (msg === undefined) { + msg = _obj.message + } + } else { + obj = _obj + if (msg === undefined && _obj[messageKey] === undefined && _obj[errorKey]) { + msg = _obj[errorKey].message + } + } + + if (mixin) { + obj = mixinMergeStrategy(obj, mixin(obj, num, this)) + } + + const s = this[asJsonSym](obj, msg, num, t) + + const stream = this[streamSym] + if (stream[needsMetadataGsym] === true) { + stream.lastLevel = num + stream.lastObj = obj + stream.lastMsg = msg + stream.lastTime = t.slice(this[timeSliceIndexSym]) + stream.lastLogger = this // for child loggers + } + stream.write(streamWriteHook ? streamWriteHook(s) : s) +} + +function flush (cb) { + if (cb != null && typeof cb !== 'function') { + throw Error('callback must be a function') + } + + const stream = this[streamSym] + + if (typeof stream.flush === 'function') { + stream.flush(cb || noop) + } else if (cb) cb() +} diff --git a/node_modules/pino/lib/redaction.js b/node_modules/pino/lib/redaction.js new file mode 100644 index 0000000..4bcb6ca --- /dev/null +++ b/node_modules/pino/lib/redaction.js @@ -0,0 +1,114 @@ +'use strict' + +const Redact = require('@pinojs/redact') +const { redactFmtSym, wildcardFirstSym } = require('./symbols') + +// Custom rx regex equivalent to fast-redact's rx +const rx = /[^.[\]]+|\[([^[\]]*?)\]/g + +const CENSOR = '[Redacted]' +const strict = false // TODO should this be configurable? + +function redaction (opts, serialize) { + const { paths, censor, remove } = handle(opts) + + const shape = paths.reduce((o, str) => { + rx.lastIndex = 0 + const first = rx.exec(str) + const next = rx.exec(str) + + // ns is the top-level path segment, brackets + quoting removed. + let ns = first[1] !== undefined + ? first[1].replace(/^(?:"|'|`)(.*)(?:"|'|`)$/, '$1') + : first[0] + + if (ns === '*') { + ns = wildcardFirstSym + } + + // top level key: + if (next === null) { + o[ns] = null + return o + } + + // path with at least two segments: + // if ns is already redacted at the top level, ignore lower level redactions + if (o[ns] === null) { + return o + } + + const { index } = next + const nextPath = `${str.substr(index, str.length - 1)}` + + o[ns] = o[ns] || [] + + // shape is a mix of paths beginning with literal values and wildcard + // paths [ "a.b.c", "*.b.z" ] should reduce to a shape of + // { "a": [ "b.c", "b.z" ], *: [ "b.z" ] } + // note: "b.z" is in both "a" and * arrays because "a" matches the wildcard. + // (* entry has wildcardFirstSym as key) + if (ns !== wildcardFirstSym && o[ns].length === 0) { + // first time ns's get all '*' redactions so far + o[ns].push(...(o[wildcardFirstSym] || [])) + } + + if (ns === wildcardFirstSym) { + // new * path gets added to all previously registered literal ns's. 
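+      // (every literal top-level key also matches the wildcard, so the new
+      // wildcard sub-path has to be appended to each existing list too)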
+ Object.keys(o).forEach(function (k) { + if (o[k]) { + o[k].push(nextPath) + } + }) + } + + o[ns].push(nextPath) + return o + }, {}) + + // the redactor assigned to the format symbol key + // provides top level redaction for instances where + // an object is interpolated into the msg string + const result = { + [redactFmtSym]: Redact({ paths, censor, serialize, strict, remove }) + } + + const topCensor = (...args) => { + return typeof censor === 'function' ? serialize(censor(...args)) : serialize(censor) + } + + return [...Object.keys(shape), ...Object.getOwnPropertySymbols(shape)].reduce((o, k) => { + // top level key: + if (shape[k] === null) { + o[k] = (value) => topCensor(value, [k]) + } else { + const wrappedCensor = typeof censor === 'function' + ? (value, path) => { + return censor(value, [k, ...path]) + } + : censor + o[k] = Redact({ + paths: shape[k], + censor: wrappedCensor, + serialize, + strict, + remove + }) + } + return o + }, result) +} + +function handle (opts) { + if (Array.isArray(opts)) { + opts = { paths: opts, censor: CENSOR } + return opts + } + let { paths, censor = CENSOR, remove } = opts + if (Array.isArray(paths) === false) { throw Error('pino – redact must contain an array of strings') } + if (remove === true) censor = undefined + + return { paths, censor, remove } +} + +module.exports = redaction diff --git a/node_modules/pino/lib/symbols.js b/node_modules/pino/lib/symbols.js new file mode 100644 index 0000000..69f1a9d --- /dev/null +++ b/node_modules/pino/lib/symbols.js @@ -0,0 +1,74 @@ +'use strict' + +const setLevelSym = Symbol('pino.setLevel') +const getLevelSym = Symbol('pino.getLevel') +const levelValSym = Symbol('pino.levelVal') +const levelCompSym = Symbol('pino.levelComp') +const useLevelLabelsSym = Symbol('pino.useLevelLabels') +const useOnlyCustomLevelsSym = Symbol('pino.useOnlyCustomLevels') +const mixinSym = Symbol('pino.mixin') + +const lsCacheSym = Symbol('pino.lsCache') +const chindingsSym = Symbol('pino.chindings') + +const asJsonSym = Symbol('pino.asJson') +const writeSym = Symbol('pino.write') +const redactFmtSym = Symbol('pino.redactFmt') + +const timeSym = Symbol('pino.time') +const timeSliceIndexSym = Symbol('pino.timeSliceIndex') +const streamSym = Symbol('pino.stream') +const stringifySym = Symbol('pino.stringify') +const stringifySafeSym = Symbol('pino.stringifySafe') +const stringifiersSym = Symbol('pino.stringifiers') +const endSym = Symbol('pino.end') +const formatOptsSym = Symbol('pino.formatOpts') +const messageKeySym = Symbol('pino.messageKey') +const errorKeySym = Symbol('pino.errorKey') +const nestedKeySym = Symbol('pino.nestedKey') +const nestedKeyStrSym = Symbol('pino.nestedKeyStr') +const mixinMergeStrategySym = Symbol('pino.mixinMergeStrategy') +const msgPrefixSym = Symbol('pino.msgPrefix') + +const wildcardFirstSym = Symbol('pino.wildcardFirst') + +// public symbols, no need to use the same pino +// version for these +const serializersSym = Symbol.for('pino.serializers') +const formattersSym = Symbol.for('pino.formatters') +const hooksSym = Symbol.for('pino.hooks') +const needsMetadataGsym = Symbol.for('pino.metadata') + +module.exports = { + setLevelSym, + getLevelSym, + levelValSym, + levelCompSym, + useLevelLabelsSym, + mixinSym, + lsCacheSym, + chindingsSym, + asJsonSym, + writeSym, + serializersSym, + redactFmtSym, + timeSym, + timeSliceIndexSym, + streamSym, + stringifySym, + stringifySafeSym, + stringifiersSym, + endSym, + formatOptsSym, + messageKeySym, + errorKeySym, + nestedKeySym, + wildcardFirstSym, + 
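+  // Symbol.for('pino.metadata'); streams set this to true to receive
+  // lastLevel, lastMsg, etc. on each write (see lib/proto.js)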
needsMetadataGsym, + useOnlyCustomLevelsSym, + formattersSym, + hooksSym, + nestedKeyStrSym, + mixinMergeStrategySym, + msgPrefixSym +} diff --git a/node_modules/pino/lib/time.js b/node_modules/pino/lib/time.js new file mode 100644 index 0000000..8275674 --- /dev/null +++ b/node_modules/pino/lib/time.js @@ -0,0 +1,39 @@ +'use strict' + +const nullTime = () => '' + +const epochTime = () => `,"time":${Date.now()}` + +const unixTime = () => `,"time":${Math.round(Date.now() / 1000.0)}` + +const isoTime = () => `,"time":"${new Date(Date.now()).toISOString()}"` // using Date.now() for testability + +const NS_PER_MS = 1_000_000n +const NS_PER_SEC = 1_000_000_000n + +const startWallTimeNs = BigInt(Date.now()) * NS_PER_MS +const startHrTime = process.hrtime.bigint() + +const isoTimeNano = () => { + const elapsedNs = process.hrtime.bigint() - startHrTime + const currentTimeNs = startWallTimeNs + elapsedNs + + const secondsSinceEpoch = currentTimeNs / NS_PER_SEC + const nanosWithinSecond = currentTimeNs % NS_PER_SEC + + const msSinceEpoch = Number(secondsSinceEpoch * 1000n + nanosWithinSecond / 1_000_000n) + const date = new Date(msSinceEpoch) + + const year = date.getUTCFullYear() + const month = (date.getUTCMonth() + 1).toString().padStart(2, '0') + const day = date.getUTCDate().toString().padStart(2, '0') + const hours = date.getUTCHours().toString().padStart(2, '0') + const minutes = date.getUTCMinutes().toString().padStart(2, '0') + const seconds = date.getUTCSeconds().toString().padStart(2, '0') + + return `,"time":"${year}-${month}-${day}T${hours}:${minutes}:${seconds}.${nanosWithinSecond + .toString() + .padStart(9, '0')}Z"` +} + +module.exports = { nullTime, epochTime, unixTime, isoTime, isoTimeNano } diff --git a/node_modules/pino/lib/tools.js b/node_modules/pino/lib/tools.js new file mode 100644 index 0000000..1734546 --- /dev/null +++ b/node_modules/pino/lib/tools.js @@ -0,0 +1,423 @@ +'use strict' + +/* eslint no-prototype-builtins: 0 */ + +const diagChan = require('node:diagnostics_channel') +const format = require('quick-format-unescaped') +const { mapHttpRequest, mapHttpResponse } = require('pino-std-serializers') +const SonicBoom = require('sonic-boom') +const onExit = require('on-exit-leak-free') +const { + lsCacheSym, + chindingsSym, + writeSym, + serializersSym, + formatOptsSym, + endSym, + stringifiersSym, + stringifySym, + stringifySafeSym, + wildcardFirstSym, + nestedKeySym, + formattersSym, + messageKeySym, + errorKeySym, + nestedKeyStrSym, + msgPrefixSym +} = require('./symbols') +const { isMainThread } = require('worker_threads') +const transport = require('./transport') + +const asJsonChan = diagChan.tracingChannel('pino_asJson') + +function noop () { +} + +function genLog (level, hook) { + if (!hook) return LOG + + return function hookWrappedLog (...args) { + hook.call(this, args, LOG, level) + } + + function LOG (o, ...n) { + if (typeof o === 'object') { + let msg = o + if (o !== null) { + if (o.method && o.headers && o.socket) { + o = mapHttpRequest(o) + } else if (typeof o.setHeader === 'function') { + o = mapHttpResponse(o) + } + } + let formatParams + if (msg === null && n.length === 0) { + formatParams = [null] + } else { + msg = n.shift() + formatParams = n + } + // We do not use a coercive check for `msg` as it is + // measurably slower than the explicit checks. 
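+      // Prepend the configured msgPrefix, if any, to the outgoing message.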
+ if (typeof this[msgPrefixSym] === 'string' && msg !== undefined && msg !== null) { + msg = this[msgPrefixSym] + msg + } + this[writeSym](o, format(msg, formatParams, this[formatOptsSym]), level) + } else { + let msg = o === undefined ? n.shift() : o + + // We do not use a coercive check for `msg` as it is + // measurably slower than the explicit checks. + if (typeof this[msgPrefixSym] === 'string' && msg !== undefined && msg !== null) { + msg = this[msgPrefixSym] + msg + } + this[writeSym](null, format(msg, n, this[formatOptsSym]), level) + } + } +} + +// magically escape strings for json +// relying on their charCodeAt +// everything below 32 needs JSON.stringify() +// 34 and 92 happens all the time, so we +// have a fast case for them +function asString (str) { + let result = '' + let last = 0 + let found = false + let point = 255 + const l = str.length + if (l > 100) { + return JSON.stringify(str) + } + for (var i = 0; i < l && point >= 32; i++) { + point = str.charCodeAt(i) + if (point === 34 || point === 92) { + result += str.slice(last, i) + '\\' + last = i + found = true + } + } + if (!found) { + result = str + } else { + result += str.slice(last) + } + return point < 32 ? JSON.stringify(str) : '"' + result + '"' +} + +/** + * `asJson` wraps `_asJson` in order to facilitate generating diagnostics. + * + * @param {object} obj The merging object passed to the log method. + * @param {string} msg The log message passed to the log method. + * @param {number} num The log level number. + * @param {number} time The log time in milliseconds. + * + * @returns {string} + */ +function asJson (obj, msg, num, time) { + if (asJsonChan.hasSubscribers === false) { + return _asJson.call(this, obj, msg, num, time) + } + + const store = { instance: this, arguments } + return asJsonChan.traceSync(_asJson, store, this, obj, msg, num, time) +} + +/** + * `_asJson` parses all collected data and generates the finalized newline + * delimited JSON string. + * + * @param {object} obj The merging object passed to the log method. + * @param {string} msg The log message passed to the log method. + * @param {number} num The log level number. + * @param {number} time The log time in milliseconds. + * + * @returns {string} The finalized log string terminated with a newline. 
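+ * @example
+ * // shape, roughly: '{"level":30,"time":1690000000000,...bindings...,"msg":"hello"}\n'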
+ * @private + */ +function _asJson (obj, msg, num, time) { + const stringify = this[stringifySym] + const stringifySafe = this[stringifySafeSym] + const stringifiers = this[stringifiersSym] + const end = this[endSym] + const chindings = this[chindingsSym] + const serializers = this[serializersSym] + const formatters = this[formattersSym] + const messageKey = this[messageKeySym] + const errorKey = this[errorKeySym] + let data = this[lsCacheSym][num] + time + + // we need the child bindings added to the output first so instance logged + // objects can take precedence when JSON.parse-ing the resulting log line + data = data + chindings + + let value + if (formatters.log) { + obj = formatters.log(obj) + } + const wildcardStringifier = stringifiers[wildcardFirstSym] + let propStr = '' + for (const key in obj) { + value = obj[key] + if (Object.prototype.hasOwnProperty.call(obj, key) && value !== undefined) { + if (serializers[key]) { + value = serializers[key](value) + } else if (key === errorKey && serializers.err) { + value = serializers.err(value) + } + + const stringifier = stringifiers[key] || wildcardStringifier + + switch (typeof value) { + case 'undefined': + case 'function': + continue + case 'number': + /* eslint no-fallthrough: "off" */ + if (Number.isFinite(value) === false) { + value = null + } + // this case explicitly falls through to the next one + case 'boolean': + if (stringifier) value = stringifier(value) + break + case 'string': + value = (stringifier || asString)(value) + break + default: + value = (stringifier || stringify)(value, stringifySafe) + } + if (value === undefined) continue + const strKey = asString(key) + propStr += ',' + strKey + ':' + value + } + } + + let msgStr = '' + if (msg !== undefined) { + value = serializers[messageKey] ? serializers[messageKey](msg) : msg + const stringifier = stringifiers[messageKey] || wildcardStringifier + + switch (typeof value) { + case 'function': + break + case 'number': + if (Number.isFinite(value) === false) { + value = null + } + // this case explicitly falls through to the next one + case 'boolean': + if (stringifier) value = stringifier(value) + msgStr = ',"' + messageKey + '":' + value + break + case 'string': + value = (stringifier || asString)(value) + msgStr = ',"' + messageKey + '":' + value + break + default: + value = (stringifier || stringify)(value, stringifySafe) + msgStr = ',"' + messageKey + '":' + value + } + } + + if (this[nestedKeySym] && propStr) { + // place all the obj properties under the specified key + // the nested key is already formatted from the constructor + return data + this[nestedKeyStrSym] + propStr.slice(1) + '}' + msgStr + end + } else { + return data + propStr + msgStr + end + } +} + +function asChindings (instance, bindings) { + let value + let data = instance[chindingsSym] + const stringify = instance[stringifySym] + const stringifySafe = instance[stringifySafeSym] + const stringifiers = instance[stringifiersSym] + const wildcardStringifier = stringifiers[wildcardFirstSym] + const serializers = instance[serializersSym] + const formatter = instance[formattersSym].bindings + bindings = formatter(bindings) + + for (const key in bindings) { + value = bindings[key] + const valid = (key.length < 5 || (key !== 'level' && + key !== 'serializers' && + key !== 'formatters' && + key !== 'customLevels')) && + bindings.hasOwnProperty(key) && + value !== undefined + if (valid === true) { + value = serializers[key] ? 
serializers[key](value) : value + value = (stringifiers[key] || wildcardStringifier || stringify)(value, stringifySafe) + if (value === undefined) continue + data += ',"' + key + '":' + value + } + } + return data +} + +function hasBeenTampered (stream) { + return stream.write !== stream.constructor.prototype.write +} + +function buildSafeSonicBoom (opts) { + const stream = new SonicBoom(opts) + stream.on('error', filterBrokenPipe) + // If we are sync: false, we must flush on exit + if (!opts.sync && isMainThread) { + onExit.register(stream, autoEnd) + + stream.on('close', function () { + onExit.unregister(stream) + }) + } + return stream + + function filterBrokenPipe (err) { + // Impossible to replicate across all operating systems + /* istanbul ignore next */ + if (err.code === 'EPIPE') { + // If we get EPIPE, we should stop logging here + // however we have no control to the consumer of + // SonicBoom, so we just overwrite the write method + stream.write = noop + stream.end = noop + stream.flushSync = noop + stream.destroy = noop + return + } + stream.removeListener('error', filterBrokenPipe) + stream.emit('error', err) + } +} + +function autoEnd (stream, eventName) { + // This check is needed only on some platforms + /* istanbul ignore next */ + if (stream.destroyed) { + return + } + + if (eventName === 'beforeExit') { + // We still have an event loop, let's use it + stream.flush() + stream.on('drain', function () { + stream.end() + }) + } else { + // For some reason istanbul is not detecting this, but it's there + /* istanbul ignore next */ + // We do not have an event loop, so flush synchronously + stream.flushSync() + } +} + +function createArgsNormalizer (defaultOptions) { + return function normalizeArgs (instance, caller, opts = {}, stream) { + // support stream as a string + if (typeof opts === 'string') { + stream = buildSafeSonicBoom({ dest: opts }) + opts = {} + } else if (typeof stream === 'string') { + if (opts && opts.transport) { + throw Error('only one of option.transport or stream can be specified') + } + stream = buildSafeSonicBoom({ dest: stream }) + } else if (opts instanceof SonicBoom || opts.writable || opts._writableState) { + stream = opts + opts = {} + } else if (opts.transport) { + if (opts.transport instanceof SonicBoom || opts.transport.writable || opts.transport._writableState) { + throw Error('option.transport do not allow stream, please pass to option directly. e.g. pino(transport)') + } + if (opts.transport.targets && opts.transport.targets.length && opts.formatters && typeof opts.formatters.level === 'function') { + throw Error('option.transport.targets do not allow custom level formatters') + } + + let customLevels + if (opts.customLevels) { + customLevels = opts.useOnlyCustomLevels ? 
opts.customLevels : Object.assign({}, opts.levels, opts.customLevels) + } + stream = transport({ caller, ...opts.transport, levels: customLevels }) + } + opts = Object.assign({}, defaultOptions, opts) + opts.serializers = Object.assign({}, defaultOptions.serializers, opts.serializers) + opts.formatters = Object.assign({}, defaultOptions.formatters, opts.formatters) + + if (opts.prettyPrint) { + throw new Error('prettyPrint option is no longer supported, see the pino-pretty package (https://github.com/pinojs/pino-pretty)') + } + + const { enabled, onChild } = opts + if (enabled === false) opts.level = 'silent' + if (!onChild) opts.onChild = noop + if (!stream) { + if (!hasBeenTampered(process.stdout)) { + // If process.stdout.fd is undefined, it means that we are running + // in a worker thread. Let's assume we are logging to file descriptor 1. + stream = buildSafeSonicBoom({ fd: process.stdout.fd || 1 }) + } else { + stream = process.stdout + } + } + return { opts, stream } + } +} + +function stringify (obj, stringifySafeFn) { + try { + return JSON.stringify(obj) + } catch (_) { + try { + const stringify = stringifySafeFn || this[stringifySafeSym] + return stringify(obj) + } catch (_) { + return '"[unable to serialize, circular reference is too complex to analyze]"' + } + } +} + +function buildFormatters (level, bindings, log) { + return { + level, + bindings, + log + } +} + +/** + * Convert a string integer file descriptor to a proper native integer + * file descriptor. + * + * @param {string} destination The file descriptor string to attempt to convert. + * + * @returns {Number} + */ +function normalizeDestFileDescriptor (destination) { + const fd = Number(destination) + if (typeof destination === 'string' && Number.isFinite(fd)) { + return fd + } + // destination could be undefined if we are in a worker + if (destination === undefined) { + // This is stdout in UNIX systems + return 1 + } + return destination +} + +module.exports = { + noop, + buildSafeSonicBoom, + asChindings, + asJson, + genLog, + createArgsNormalizer, + stringify, + buildFormatters, + normalizeDestFileDescriptor +} diff --git a/node_modules/pino/lib/transport-stream.js b/node_modules/pino/lib/transport-stream.js new file mode 100644 index 0000000..22cb37e --- /dev/null +++ b/node_modules/pino/lib/transport-stream.js @@ -0,0 +1,56 @@ +'use strict' + +const { realImport, realRequire } = require('real-require') + +module.exports = loadTransportStreamBuilder + +/** + * Loads & returns a function to build transport streams + * @param {string} target + * @returns {Promise>} + * @throws {Error} In case the target module does not export a function + */ +async function loadTransportStreamBuilder (target) { + let fn + try { + const toLoad = target.startsWith('file://') ? target : 'file://' + target + + if (toLoad.endsWith('.ts') || toLoad.endsWith('.cts')) { + // TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ). + if (process[Symbol.for('ts-node.register.instance')]) { + realRequire('ts-node/register') + } else if (process.env && process.env.TS_NODE_DEV) { + realRequire('ts-node-dev') + } + // TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees. 
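+      // require() is used so a ts-node / ts-node-dev hook (registered above,
+      // when present) can transpile the TypeScript target on the fly.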
+ fn = realRequire(decodeURIComponent(target)) + } else { + fn = (await realImport(toLoad)) + } + } catch (error) { + // See this PR for details: https://github.com/pinojs/thread-stream/pull/34 + if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND')) { + fn = realRequire(target) + } else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') { + // When bundled with pkg, an undefined error is thrown when called with realImport + // When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport + // More info at: https://github.com/pinojs/thread-stream/issues/143 + try { + fn = realRequire(decodeURIComponent(target)) + } catch { + throw error + } + } else { + throw error + } + } + + // Depending on how the default export is performed, and on how the code is + // transpiled, we may find cases of two nested "default" objects. + // See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762 + if (typeof fn === 'object') fn = fn.default + if (typeof fn === 'object') fn = fn.default + if (typeof fn !== 'function') throw Error('exported worker is not a function') + + return fn +} diff --git a/node_modules/pino/lib/transport.js b/node_modules/pino/lib/transport.js new file mode 100644 index 0000000..8b5b48a --- /dev/null +++ b/node_modules/pino/lib/transport.js @@ -0,0 +1,167 @@ +'use strict' + +const { createRequire } = require('module') +const getCallers = require('./caller') +const { join, isAbsolute, sep } = require('node:path') +const sleep = require('atomic-sleep') +const onExit = require('on-exit-leak-free') +const ThreadStream = require('thread-stream') + +function setupOnExit (stream) { + // This is leak free, it does not leave event handlers + onExit.register(stream, autoEnd) + onExit.registerBeforeExit(stream, flush) + + stream.on('close', function () { + onExit.unregister(stream) + }) +} + +function buildStream (filename, workerData, workerOpts, sync) { + const stream = new ThreadStream({ + filename, + workerData, + workerOpts, + sync + }) + + stream.on('ready', onReady) + stream.on('close', function () { + process.removeListener('exit', onExit) + }) + + process.on('exit', onExit) + + function onReady () { + process.removeListener('exit', onExit) + stream.unref() + + if (workerOpts.autoEnd !== false) { + setupOnExit(stream) + } + } + + function onExit () { + /* istanbul ignore next */ + if (stream.closed) { + return + } + stream.flushSync() + // Apparently there is a very sporadic race condition + // that in certain OS would prevent the messages to be flushed + // because the thread might not have been created still. + // Unfortunately we need to sleep(100) in this case. + sleep(100) + stream.end() + } + + return stream +} + +function autoEnd (stream) { + stream.ref() + stream.flushSync() + stream.end() + stream.once('close', function () { + stream.unref() + }) +} + +function flush (stream) { + stream.flushSync() +} + +function transport (fullOptions) { + const { pipeline, targets, levels, dedupe, worker = {}, caller = getCallers(), sync = false } = fullOptions + + const options = { + ...fullOptions.options + } + + // Backwards compatibility + const callers = typeof caller === 'string' ? [caller] : caller + + // This will be eventually modified by bundlers + const bundlerOverrides = '__bundlerPathsOverrides' in globalThis ? 
globalThis.__bundlerPathsOverrides : {} + + let target = fullOptions.target + + if (target && targets) { + throw new Error('only one of target or targets can be specified') + } + + if (targets) { + target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js') + options.targets = targets.filter(dest => dest.target).map((dest) => { + return { + ...dest, + target: fixTarget(dest.target) + } + }) + options.pipelines = targets.filter(dest => dest.pipeline).map((dest) => { + return dest.pipeline.map((t) => { + return { + ...t, + level: dest.level, // duplicate the pipeline `level` property defined in the upper level + target: fixTarget(t.target) + } + }) + }) + } else if (pipeline) { + target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js') + options.pipelines = [pipeline.map((dest) => { + return { + ...dest, + target: fixTarget(dest.target) + } + })] + } + + if (levels) { + options.levels = levels + } + + if (dedupe) { + options.dedupe = dedupe + } + + options.pinoWillSendConfig = true + + return buildStream(fixTarget(target), options, worker, sync) + + function fixTarget (origin) { + origin = bundlerOverrides[origin] || origin + + if (isAbsolute(origin) || origin.indexOf('file://') === 0) { + return origin + } + + if (origin === 'pino/file') { + return join(__dirname, '..', 'file.js') + } + + let fixTarget + + for (const filePath of callers) { + try { + const context = filePath === 'node:repl' + ? process.cwd() + sep + : filePath + + fixTarget = createRequire(context).resolve(origin) + break + } catch (err) { + // Silent catch + continue + } + } + + if (!fixTarget) { + throw new Error(`unable to determine transport target for "${origin}"`) + } + + return fixTarget + } +} + +module.exports = transport diff --git a/node_modules/pino/lib/worker.js b/node_modules/pino/lib/worker.js new file mode 100644 index 0000000..0bc035a --- /dev/null +++ b/node_modules/pino/lib/worker.js @@ -0,0 +1,194 @@ +'use strict' + +const EE = require('node:events') +const { pipeline, PassThrough } = require('node:stream') +const pino = require('../pino.js') +const build = require('pino-abstract-transport') +const loadTransportStreamBuilder = require('./transport-stream') + +// This file is not checked by the code coverage tool, +// as it is not reliable. + +/* istanbul ignore file */ + +/* + * > Multiple targets & pipelines + * + * + * ┌─────────────────────────────────────────────────┐ ┌─────┐ + * │ │ │ p │ + * │ │ │ i │ + * │ target │ │ n │ + * │ │ ────────────────────────────────┼────┤ o │ + * │ targets │ target │ │ . 
│ + * │ ────────────► │ ────────────────────────────────┼────┤ m │ source + * │ │ target │ │ u │ │ + * │ │ ────────────────────────────────┼────┤ l │ │write + * │ │ │ │ t │ ▼ + * │ │ pipeline ┌───────────────┐ │ │ i │ ┌────────┐ + * │ │ ──────────► │ PassThrough ├───┼────┤ s ├──────┤ │ + * │ │ └───────────────┘ │ │ t │ write│ Thread │ + * │ │ │ │ r │◄─────┤ Stream │ + * │ │ pipeline ┌───────────────┐ │ │ e │ │ │ + * │ │ ──────────► │ PassThrough ├───┼────┤ a │ └────────┘ + * │ └───────────────┘ │ │ m │ + * │ │ │ │ + * └─────────────────────────────────────────────────┘ └─────┘ + * + * + * + * > One single pipeline or target + * + * + * source + * │ + * ┌────────────────────────────────────────────────┐ │write + * │ │ ▼ + * │ │ ┌────────┐ + * │ targets │ target │ │ │ + * │ ────────────► │ ──────────────────────────────┤ │ │ + * │ │ │ │ │ + * │ ├──────┤ │ + * │ │ │ │ + * │ │ │ │ + * │ OR │ │ │ + * │ │ │ │ + * │ │ │ │ + * │ ┌──────────────┐ │ │ │ + * │ targets │ pipeline │ │ │ │ Thread │ + * │ ────────────► │ ────────────►│ PassThrough ├─┤ │ Stream │ + * │ │ │ │ │ │ │ + * │ └──────────────┘ │ │ │ + * │ │ │ │ + * │ OR │ write│ │ + * │ │◄─────┤ │ + * │ │ │ │ + * │ ┌──────────────┐ │ │ │ + * │ pipeline │ │ │ │ │ + * │ ──────────────►│ PassThrough ├────────────────┤ │ │ + * │ │ │ │ │ │ + * │ └──────────────┘ │ └────────┘ + * │ │ + * │ │ + * └────────────────────────────────────────────────┘ + */ + +module.exports = async function ({ targets, pipelines, levels, dedupe }) { + const targetStreams = [] + + // Process targets + if (targets && targets.length) { + targets = await Promise.all(targets.map(async (t) => { + const fn = await loadTransportStreamBuilder(t.target) + const stream = await fn(t.options) + return { + level: t.level, + stream + } + })) + + targetStreams.push(...targets) + } + + // Process pipelines + if (pipelines && pipelines.length) { + pipelines = await Promise.all( + pipelines.map(async (p) => { + let level + const pipeDests = await Promise.all( + p.map(async (t) => { + // level assigned to pipeline is duplicated over all its targets, just store it + level = t.level + const fn = await loadTransportStreamBuilder(t.target) + const stream = await fn(t.options) + return stream + } + )) + + return { + level, + stream: createPipeline(pipeDests) + } + }) + ) + targetStreams.push(...pipelines) + } + + // Skip building the multistream step if either one single pipeline or target is defined and + // return directly the stream instance back to TreadStream. + // This is equivalent to define either: + // + // pino.transport({ target: ... }) + // + // OR + // + // pino.transport({ pipeline: ... }) + if (targetStreams.length === 1) { + return targetStreams[0].stream + } else { + return build(process, { + parse: 'lines', + metadata: true, + close (err, cb) { + let expected = 0 + for (const transport of targetStreams) { + expected++ + transport.stream.on('close', closeCb) + transport.stream.end() + } + + function closeCb () { + if (--expected === 0) { + cb(err) + } + } + } + }) + } + + // TODO: Why split2 was not used for pipelines? 
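+  // process() receives the line-parsed stream from pino-abstract-transport and
+  // fans each record out to every configured target via pino.multistream().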
+ function process (stream) { + const multi = pino.multistream(targetStreams, { levels, dedupe }) + // TODO manage backpressure + stream.on('data', function (chunk) { + const { lastTime, lastMsg, lastObj, lastLevel } = this + multi.lastLevel = lastLevel + multi.lastTime = lastTime + multi.lastMsg = lastMsg + multi.lastObj = lastObj + + // TODO handle backpressure + multi.write(chunk + '\n') + }) + } + + /** + * Creates a pipeline using the provided streams and return an instance of `PassThrough` stream + * as a source for the pipeline. + * + * @param {(TransformStream|WritableStream)[]} streams An array of streams. + * All intermediate streams in the array *MUST* be `Transform` streams and only the last one `Writable`. + * @returns A `PassThrough` stream instance representing the source stream of the pipeline + */ + function createPipeline (streams) { + const ee = new EE() + const stream = new PassThrough({ + autoDestroy: true, + destroy (_, cb) { + ee.on('error', cb) + ee.on('closed', cb) + } + }) + + pipeline(stream, ...streams, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + ee.emit('error', err) + return + } + + ee.emit('closed') + }) + + return stream + } +} diff --git a/node_modules/pino/package.json b/node_modules/pino/package.json new file mode 100644 index 0000000..2227c34 --- /dev/null +++ b/node_modules/pino/package.json @@ -0,0 +1,120 @@ +{ + "name": "pino", + "version": "10.1.0", + "description": "super fast, all natural json logger", + "main": "pino.js", + "type": "commonjs", + "types": "pino.d.ts", + "browser": "./browser.js", + "scripts": { + "borp": "borp --timeout 60000 --coverage --check-coverage --lines 95 --functions 95 --branches 95 --statements 95", + "docs": "docsify serve", + "browser-test": "airtap --local 8080 test/browser*test.js", + "lint": "eslint .", + "prepublishOnly": "node test/internals/version.test.js", + "test": "npm run lint && npm run transpile && npm run borp && jest test/jest && npm run test-types", + "test-ci": "npm run lint && npm run transpile && npm run borp && npm run test-types", + "test-ci-pnpm": "pnpm run lint && npm run transpile && borp --timeout 60000 && pnpm run test-types", + "test-ci-yarn-pnp": "yarn run lint && npm run transpile && borp --timeout 60000", + "test-types": "tsc && tsd && ts-node test/types/pino.ts && attw --pack .", + "test:smoke": "smoker smoke:pino && smoker smoke:browser && smoker smoke:file", + "smoke:pino": "node ./pino.js", + "smoke:browser": "node ./browser.js", + "smoke:file": "node ./file.js", + "transpile": "node ./test/fixtures/ts/transpile.cjs", + "cov-ui": "tap --ts --coverage-report=html", + "bench": "node benchmarks/utils/runbench all", + "bench-basic": "node benchmarks/utils/runbench basic", + "bench-object": "node benchmarks/utils/runbench object", + "bench-deep-object": "node benchmarks/utils/runbench deep-object", + "bench-multi-arg": "node benchmarks/utils/runbench multi-arg", + "bench-long-string": "node benchmarks/utils/runbench long-string", + "bench-child": "node benchmarks/utils/runbench child", + "bench-child-child": "node benchmarks/utils/runbench child-child", + "bench-child-creation": "node benchmarks/utils/runbench child-creation", + "bench-formatters": "node benchmarks/utils/runbench formatters", + "update-bench-doc": "node benchmarks/utils/generate-benchmark-doc > docs/benchmarks.md" + }, + "bin": { + "pino": "./bin.js" + }, + "precommit": "test", + "repository": { + "type": "git", + "url": "git+https://github.com/pinojs/pino.git" + }, + "keywords": [ + "fast", 
+ "logger", + "stream", + "json" + ], + "author": "Matteo Collina ", + "contributors": [ + "David Mark Clements ", + "James Sumners ", + "Thomas Watson Steen (https://twitter.com/wa7son)" + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/pinojs/pino/issues" + }, + "homepage": "https://getpino.io", + "devDependencies": { + "@arethetypeswrong/cli": "^0.18.1", + "@matteo.collina/tspl": "^0.2.0", + "@types/flush-write-stream": "^1.0.0", + "@types/node": "^24.0.8", + "airtap": "5.0.0", + "bole": "^5.0.5", + "borp": "^0.20.2", + "bunyan": "^1.8.14", + "debug": "^4.3.4", + "docsify-cli": "^4.4.4", + "eslint": "^9.37.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-n": "17.23.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^6.0.0", + "execa": "^5.0.0", + "fastbench": "^1.0.1", + "flush-write-stream": "^2.0.0", + "import-fresh": "^3.2.1", + "jest": "^30.0.3", + "log": "^6.0.0", + "loglevel": "^1.6.7", + "midnight-smoker": "1.1.1", + "neostandard": "^0.12.2", + "pino-pretty": "^13.0.0", + "pre-commit": "^1.2.2", + "proxyquire": "^2.1.3", + "pump": "^3.0.0", + "rimraf": "^6.0.1", + "semver": "^7.3.7", + "split2": "^4.0.0", + "steed": "^1.1.3", + "strip-ansi": "^6.0.0", + "tape": "^5.5.3", + "through2": "^4.0.0", + "ts-node": "^10.9.1", + "tsd": "^0.33.0", + "typescript": "~5.9.2", + "winston": "^3.7.2" + }, + "dependencies": { + "atomic-sleep": "^1.0.0", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "@pinojs/redact": "^0.4.0", + "sonic-boom": "^4.0.1", + "thread-stream": "^3.0.0" + }, + "tsd": { + "directory": "test/types" + } +} diff --git a/node_modules/pino/pino.d.ts b/node_modules/pino/pino.d.ts new file mode 100644 index 0000000..9a9e9ca --- /dev/null +++ b/node_modules/pino/pino.d.ts @@ -0,0 +1,904 @@ +// Project: https://github.com/pinojs/pino.git, http://getpino.io +// Definitions by: Peter Snider +// BendingBender +// Christian Rackerseder +// GP +// Alex Ferrando +// Oleksandr Sidko +// Harris Lummis +// Raoul Jaeckel +// Cory Donkin +// Adam Vigneaux +// Austin Beer +// Michel Nemnom +// Igor Savin +// James Bromwell +// TypeScript Version: 4.4 + +import type { EventEmitter } from "events"; +import * as pinoStdSerializers from "pino-std-serializers"; +import type { SonicBoom, SonicBoomOpts } from "sonic-boom"; +import type { WorkerOptions } from "worker_threads"; + +declare namespace pino { + //// Non-exported types and interfaces + + // ToDo https://github.com/pinojs/thread-stream/issues/24 + type ThreadStream = any + + type TimeFn = () => string; + type MixinFn = (mergeObject: object, level: number, logger:Logger) => object; + type MixinMergeStrategyFn = (mergeObject: object, mixinObject: object) => object; + + type CustomLevelLogger = { + /** + * Define additional logging levels. + */ + customLevels: { [level in CustomLevels]: number }; + /** + * Use only defined `customLevels` and omit Pino's levels. + */ + useOnlyCustomLevels: UseOnlyCustomLevels; + } & { + // This will override default log methods + [K in Exclude]: UseOnlyCustomLevels extends true ? never : LogFn; + } & { + [level in CustomLevels]: LogFn; + }; + + /** + * A synchronous callback that will run on each creation of a new child. + * @param child: The newly created child logger instance. 
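+     * @example
+     * // a minimal illustrative sketch (not from the upstream docs): observe each child as it is created
+     * const logger = pino({ onChild: (child) => { child.info('child created') } })
+     * logger.child({ module: 'db' }) // runs the callback synchronously with the new child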
+ */ + type OnChildCallback = (child: Logger) => void + + export interface redactOptions { + paths: string[]; + censor?: string | ((value: unknown, path: string[]) => unknown); + remove?: boolean; + } + + export interface LoggerExtras extends EventEmitter { + /** + * Exposes the Pino package version. Also available on the exported pino function. + */ + readonly version: string; + + levels: LevelMapping; + + /** + * Outputs the level as a string instead of integer. + */ + useLevelLabels: boolean; + /** + * Returns the integer value for the logger instance's logging level. + */ + levelVal: number; + + /** + * Creates a child logger, setting all key-value pairs in `bindings` as properties in the log lines. All serializers will be applied to the given pair. + * Child loggers use the same output stream as the parent and inherit the current log level of the parent at the time they are spawned. + * From v2.x.x the log level of a child is mutable (whereas in v1.x.x it was immutable), and can be set independently of the parent. + * If a `level` property is present in the object passed to `child` it will override the child logger level. + * + * @param bindings: an object of key-value pairs to include in log lines as properties. + * @param options: an options object that will override child logger inherited options. + * @returns a child logger instance. + */ + child(bindings: Bindings, options?: ChildLoggerOptions): Logger; + + /** + * This can be used to modify the callback function on creation of a new child. + */ + onChild: OnChildCallback; + + /** + * Registers a listener function that is triggered when the level is changed. + * Note: When browserified, this functionality will only be available if the `events` module has been required elsewhere + * (e.g. if you're using streams in the browser). This allows for a trade-off between bundle size and functionality. + * + * @param event: only ever fires the `'level-change'` event + * @param listener: The listener is passed four arguments: `levelLabel`, `levelValue`, `previousLevelLabel`, `previousLevelValue`. + */ + on(event: "level-change", listener: LevelChangeEventListener): this; + addListener(event: "level-change", listener: LevelChangeEventListener): this; + once(event: "level-change", listener: LevelChangeEventListener): this; + prependListener(event: "level-change", listener: LevelChangeEventListener): this; + prependOnceListener(event: "level-change", listener: LevelChangeEventListener): this; + removeListener(event: "level-change", listener: LevelChangeEventListener): this; + + /** + * A utility method for determining if a given log level will write to the destination. + */ + isLevelEnabled(level: LevelWithSilentOrString): boolean; + + /** + * Returns an object containing all the current bindings, cloned from the ones passed in via logger.child(). + */ + bindings(): Bindings; + + /** + * Adds to the bindings of this logger instance. + * Note: Does not overwrite bindings. Can potentially result in duplicate keys in log lines. + * + * @param bindings: an object of key-value pairs to include in log lines as properties. + */ + setBindings(bindings: Bindings): void; + + /** + * Flushes the content of the buffer when using pino.destination({ sync: false }). + * call the callback when finished + */ + flush(cb?: (err?: Error) => void): void; + } + + //// Exported types and interfaces + export interface BaseLogger { + /** + * Set this property to the desired logging level. 
In order of priority, available levels are:
+         *
+         * - 'fatal'
+         * - 'error'
+         * - 'warn'
+         * - 'info'
+         * - 'debug'
+         * - 'trace'
+         *
+         * The logging level is a __minimum__ level. For instance, if `logger.level` is `'info'` then all `'fatal'`, `'error'`, `'warn'`,
+         * and `'info'` logs will be enabled.
+         *
+         * You can pass `'silent'` to disable logging.
+         */
+        level: LevelWithSilentOrString;
+
+        /**
+         * Log at `'fatal'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
+         * If more args follow `msg`, these will be used to format `msg` using `util.format`.
+         *
+         * @typeParam T: the interface of the object being serialized. Default is object.
+         * @param obj: object to be serialized
+         * @param msg: the log message to write
+         * @param ...args: format string values when `msg` is a format string
+         */
+        fatal: LogFn;
+        /**
+         * Log at `'error'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
+         * If more args follow `msg`, these will be used to format `msg` using `util.format`.
+         *
+         * @typeParam T: the interface of the object being serialized. Default is object.
+         * @param obj: object to be serialized
+         * @param msg: the log message to write
+         * @param ...args: format string values when `msg` is a format string
+         */
+        error: LogFn;
+        /**
+         * Log at `'warn'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
+         * If more args follow `msg`, these will be used to format `msg` using `util.format`.
+         *
+         * @typeParam T: the interface of the object being serialized. Default is object.
+         * @param obj: object to be serialized
+         * @param msg: the log message to write
+         * @param ...args: format string values when `msg` is a format string
+         */
+        warn: LogFn;
+        /**
+         * Log at `'info'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
+         * If more args follow `msg`, these will be used to format `msg` using `util.format`.
+         *
+         * @typeParam T: the interface of the object being serialized. Default is object.
+         * @param obj: object to be serialized
+         * @param msg: the log message to write
+         * @param ...args: format string values when `msg` is a format string
+         */
+        info: LogFn;
+        /**
+         * Log at `'debug'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
+         * If more args follow `msg`, these will be used to format `msg` using `util.format`.
+         *
+         * @typeParam T: the interface of the object being serialized. Default is object.
+         * @param obj: object to be serialized
+         * @param msg: the log message to write
+         * @param ...args: format string values when `msg` is a format string
+         */
+        debug: LogFn;
+        /**
+         * Log at `'trace'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
+         * If more args follow `msg`, these will be used to format `msg` using `util.format`.
+         *
+         * @typeParam T: the interface of the object being serialized. Default is object.
+         * @param obj: object to be serialized
+         * @param msg: the log message to write
+         * @param ...args: format string values when `msg` is a format string
+         */
+        trace: LogFn;
+        /**
+         * Noop function.
+         */
+        silent: LogFn;
+
+        /**
+         * Get `msgPrefix` of the logger instance.
+         *
+         * See {@link https://github.com/pinojs/pino/blob/main/docs/api.md#msgprefix-string}.
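+         * @example
+         * // illustrative sketch: the prefix is set at creation time and read back here
+         * const logger = pino({ msgPrefix: 'app: ' })
+         * logger.msgPrefix // => 'app: '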
+ */ + get msgPrefix(): string | undefined; + } + + export type Bindings = Record; + + export type Level = "fatal" | "error" | "warn" | "info" | "debug" | "trace"; + export type LevelOrString = Level | (string & {}); + export type LevelWithSilent = Level | "silent"; + export type LevelWithSilentOrString = LevelWithSilent | (string & {}); + + export type SerializerFn = (value: any) => any; + export type WriteFn = (o: object) => void; + + export type LevelChangeEventListener = ( + lvl: LevelWithSilentOrString, + val: number, + prevLvl: LevelWithSilentOrString, + prevVal: number, + logger: Logger + ) => void; + + export type LogDescriptor = Record; + + export type Logger = BaseLogger & LoggerExtras & CustomLevelLogger; + + export type SerializedError = pinoStdSerializers.SerializedError; + export type SerializedResponse = pinoStdSerializers.SerializedResponse; + export type SerializedRequest = pinoStdSerializers.SerializedRequest; + + + export interface TransportTargetOptions> { + target: string + options?: TransportOptions + level?: LevelWithSilentOrString + } + + export interface TransportBaseOptions> { + options?: TransportOptions + worker?: WorkerOptions & { autoEnd?: boolean} + } + + export interface TransportSingleOptions> extends TransportBaseOptions{ + target: string + } + + export interface TransportPipelineOptions> extends TransportBaseOptions{ + pipeline: TransportSingleOptions[] + level?: LevelWithSilentOrString + } + + export interface TransportMultiOptions> extends TransportBaseOptions{ + targets: readonly (TransportTargetOptions|TransportPipelineOptions)[], + levels?: Record + dedupe?: boolean + } + + export interface MultiStreamOptions { + levels?: Record + dedupe?: boolean + } + + export interface DestinationStream { + write(msg: string): void; + } + + interface DestinationStreamHasMetadata { + [symbols.needsMetadataGsym]: true; + lastLevel: number; + lastTime: string; + lastMsg: string; + lastObj: object; + lastLogger: Logger; + } + + export type DestinationStreamWithMetadata = DestinationStream & ({ [symbols.needsMetadataGsym]?: false } | DestinationStreamHasMetadata); + + export interface StreamEntry { + stream: DestinationStream + level?: TLevel + } + + export interface MultiStreamRes { + write: (data: any) => void, + add: (dest: StreamEntry | DestinationStream) => MultiStreamRes, + flushSync: () => void, + minLevel: number, + streams: StreamEntry[], + clone(level: TLevel): MultiStreamRes, + } + + export interface LevelMapping { + /** + * Returns the mappings of level names to their respective internal number representation. + */ + values: { [level: string]: number }; + /** + * Returns the mappings of level internal level numbers to their string representations. + */ + labels: { [level: number]: string }; + } + + type PlaceholderSpecifier = 'd' | 's' | 'j' | 'o' | 'O'; + type PlaceholderTypeMapping = T extends 'd' + ? number + : T extends 's' + ? unknown + : T extends 'j' | 'o' | 'O' + ? object + : never; + + type ParseLogFnArgs< + T, + Acc extends unknown[] = [], + > = T extends `${infer _}%${infer Placeholder}${infer Rest}` + ? Placeholder extends PlaceholderSpecifier + ? ParseLogFnArgs]> + : ParseLogFnArgs + : Acc; + + export interface LogFnFields {} + + export interface LogFn { + // Simple case: When first argument is always a string message, use parsed arguments directly + (msg: TMsg, ...args: ParseLogFnArgs): void; + // Complex case: When first argument can be any type - if it's a string, no message needed; otherwise require a message + (obj: T extends object ? 
T & LogFnFields : T, msg?: T extends string ? never : TMsg, ...args: ParseLogFnArgs<TMsg> | []): void;
+        // Complex case with type safety: Same as above but ensures ParseLogFnArgs is a valid tuple before using it
+        <T, TMsg extends string>(obj: T extends object ? T & LogFnFields : T, msg?: T extends string ? never : TMsg, ...args: ParseLogFnArgs<TMsg> extends [unknown, ...unknown[]] ? ParseLogFnArgs<TMsg> : unknown[]): void;
+    }
+
+    export interface LoggerOptions<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean> {
+        transport?: TransportSingleOptions | TransportMultiOptions | TransportPipelineOptions
+        /**
+         * Avoids errors caused by circular references in the object tree. Default: `true`.
+         */
+        safe?: boolean;
+        /**
+         * The name of the logger. Default: `undefined`.
+         */
+        name?: string;
+        /**
+         * An object containing functions for custom serialization of objects.
+         * These functions should return a JSONifiable object and they should never throw. When logging an object,
+         * each top-level property matching the exact key of a serializer will be serialized using the defined serializer.
+         */
+        serializers?: { [key: string]: SerializerFn };
+        /**
+         * Enables or disables the inclusion of a timestamp in the log message. If a function is supplied, it must
+         * synchronously return a JSON string representation of the time. If set to `false`, no timestamp will be included in the output.
+         * See stdTimeFunctions for a set of available functions for passing in as a value for this option.
+         * Caution: any sort of formatted time will significantly slow down Pino's performance.
+         */
+        timestamp?: TimeFn | boolean;
+        /**
+         * One of the supported levels or `silent` to disable logging. Any other value defines a custom level and
+         * requires supplying a level value via `levelVal`. Default: 'info'.
+         */
+        level?: LevelWithSilentOrString;
+
+        /**
+         * Use this option to define additional logging levels.
+         * The keys of the object correspond to the namespace of the log level, and the values should be the numerical value of the level.
+         */
+        customLevels?: { [level in CustomLevels]: number };
+
+        /**
+         * Use this option to only use defined `customLevels` and omit Pino's levels.
+         * Logger's default `level` must be changed to a value in `customLevels` in order to use `useOnlyCustomLevels`.
+         * Warning: this option may not be supported by downstream transports.
+         */
+        useOnlyCustomLevels?: UseOnlyCustomLevels;
+
+        /**
+         * Use this option to define custom comparison of log levels.
+         * Useful to compare custom log levels or non-standard level values.
+         * Default: "ASC"
+         */
+        levelComparison?: "ASC" | "DESC" | ((current: number, expected: number) => boolean);
+
+        /**
+         * If provided, the `mixin` function is called each time one of the active logging methods
+         * is called. The function must synchronously return an object. The properties of the
+         * returned object will be added to the logged JSON.
+         */
+        mixin?: MixinFn;
+
+        /**
+         * If provided, the `mixinMergeStrategy` function is called each time one of the active
+         * logging methods is called. The first parameter is the value `mergeObject` or an empty object,
+         * the second parameter is the value resulting from `mixin()` or an empty object.
+         * The function must synchronously return an object.
+         */
+        mixinMergeStrategy?: MixinMergeStrategyFn
+
+        /**
+         * As an array, the redact option specifies paths that should have their values redacted from any log output.
+         *
+         * Each path must be a string using a syntax which corresponds to JavaScript dot and bracket notation.
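+         *
+         * @example
+         * // illustrative sketch: censor a top-level key and a nested path
+         * const logger = pino({ redact: ['password', 'user.token'] })
+         * logger.info({ password: 'hunter2', user: { token: 'abc' } })
+         * // both values appear as the default censor '[Redacted]' in the output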
+ * + * If an object is supplied, three options can be specified: + * + * paths (String[]): Required. An array of paths + * censor (String): Optional. A value to overwrite key which are to be redacted. Default: '[Redacted]' + * remove (Boolean): Optional. Instead of censoring the value, remove both the key and the value. Default: false + */ + redact?: string[] | redactOptions; + + /** + * When defining a custom log level via level, set to an integer value to define the new level. Default: `undefined`. + */ + levelVal?: number; + /** + * The string key for the 'message' in the JSON object. Default: "msg". + */ + messageKey?: string; + /** + * The string key for the 'error' in the JSON object. Default: "err". + */ + errorKey?: string; + /** + * The string key to place any logged object under. + */ + nestedKey?: string; + /** + * Enables logging. Default: `true`. + */ + enabled?: boolean; + /** + * Browser only, see http://getpino.io/#/docs/browser. + */ + browser?: { + /** + * The `asObject` option will create a pino-like log object instead of passing all arguments to a console + * method. When `write` is set, `asObject` will always be true. + * + * @example + * pino.info('hi') // creates and logs {msg: 'hi', level: 30, time: } + */ + asObject?: boolean; + /** + * The `asObjectBindingsOnly` option is similar to `asObject` but will keep the message and arguments + * unformatted. This allows to defer formatting the message to the actual call to `console` methods, + * where browsers then have richer formatting in their devtools than when pino will format the message to + * a string first. + * + * @example + * pino.info('hello %s', 'world') // creates and logs {level: 30, time: }, 'hello %s', 'world' + */ + asObjectBindingsOnly?: boolean; + formatters?: { + /** + * Changes the shape of the log level. + * The default shape is { level: number }. + */ + level?: (label: string, number: number) => object; + /** + * Changes the shape of the log object. + */ + log?: (object: Record) => Record; + } + /** + * Instead of passing log messages to `console.log` they can be passed to a supplied function. If `write` is + * set to a single function, all logging objects are passed to this function. If `write` is an object, it + * can have methods that correspond to the levels. When a message is logged at a given level, the + * corresponding method is called. If a method isn't present, the logging falls back to using the `console`. + * + * @example + * const pino = require('pino')({ + * browser: { + * write: (o) => { + * // do something with o + * } + * } + * }) + * + * @example + * const pino = require('pino')({ + * browser: { + * write: { + * info: function (o) { + * //process info log object + * }, + * error: function (o) { + * //process error log object + * } + * } + * } + * }) + */ + write?: + | WriteFn + | ({ + fatal?: WriteFn; + error?: WriteFn; + warn?: WriteFn; + info?: WriteFn; + debug?: WriteFn; + trace?: WriteFn; + } & { [logLevel: string]: WriteFn }); + + /** + * The serializers provided to `pino` are ignored by default in the browser, including the standard + * serializers provided with Pino. Since the default destination for log messages is the console, values + * such as `Error` objects are enhanced for inspection, which they otherwise wouldn't be if the Error + * serializer was enabled. We can turn all serializers on or we can selectively enable them via an array. 
+ * + * When `serialize` is `true` the standard error serializer is also enabled (see + * {@link https://github.com/pinojs/pino/blob/master/docs/api.md#pino-stdserializers}). This is a global + * serializer which will apply to any `Error` objects passed to the logger methods. + * + * If `serialize` is an array the standard error serializer is also automatically enabled, it can be + * explicitly disabled by including a string in the serialize array: `!stdSerializers.err` (see example). + * + * The `serialize` array also applies to any child logger serializers (see + * {@link https://github.com/pinojs/pino/blob/master/docs/api.md#bindingsserializers-object} for how to + * set child-bound serializers). + * + * Unlike server pino the serializers apply to every object passed to the logger method, if the `asObject` + * option is `true`, this results in the serializers applying to the first object (as in server pino). + * + * For more info on serializers see + * {@link https://github.com/pinojs/pino/blob/master/docs/api.md#serializers-object}. + * + * @example + * const pino = require('pino')({ + * browser: { + * serialize: true + * } + * }) + * + * @example + * const pino = require('pino')({ + * serializers: { + * custom: myCustomSerializer, + * another: anotherSerializer + * }, + * browser: { + * serialize: ['custom'] + * } + * }) + * // following will apply myCustomSerializer to the custom property, + * // but will not apply anotherSerializer to another key + * pino.info({custom: 'a', another: 'b'}) + * + * @example + * const pino = require('pino')({ + * serializers: { + * custom: myCustomSerializer, + * another: anotherSerializer + * }, + * browser: { + * serialize: ['!stdSerializers.err', 'custom'] //will not serialize Errors, will serialize `custom` keys + * } + * }) + */ + serialize?: boolean | string[]; + + /** + * Options for transmission of logs. + * + * @example + * const pino = require('pino')({ + * browser: { + * transmit: { + * level: 'warn', + * send: function (level, logEvent) { + * if (level === 'warn') { + * // maybe send the logEvent to a separate endpoint + * // or maybe analyse the messages further before sending + * } + * // we could also use the `logEvent.level.value` property to determine + * // numerical value + * if (logEvent.level.value >= 50) { // covers error and fatal + * + * // send the logEvent somewhere + * } + * } + * } + * } + * }) + */ + transmit?: { + /** + * Specifies the minimum level (inclusive) of when the `send` function should be called, if not supplied + * the `send` function will be called based on the main logging `level` (set via `options.level`, + * defaulting to `info`). + */ + level?: LevelOrString; + /** + * Remotely record log messages. + * + * @description Called after writing the log message. + */ + send: (level: Level, logEvent: LogEvent) => void; + }; + /** + * The disabled option will disable logging in browser if set to true, by default it is set to false. + * + * @example + * const pino = require('pino')({browser: {disabled: true}}) + */ + disabled?: boolean; + }; + /** + * key-value object added as child logger to each log line. If set to null the base child logger is not added + */ + base?: { [key: string]: any } | null; + + /** + * An object containing functions for formatting the shape of the log lines. + * These functions should return a JSONifiable object and should never throw. + * These functions allow for full customization of the resulting log lines. 
+ * For example, they can be used to change the level key name or to enrich the default metadata. + */ + formatters?: { + /** + * Changes the shape of the log level. + * The default shape is { level: number }. + * The function takes two arguments, the label of the level (e.g. 'info') and the numeric value (e.g. 30). + */ + level?: (label: string, number: number) => object; + /** + * Changes the shape of the bindings. + * The default shape is { pid, hostname }. + * The function takes a single argument, the bindings object. + * It will be called every time a child logger is created. + */ + bindings?: (bindings: Bindings) => object; + /** + * Changes the shape of the log object. + * This function will be called every time one of the log methods (such as .info) is called. + * All arguments passed to the log method, except the message, will be pass to this function. + * By default it does not change the shape of the log object. + */ + log?: (object: Record) => Record; + }; + + /** + * A string that would be prefixed to every message (and child message) + */ + msgPrefix?: string + + /** + * An object mapping to hook functions. Hook functions allow for customizing internal logger operations. + * Hook functions must be synchronous functions. + */ + hooks?: { + /** + * Allows for manipulating the parameters passed to logger methods. The signature for this hook is + * logMethod (args, method, level) {}, where args is an array of the arguments that were passed to the + * log method and method is the log method itself, and level is the log level. This hook must invoke the method function by + * using apply, like so: method.apply(this, newArgumentsArray). + */ + logMethod?: (this: Logger, args: Parameters, method: LogFn, level: number) => void; + + /** + * Allows for manipulating the stringified JSON log output just before writing to various transports. + * This function must return a string and must be valid JSON. + */ + streamWrite?: (s: string) => string; + }; + + /** + * Stringification limit at a specific nesting depth when logging circular object. Default: `5`. + */ + depthLimit?: number + + /** + * Stringification limit of properties/elements when logging a specific object/array with circular references. Default: `100`. + */ + edgeLimit?: number + + /** + * Optional child creation callback. + */ + onChild?: OnChildCallback; + + /** + * logs newline delimited JSON with `\r\n` instead of `\n`. Default: `false`. + */ + crlf?: boolean; + } + + export interface ChildLoggerOptions { + level?: LevelOrString; + serializers?: { [key: string]: SerializerFn }; + customLevels?: { [level in CustomLevels]: number }; + formatters?: { + level?: (label: string, number: number) => object; + bindings?: (bindings: Bindings) => object; + log?: (object: object) => object; + }; + redact?: string[] | redactOptions; + msgPrefix?: string + } + + /** + * A data structure representing a log message, it represents the arguments passed to a logger statement, the level + * at which they were logged and the hierarchy of child bindings. + * + * @description By default serializers are not applied to log output in the browser, but they will always be applied + * to `messages` and `bindings` in the `logEvent` object. This allows us to ensure a consistent format for all + * values between server and client. + */ + export interface LogEvent { + /** + * Unix epoch timestamp in milliseconds, the time is taken from the moment the logger method is called. 
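+         * @example
+         * // e.g. ts: 1493426328206 (the same epoch-milliseconds shape produced by the default timestamp function)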
+         */
+        ts: number;
+        /**
+         * All arguments passed to logger method, (for instance `logger.info('a', 'b', 'c')` would result in `messages`
+         * array `['a', 'b', 'c']`).
+         */
+        messages: any[];
+        /**
+         * Represents each child logger (if any), and the relevant bindings.
+         *
+         * @description For instance, given `logger.child({a: 1}).child({b: 2}).info({c: 3})`, the bindings array would
+         * hold `[{a: 1}, {b: 2}]` and the `messages` array would be `[{c: 3}]`. The `bindings` are ordered according to
+         * their position in the child logger hierarchy, with the lowest index being the top of the hierarchy.
+         */
+        bindings: Bindings[];
+        /**
+         * Holds the `label` (for instance `info`), and the corresponding numerical `value` (for instance `30`).
+         * This could be important in cases where client side level values and labels differ from server side.
+         */
+        level: {
+            label: string;
+            value: number;
+        };
+    }
+
+
+
+    //// Top level variable (const) exports
+
+    /**
+     * Provides functions for serializing objects common to many projects.
+     */
+    export const stdSerializers: typeof pinoStdSerializers;
+
+    /**
+     * Provides the mappings of level names to their numeric values, and of numeric level values back to their labels.
+     */
+    export const levels: LevelMapping;
+    export const symbols: {
+        readonly setLevelSym: unique symbol;
+        readonly getLevelSym: unique symbol;
+        readonly levelValSym: unique symbol;
+        readonly useLevelLabelsSym: unique symbol;
+        readonly mixinSym: unique symbol;
+        readonly lsCacheSym: unique symbol;
+        readonly chindingsSym: unique symbol;
+        readonly asJsonSym: unique symbol;
+        readonly writeSym: unique symbol;
+        readonly serializersSym: unique symbol;
+        readonly redactFmtSym: unique symbol;
+        readonly timeSym: unique symbol;
+        readonly timeSliceIndexSym: unique symbol;
+        readonly streamSym: unique symbol;
+        readonly stringifySym: unique symbol;
+        readonly stringifySafeSym: unique symbol;
+        readonly stringifiersSym: unique symbol;
+        readonly endSym: unique symbol;
+        readonly formatOptsSym: unique symbol;
+        readonly messageKeySym: unique symbol;
+        readonly errorKeySym: unique symbol;
+        readonly nestedKeySym: unique symbol;
+        readonly wildcardFirstSym: unique symbol;
+        readonly needsMetadataGsym: unique symbol;
+        readonly useOnlyCustomLevelsSym: unique symbol;
+        readonly formattersSym: unique symbol;
+        readonly hooksSym: unique symbol;
+    };
+
+    /**
+     * Exposes the Pino package version. Also available on the logger instance.
+     */
+    export const version: string;
+
+    /**
+     * Provides functions for generating the timestamp property in the log output. You can set the `timestamp` option during
+     * initialization to one of these functions to adjust the output format. Alternatively, you can specify your own time function.
+     * A time function must synchronously return a string that would be a valid component of a JSON string. For example,
+     * the default function returns a string like `,"time":1493426328206`.
+     */
+    export const stdTimeFunctions: {
+        /**
+         * The default time function for Pino. Returns a string like `,"time":1493426328206`.
+         */
+        epochTime: TimeFn;
+        /*
+         * Returns the seconds since Unix epoch
+         */
+        unixTime: TimeFn;
+        /**
+         * Returns an empty string. This function is used when the `timestamp` option is set to `false`.
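+         * @example
+         * // illustrative: these two configurations both omit the timestamp from the output
+         * pino({ timestamp: false })
+         * pino({ timestamp: pino.stdTimeFunctions.nullTime })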
+ */ + nullTime: TimeFn; + /* + * Returns ISO 8601-formatted time in UTC + */ + isoTime: TimeFn; + /* + * Returns RFC 3339-formatted time in UTC + */ + isoTimeNano: TimeFn; + }; + + //// Exported functions + + /** + * Create a Pino Destination instance: a stream-like object with significantly more throughput (over 30%) than a standard Node.js stream. + * @param [dest]: The `destination` parameter, can be a file descriptor, a file path, or an object with `dest` property pointing to a fd or path. + * An ordinary Node.js `stream` file descriptor can be passed as the destination (such as the result of `fs.createWriteStream`) + * but for peak log writing performance, it is strongly recommended to use `pino.destination` to create the destination stream. + * @returns A Sonic-Boom stream to be used as destination for the pino function + */ + export function destination( + dest?: number | object | string | DestinationStream | NodeJS.WritableStream | SonicBoomOpts, + ): SonicBoom; + + export function transport>( + options: TransportSingleOptions | TransportMultiOptions | TransportPipelineOptions + ): ThreadStream + + export function multistream( + streamsArray: (DestinationStream | StreamEntry)[] | DestinationStream | StreamEntry, + opts?: MultiStreamOptions + ): MultiStreamRes + + //// Nested version of default export for TypeScript/Babel compatibility + + /** + * @param [optionsOrStream]: an options object or a writable stream where the logs will be written. It can also receive some log-line metadata, if the + * relative protocol is enabled. Default: process.stdout + * @returns a new logger instance. + */ + function pino(optionsOrStream?: LoggerOptions | DestinationStream): Logger; + + /** + * @param [options]: an options object + * @param [stream]: a writable stream where the logs will be written. It can also receive some log-line metadata, if the + * relative protocol is enabled. Default: process.stdout + * @returns a new logger instance. + */ + function pino(options: LoggerOptions, stream?: DestinationStream | undefined): Logger; + + /** + * Attach selected static members to the nested callable export, so that + * `const { pino } = require('pino')` exposes them (e.g. `pino.stdTimeFunctions`). + */ + namespace pino { + const stdTimeFunctions: { + epochTime: TimeFn; + unixTime: TimeFn; + nullTime: TimeFn; + isoTime: TimeFn; + isoTimeNano: TimeFn; + }; + } +} + +//// Callable default export + +/** + * @param [optionsOrStream]: an options object or a writable stream where the logs will be written. It can also receive some log-line metadata, if the + * relative protocol is enabled. Default: process.stdout + * @returns a new logger instance. + */ +declare function pino(optionsOrStream?: pino.LoggerOptions | pino.DestinationStream): pino.Logger; + +/** + * @param [options]: an options object + * @param [stream]: a writable stream where the logs will be written. It can also receive some log-line metadata, if the + * relative protocol is enabled. Default: process.stdout + * @returns a new logger instance. 
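+ * @example
+ * // illustrative sketch: an options object plus an explicit destination stream
+ * const logger = pino({ level: 'debug' }, pino.destination(1))
+ * logger.debug('written to file descriptor 1, i.e. stdout')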
+ */ +declare function pino(options: pino.LoggerOptions, stream?: pino.DestinationStream | undefined): pino.Logger; + +export = pino; diff --git a/node_modules/pino/pino.js b/node_modules/pino/pino.js new file mode 100644 index 0000000..cabeaf6 --- /dev/null +++ b/node_modules/pino/pino.js @@ -0,0 +1,234 @@ +'use strict' + +const os = require('node:os') +const stdSerializers = require('pino-std-serializers') +const caller = require('./lib/caller') +const redaction = require('./lib/redaction') +const time = require('./lib/time') +const proto = require('./lib/proto') +const symbols = require('./lib/symbols') +const { configure } = require('safe-stable-stringify') +const { assertDefaultLevelFound, mappings, genLsCache, genLevelComparison, assertLevelComparison } = require('./lib/levels') +const { DEFAULT_LEVELS, SORTING_ORDER } = require('./lib/constants') +const { + createArgsNormalizer, + asChindings, + buildSafeSonicBoom, + buildFormatters, + stringify, + normalizeDestFileDescriptor, + noop +} = require('./lib/tools') +const { version } = require('./lib/meta') +const { + chindingsSym, + redactFmtSym, + serializersSym, + timeSym, + timeSliceIndexSym, + streamSym, + stringifySym, + stringifySafeSym, + stringifiersSym, + setLevelSym, + endSym, + formatOptsSym, + messageKeySym, + errorKeySym, + nestedKeySym, + mixinSym, + levelCompSym, + useOnlyCustomLevelsSym, + formattersSym, + hooksSym, + nestedKeyStrSym, + mixinMergeStrategySym, + msgPrefixSym +} = symbols +const { epochTime, nullTime } = time +const { pid } = process +const hostname = os.hostname() +const defaultErrorSerializer = stdSerializers.err +const defaultOptions = { + level: 'info', + levelComparison: SORTING_ORDER.ASC, + levels: DEFAULT_LEVELS, + messageKey: 'msg', + errorKey: 'err', + nestedKey: null, + enabled: true, + base: { pid, hostname }, + serializers: Object.assign(Object.create(null), { + err: defaultErrorSerializer + }), + formatters: Object.assign(Object.create(null), { + bindings (bindings) { + return bindings + }, + level (label, number) { + return { level: number } + } + }), + hooks: { + logMethod: undefined, + streamWrite: undefined + }, + timestamp: epochTime, + name: undefined, + redact: null, + customLevels: null, + useOnlyCustomLevels: false, + depthLimit: 5, + edgeLimit: 100 +} + +const normalize = createArgsNormalizer(defaultOptions) + +const serializers = Object.assign(Object.create(null), stdSerializers) + +function pino (...args) { + const instance = {} + const { opts, stream } = normalize(instance, caller(), ...args) + + if (opts.level && typeof opts.level === 'string' && DEFAULT_LEVELS[opts.level.toLowerCase()] !== undefined) opts.level = opts.level.toLowerCase() + + const { + redact, + crlf, + serializers, + timestamp, + messageKey, + errorKey, + nestedKey, + base, + name, + level, + customLevels, + levelComparison, + mixin, + mixinMergeStrategy, + useOnlyCustomLevels, + formatters, + hooks, + depthLimit, + edgeLimit, + onChild, + msgPrefix + } = opts + + const stringifySafe = configure({ + maximumDepth: depthLimit, + maximumBreadth: edgeLimit + }) + + const allFormatters = buildFormatters( + formatters.level, + formatters.bindings, + formatters.log + ) + + const stringifyFn = stringify.bind({ + [stringifySafeSym]: stringifySafe + }) + const stringifiers = redact ? redaction(redact, stringifyFn) : {} + const formatOpts = redact + ? { stringify: stringifiers[redactFmtSym] } + : { stringify: stringifyFn } + const end = '}' + (crlf ? 
'\r\n' : '\n') + const coreChindings = asChindings.bind(null, { + [chindingsSym]: '', + [serializersSym]: serializers, + [stringifiersSym]: stringifiers, + [stringifySym]: stringify, + [stringifySafeSym]: stringifySafe, + [formattersSym]: allFormatters + }) + + let chindings = '' + if (base !== null) { + if (name === undefined) { + chindings = coreChindings(base) + } else { + chindings = coreChindings(Object.assign({}, base, { name })) + } + } + + const time = (timestamp instanceof Function) + ? timestamp + : (timestamp ? epochTime : nullTime) + const timeSliceIndex = time().indexOf(':') + 1 + + if (useOnlyCustomLevels && !customLevels) throw Error('customLevels is required if useOnlyCustomLevels is set true') + if (mixin && typeof mixin !== 'function') throw Error(`Unknown mixin type "${typeof mixin}" - expected "function"`) + if (msgPrefix && typeof msgPrefix !== 'string') throw Error(`Unknown msgPrefix type "${typeof msgPrefix}" - expected "string"`) + + assertDefaultLevelFound(level, customLevels, useOnlyCustomLevels) + const levels = mappings(customLevels, useOnlyCustomLevels) + + if (typeof stream.emit === 'function') { + stream.emit('message', { code: 'PINO_CONFIG', config: { levels, messageKey, errorKey } }) + } + + assertLevelComparison(levelComparison) + const levelCompFunc = genLevelComparison(levelComparison) + + Object.assign(instance, { + levels, + [levelCompSym]: levelCompFunc, + [useOnlyCustomLevelsSym]: useOnlyCustomLevels, + [streamSym]: stream, + [timeSym]: time, + [timeSliceIndexSym]: timeSliceIndex, + [stringifySym]: stringify, + [stringifySafeSym]: stringifySafe, + [stringifiersSym]: stringifiers, + [endSym]: end, + [formatOptsSym]: formatOpts, + [messageKeySym]: messageKey, + [errorKeySym]: errorKey, + [nestedKeySym]: nestedKey, + // protect against injection + [nestedKeyStrSym]: nestedKey ? 
`,${JSON.stringify(nestedKey)}:{` : '', + [serializersSym]: serializers, + [mixinSym]: mixin, + [mixinMergeStrategySym]: mixinMergeStrategy, + [chindingsSym]: chindings, + [formattersSym]: allFormatters, + [hooksSym]: hooks, + silent: noop, + onChild, + [msgPrefixSym]: msgPrefix + }) + + Object.setPrototypeOf(instance, proto()) + + genLsCache(instance) + + instance[setLevelSym](level) + + return instance +} + +module.exports = pino + +module.exports.destination = (dest = process.stdout.fd) => { + if (typeof dest === 'object') { + dest.dest = normalizeDestFileDescriptor(dest.dest || process.stdout.fd) + return buildSafeSonicBoom(dest) + } else { + return buildSafeSonicBoom({ dest: normalizeDestFileDescriptor(dest), minLength: 0 }) + } +} + +module.exports.transport = require('./lib/transport') +module.exports.multistream = require('./lib/multistream') + +module.exports.levels = mappings() +module.exports.stdSerializers = serializers +module.exports.stdTimeFunctions = Object.assign({}, time) +module.exports.symbols = symbols +module.exports.version = version + +// Enables default and name export with TypeScript and Babel +module.exports.default = pino +module.exports.pino = pino diff --git a/node_modules/pino/test/basic.test.js b/node_modules/pino/test/basic.test.js new file mode 100644 index 0000000..411598f --- /dev/null +++ b/node_modules/pino/test/basic.test.js @@ -0,0 +1,886 @@ +'use strict' + +const os = require('node:os') +const { readFileSync } = require('node:fs') +const test = require('node:test') +const assert = require('node:assert') + +const { sink, check, match, once, watchFileCreated, file } = require('./helper') +const pino = require('../') +const { version } = require('../package.json') +const { pid } = process +const hostname = os.hostname() + +test('pino version is exposed on export', () => { + assert.equal(pino.version, version) +}) + +test('pino version is exposed on instance', () => { + const instance = pino() + assert.equal(instance.version, version) +}) + +test('child instance exposes pino version', () => { + const child = pino().child({ foo: 'bar' }) + assert.equal(child.version, version) +}) + +test('bindings are exposed on every instance', () => { + const instance = pino() + assert.deepEqual(instance.bindings(), {}) +}) + +test('bindings contain the name and the child bindings', () => { + const instance = pino({ name: 'basicTest', level: 'info' }).child({ foo: 'bar' }).child({ a: 2 }) + assert.deepEqual(instance.bindings(), { name: 'basicTest', foo: 'bar', a: 2 }) +}) + +test('set bindings on instance', () => { + const instance = pino({ name: 'basicTest', level: 'info' }) + instance.setBindings({ foo: 'bar' }) + assert.deepEqual(instance.bindings(), { name: 'basicTest', foo: 'bar' }) +}) + +test('newly set bindings overwrite old bindings', () => { + const instance = pino({ name: 'basicTest', level: 'info', base: { foo: 'bar' } }) + instance.setBindings({ foo: 'baz' }) + assert.deepEqual(instance.bindings(), { name: 'basicTest', foo: 'baz' }) +}) + +test('set bindings on child instance', () => { + const child = pino({ name: 'basicTest', level: 'info' }).child({}) + child.setBindings({ foo: 'bar' }) + assert.deepEqual(child.bindings(), { name: 'basicTest', foo: 'bar' }) +}) + +test('child should have bindings set by parent', () => { + const instance = pino({ name: 'basicTest', level: 'info' }) + instance.setBindings({ foo: 'bar' }) + const child = instance.child({}) + assert.deepEqual(child.bindings(), { name: 'basicTest', foo: 'bar' }) +}) + +test('child should not 
share bindings of parent set after child creation', () => { + const instance = pino({ name: 'basicTest', level: 'info' }) + const child = instance.child({}) + instance.setBindings({ foo: 'bar' }) + assert.deepEqual(instance.bindings(), { name: 'basicTest', foo: 'bar' }) + assert.deepEqual(child.bindings(), { name: 'basicTest' }) +}) + +function levelTest (name, level) { + test(`${name} logs as ${level}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + instance[name]('hello world') + check(assert.equal, await once(stream, 'data'), level, 'hello world') + }) + + test(`passing objects at level ${name}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + const obj = { hello: 'world' } + instance[name](obj) + + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + assert.equal(result.pid, pid) + assert.equal(result.hostname, hostname) + assert.equal(result.level, level) + assert.equal(result.hello, 'world') + assert.deepEqual(Object.keys(obj), ['hello']) + }) + + test(`passing an object and a string at level ${name}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + const obj = { hello: 'world' } + instance[name](obj, 'a string') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + msg: 'a string', + hello: 'world' + }) + assert.deepEqual(Object.keys(obj), ['hello']) + }) + + test(`passing a undefined and a string at level ${name}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + instance[name](undefined, 'a string') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + msg: 'a string' + }) + }) + + test(`overriding object key by string at level ${name}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + instance[name]({ hello: 'world', msg: 'object' }, 'string') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + msg: 'string', + hello: 'world' + }) + }) + + test(`formatting logs as ${name}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + instance[name]('hello %d', 42) + const result = await once(stream, 'data') + check(assert.equal, result, level, 'hello 42') + }) + + test(`formatting a symbol at level ${name}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + + const sym = Symbol('foo') + instance[name]('hello %s', sym) + + const result = await once(stream, 'data') + + check(assert.equal, result, level, 'hello Symbol(foo)') + }) + + test(`passing error with a serializer at level ${name}`, async () => { + const stream = sink() + const err = new Error('myerror') + const instance = pino({ + serializers: { + err: pino.stdSerializers.err + } + }, stream) + instance.level = name + instance[name]({ err }) + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time 
is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + err: { + type: 'Error', + message: err.message, + stack: err.stack + }, + msg: err.message + }) + }) + + test(`child logger for level ${name}`, async () => { + const stream = sink() + const instance = pino(stream) + instance.level = name + const child = instance.child({ hello: 'world' }) + child[name]('hello world') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + msg: 'hello world', + hello: 'world' + }) + }) +} + +levelTest('fatal', 60) +levelTest('error', 50) +levelTest('warn', 40) +levelTest('info', 30) +levelTest('debug', 20) +levelTest('trace', 10) + +test('serializers can return undefined to strip field', async () => { + const stream = sink() + const instance = pino({ + serializers: { + test () { return undefined } + } + }, stream) + + instance.info({ test: 'sensitive info' }) + const result = await once(stream, 'data') + assert.equal('test' in result, false) +}) + +test('streams receive a message event with PINO_CONFIG', (t, end) => { + const stream = sink() + stream.once('message', (message) => { + match(message, { + code: 'PINO_CONFIG', + config: { + errorKey: 'err', + levels: { + labels: { + 10: 'trace', + 20: 'debug', + 30: 'info', + 40: 'warn', + 50: 'error', + 60: 'fatal' + }, + values: { + debug: 20, + error: 50, + fatal: 60, + info: 30, + trace: 10, + warn: 40 + } + }, + messageKey: 'msg' + } + }) + end() + }) + pino(stream) +}) + +test('does not explode with a circular ref', () => { + const stream = sink() + const instance = pino(stream) + const b = {} + const a = { + hello: b + } + b.a = a // circular ref + assert.doesNotThrow(() => instance.info(a)) +}) + +test('set the name', async () => { + const stream = sink() + const instance = pino({ + name: 'hello' + }, stream) + instance.fatal('this is fatal') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + name: 'hello', + msg: 'this is fatal' + }) +}) + +test('set the messageKey', async () => { + const stream = sink() + const message = 'hello world' + const messageKey = 'fooMessage' + const instance = pino({ + messageKey + }, stream) + instance.info(message) + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + fooMessage: message + }) +}) + +test('set the nestedKey', async () => { + const stream = sink() + const object = { hello: 'world' } + const nestedKey = 'stuff' + const instance = pino({ + nestedKey + }, stream) + instance.info(object) + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + stuff: object + }) +}) + +test('set undefined properties', async () => { + const stream = sink() + const instance = pino(stream) + instance.info({ hello: 'world', property: undefined }) + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, 
{ + pid, + hostname, + level: 30, + hello: 'world' + }) +}) + +test('prototype properties are not logged', async () => { + const stream = sink() + const instance = pino(stream) + instance.info(Object.create({ hello: 'world' })) + const { hello } = await once(stream, 'data') + assert.equal(hello, undefined) +}) + +test('set the base', async () => { + const stream = sink() + const instance = pino({ + base: { + a: 'b' + } + }, stream) + + instance.fatal('this is fatal') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + a: 'b', + level: 60, + msg: 'this is fatal' + }) +}) + +test('set the base to null', async () => { + const stream = sink() + const instance = pino({ + base: null + }, stream) + instance.fatal('this is fatal') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + level: 60, + msg: 'this is fatal' + }) +}) + +test('set the base to null and use a formatter', async () => { + const stream = sink() + const instance = pino({ + base: null, + formatters: { + log (input) { + return Object.assign({}, input, { additionalMessage: 'using pino' }) + } + } + }, stream) + instance.fatal('this is fatal too') + const result = await once(stream, 'data') + assert.equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + level: 60, + msg: 'this is fatal too', + additionalMessage: 'using pino' + }) +}) + +test('throw if creating child without bindings', () => { + const stream = sink() + const instance = pino(stream) + try { + instance.child() + assert.fail('it should throw') + } catch (err) { + assert.equal(err.message, 'missing bindings for child Pino') + } +}) + +test('correctly escapes msg strings with stray double quote at end', async () => { + const stream = sink() + const instance = pino({ + name: 'hello' + }, stream) + + instance.fatal('this contains "') + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + name: 'hello', + msg: 'this contains "' + }) +}) + +test('correctly escape msg strings with unclosed double quote', async () => { + const stream = sink() + const instance = pino({ + name: 'hello' + }, stream) + instance.fatal('" this contains') + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + name: 'hello', + msg: '" this contains' + }) +}) + +test('correctly escape quote in a key', async () => { + const stream = sink() + const instance = pino(stream) + const obj = { 'some"obj': 'world' } + instance.info(obj, 'a string') + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + level: 30, + pid, + hostname, + msg: 'a string', + 'some"obj': 'world' + }) + assert.deepEqual(Object.keys(obj), ['some"obj']) +}) + +// https://github.com/pinojs/pino/issues/139 +test('object and format string', async () => { + const stream = sink() + const instance = pino(stream) + instance.info({}, 'foo %s', 'bar') + + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'foo bar' + }) +}) + +test('object and format string property', async () => { + const stream = sink() + const instance = pino(stream) + 
instance.info({ answer: 42 }, 'foo %s', 'bar') + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'foo bar', + answer: 42 + }) +}) + +test('correctly strip undefined when returned from toJSON', async () => { + const stream = sink() + const instance = pino({ + test: 'this' + }, stream) + instance.fatal({ test: { toJSON () { return undefined } } }) + const result = await once(stream, 'data') + assert.equal('test' in result, false) +}) + +test('correctly supports stderr', (t, end) => { + // stderr inherits from Stream, rather than Writable + const dest = { + writable: true, + write (result) { + result = JSON.parse(result) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + msg: 'a message' + }) + end() + } + } + const instance = pino(dest) + instance.fatal('a message') +}) + +test('normalize number to string', async () => { + const stream = sink() + const instance = pino(stream) + instance.info(1) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: '1' + }) +}) + +test('normalize number to string with an object', async () => { + const stream = sink() + const instance = pino(stream) + instance.info({ answer: 42 }, 1) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: '1', + answer: 42 + }) +}) + +test('handles objects with null prototype', async () => { + const stream = sink() + const instance = pino(stream) + const o = Object.create(null) + o.test = 'test' + instance.info(o) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + test: 'test' + }) +}) + +test('pino.destination', async () => { + const tmp = file() + const instance = pino(pino.destination(tmp)) + instance.info('hello') + await watchFileCreated(tmp) + const result = JSON.parse(readFileSync(tmp).toString()) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('auto pino.destination with a string', async () => { + const tmp = file() + const instance = pino(tmp) + instance.info('hello') + await watchFileCreated(tmp) + const result = JSON.parse(readFileSync(tmp).toString()) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('auto pino.destination with a string as second argument', async () => { + const tmp = file() + const instance = pino(null, tmp) + instance.info('hello') + await watchFileCreated(tmp) + const result = JSON.parse(readFileSync(tmp).toString()) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('does not override opts with a string as second argument', async () => { + const tmp = file() + const instance = pino({ + timestamp: () => ',"time":"none"' + }, tmp) + instance.info('hello') + await watchFileCreated(tmp) + const result = JSON.parse(readFileSync(tmp).toString()) + assert.deepEqual(result, { + pid, + hostname, + level: 30, + time: 'none', + msg: 'hello' + }) +}) + +// https://github.com/pinojs/pino/issues/222 +test('children with same names render in correct order', async () => { + const stream = sink() + const root = pino(stream) + root.child({ a: 1 }).child({ a: 2 }).info({ a: 3 }) + const { a } = await once(stream, 'data') + assert.equal(a, 3, 'last logged object takes precedence') 
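+  // duplicate keys are emitted in binding order and JSON.parse keeps the last one, so `a` parses to 3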
+}) + +test('use `safe-stable-stringify` to avoid circular dependencies', async () => { + const stream = sink() + const root = pino(stream) + // circular depth + const obj = {} + obj.a = obj + root.info(obj) + const { a } = await once(stream, 'data') + assert.deepEqual(a, { a: '[Circular]' }) +}) + +test('correctly log non circular objects', async () => { + const stream = sink() + const root = pino(stream) + const obj = {} + let parent = obj + for (let i = 0; i < 10; i++) { + parent.node = {} + parent = parent.node + } + root.info(obj) + const { node } = await once(stream, 'data') + assert.deepEqual(node, { node: { node: { node: { node: { node: { node: { node: { node: { node: {} } } } } } } } } }) +}) + +test('safe-stable-stringify must be used when interpolating', async () => { + const stream = sink() + const instance = pino(stream) + + const o = { a: { b: {} } } + o.a.b.c = o.a.b + instance.info('test %j', o) + + const { msg } = await once(stream, 'data') + assert.equal(msg, 'test {"a":{"b":{"c":"[Circular]"}}}') +}) + +test('throws when setting useOnlyCustomLevels without customLevels', () => { + assert.throws( + () => { + pino({ + useOnlyCustomLevels: true + }) + }, + /customLevels is required if useOnlyCustomLevels is set true/ + ) +}) + +test('correctly log Infinity', async () => { + const stream = sink() + const instance = pino(stream) + + const o = { num: Infinity } + instance.info(o) + + const { num } = await once(stream, 'data') + assert.equal(num, null) +}) + +test('correctly log -Infinity', async () => { + const stream = sink() + const instance = pino(stream) + + const o = { num: -Infinity } + instance.info(o) + + const { num } = await once(stream, 'data') + assert.equal(num, null) +}) + +test('correctly log NaN', async () => { + const stream = sink() + const instance = pino(stream) + + const o = { num: NaN } + instance.info(o) + + const { num } = await once(stream, 'data') + assert.equal(num, null) +}) + +test('offers a .default() method to please typescript', async () => { + assert.equal(pino.default, pino) + + const stream = sink() + const instance = pino.default(stream) + instance.info('hello world') + check(assert.equal, await once(stream, 'data'), 30, 'hello world') +}) + +test('correctly skip function', async () => { + const stream = sink() + const instance = pino(stream) + + const o = { num: NaN } + instance.info(o, () => {}) + + const { msg } = await once(stream, 'data') + assert.equal(msg, undefined) +}) + +test('correctly skip Infinity', async () => { + const stream = sink() + const instance = pino(stream) + + const o = { num: NaN } + instance.info(o, Infinity) + + const { msg } = await once(stream, 'data') + assert.equal(msg, null) +}) + +test('correctly log number', async () => { + const stream = sink() + const instance = pino(stream) + + const o = { num: NaN } + instance.info(o, 42) + + const { msg } = await once(stream, 'data') + assert.equal(msg, 42) +}) + +test('nestedKey should not be used for non-objects', async () => { + const stream = sink() + const message = 'hello' + const nestedKey = 'stuff' + const instance = pino({ + nestedKey + }, stream) + instance.info(message) + const result = await once(stream, 'data') + delete result.time + assert.deepStrictEqual(result, { + pid, + hostname, + level: 30, + msg: message + }) +}) + +test('throws if prettyPrint is passed in as an option', async () => { + assert.throws( + () => { + pino({ + prettyPrint: true + }) + }, + Error('prettyPrint option is no longer supported, see the pino-pretty package 
(https://github.com/pinojs/pino-pretty)')
+  )
+})
+
+test('Should invoke `onChild` with the newly created child', () => {
+  let innerChild
+  const child = pino({
+    onChild: (instance) => {
+      innerChild = instance
+    }
+  }).child({ foo: 'bar' })
+  assert.equal(child, innerChild)
+})
+
+test('logger message should have the prefix message defined in the logger creation', async () => {
+  const stream = sink()
+  const logger = pino({
+    msgPrefix: 'My name is Bond '
+  }, stream)
+  assert.equal(logger.msgPrefix, 'My name is Bond ')
+  logger.info('James Bond')
+  const { msg } = await once(stream, 'data')
+  assert.equal(msg, 'My name is Bond James Bond')
+})
+
+test('child message should have the prefix message defined in the child creation', async () => {
+  const stream = sink()
+  const instance = pino(stream)
+  const child = instance.child({}, { msgPrefix: 'My name is Bond ' })
+  child.info('James Bond')
+  const { msg } = await once(stream, 'data')
+  assert.equal(msg, 'My name is Bond James Bond')
+})
+
+test('child message should have the prefix message defined in the child creation when logging with log meta', async () => {
+  const stream = sink()
+  const instance = pino(stream)
+  const child = instance.child({}, { msgPrefix: 'My name is Bond ' })
+  child.info({ hello: 'world' }, 'James Bond')
+  const { msg, hello } = await once(stream, 'data')
+  assert.equal(hello, 'world')
+  assert.equal(msg, 'My name is Bond James Bond')
+})
+
+test('logged message should not have the prefix when not providing any message', async () => {
+  const stream = sink()
+  const instance = pino(stream)
+  const child = instance.child({}, { msgPrefix: 'This should not be shown ' })
+  child.info({ hello: 'world' })
+  const { msg, hello } = await once(stream, 'data')
+  assert.equal(hello, 'world')
+  assert.equal(msg, undefined)
+})
+
+test('child message should append parent prefix to current prefix defined in the child creation', async () => {
+  const stream = sink()
+  const instance = pino({
+    msgPrefix: 'My name is Bond '
+  }, stream)
+  const child = instance.child({}, { msgPrefix: 'James ' })
+  child.info('Bond')
+  assert.equal(child.msgPrefix, 'My name is Bond James ')
+  const { msg } = await once(stream, 'data')
+  assert.equal(msg, 'My name is Bond James Bond')
+})
+
+test('child message should inherit parent prefix', async () => {
+  const stream = sink()
+  const instance = pino({
+    msgPrefix: 'My name is Bond '
+  }, stream)
+  const child = instance.child({})
+  child.info('James Bond')
+  const { msg } = await once(stream, 'data')
+  assert.equal(msg, 'My name is Bond James Bond')
+})
+
+test('grandchild message should inherit parent prefix', async () => {
+  const stream = sink()
+  const instance = pino(stream)
+  const child = instance.child({}, { msgPrefix: 'My name is Bond ' })
+  const grandchild = child.child({})
+  grandchild.info('James Bond')
+  const { msg } = await once(stream, 'data')
+  assert.equal(msg, 'My name is Bond James Bond')
+})
diff --git a/node_modules/pino/test/broken-pipe.test.js b/node_modules/pino/test/broken-pipe.test.js
new file mode 100644
index 0000000..de20b3f
--- /dev/null
+++ b/node_modules/pino/test/broken-pipe.test.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const test = require('node:test')
+const assert = require('node:assert')
+const { join } = require('node:path')
+const { fork } = require('node:child_process')
+const tspl = require('@matteo.collina/tspl')
+const { once } = require('./helper')
+const pino = require('..')
+
+if (process.platform === 'win32') {
+
console.log('skipping on windows') + process.exit(0) +} + +if (process.env.CITGM) { + // This looks like a some form of limitations of the CITGM test runner + // or the HW/SW we run it on. This file can hang on Node.js v18.x. + // The failure does not reproduce locally or on our CI. + // Skipping it is the only way to keep pino in CITGM. + // https://github.com/nodejs/citgm/pull/1002#issuecomment-1751942988 + console.log('Skipping on Node.js core CITGM because it hangs on v18.x') + process.exit(0) +} + +function testFile (file) { + file = join('fixtures', 'broken-pipe', file) + test(file, async () => { + const child = fork(join(__dirname, file), { silent: true }) + child.stdout.destroy() + + child.stderr.pipe(process.stdout) + + const res = await once(child, 'close') + assert.equal(res, 0) // process exits successfully + }) +} + +testFile('basic.js') +testFile('destination.js') +testFile('syncfalse.js') + +test('let error pass through', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = pino.destination({ sync: true }) + + // side effect of the pino constructor is that it will set an + // event handler for error + pino(stream) + + process.nextTick(() => stream.emit('error', new Error('kaboom'))) + process.nextTick(() => stream.emit('error', new Error('kaboom'))) + + stream.on('error', (err) => { + plan.equal(err.message, 'kaboom') + }) + + await plan +}) diff --git a/node_modules/pino/test/browser-child.test.js b/node_modules/pino/test/browser-child.test.js new file mode 100644 index 0000000..679261a --- /dev/null +++ b/node_modules/pino/test/browser-child.test.js @@ -0,0 +1,132 @@ +'use strict' +const test = require('tape') +const pino = require('../browser') + +test('child has parent level', ({ end, same, is }) => { + const instance = pino({ + level: 'error', + browser: {} + }) + + const child = instance.child({}) + + same(child.level, instance.level) + end() +}) + +test('child can set level at creation time', ({ end, same, is }) => { + const instance = pino({ + level: 'error', + browser: {} + }) + + const child = instance.child({}, { level: 'info' }) // first bindings, then options + + same(child.level, 'info') + end() +}) + +test('changing child level does not affect parent', ({ end, same, is }) => { + const instance = pino({ + level: 'error', + browser: {} + }) + + const child = instance.child({}) + child.level = 'info' + + same(instance.level, 'error') + end() +}) + +test('child should log, if its own level allows it', ({ end, same, is }) => { + const expected = [ + { + level: 30, + msg: 'this is info' + }, + { + level: 40, + msg: 'this is warn' + }, + { + level: 50, + msg: 'this is an error' + } + ] + const instance = pino({ + level: 'error', + browser: { + write (actual) { + checkLogObjects(is, same, actual, expected.shift()) + } + } + }) + + const child = instance.child({}) + child.level = 'info' + + child.debug('this is debug') + child.info('this is info') + child.warn('this is warn') + child.error('this is an error') + + same(expected.length, 0, 'not all messages were read') + end() +}) + +test('changing child log level should not affect parent log behavior', ({ end, same, is }) => { + const expected = [ + { + level: 50, + msg: 'this is an error' + }, + { + level: 60, + msg: 'this is fatal' + } + ] + const instance = pino({ + level: 'error', + browser: { + write (actual) { + checkLogObjects(is, same, actual, expected.shift()) + } + } + }) + + const child = instance.child({}) + child.level = 'info' + + instance.warn('this is warn') + instance.error('this is an 
error') + instance.fatal('this is fatal') + + same(expected.length, 0, 'not all messages were read') + end() +}) + +test('onChild callback should be called when new child is created', ({ end, pass, plan }) => { + plan(1) + const instance = pino({ + level: 'error', + browser: {}, + onChild: (_child) => { + pass('onChild callback was called') + end() + } + }) + + instance.child({}) +}) + +function checkLogObjects (is, same, actual, expected) { + is(actual.time <= Date.now(), true, 'time is greater than Date.now()') + + const actualCopy = Object.assign({}, actual) + const expectedCopy = Object.assign({}, expected) + delete actualCopy.time + delete expectedCopy.time + + same(actualCopy, expectedCopy) +} diff --git a/node_modules/pino/test/browser-disabled.test.js b/node_modules/pino/test/browser-disabled.test.js new file mode 100644 index 0000000..36d1b11 --- /dev/null +++ b/node_modules/pino/test/browser-disabled.test.js @@ -0,0 +1,87 @@ +'use strict' +const test = require('tape') +const pino = require('../browser') + +test('set browser opts disabled to true', ({ end, same }) => { + const instance = pino({ + browser: { + disabled: true, + write (actual) { + checkLogObjects(same, actual, []) + } + } + }) + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + + end() +}) + +test('set browser opts disabled to false', ({ end, same }) => { + const expected = [ + { + level: 30, + msg: 'hello world' + }, + { + level: 50, + msg: 'this is an error' + }, + { + level: 60, + msg: 'this is fatal' + } + ] + const instance = pino({ + browser: { + disabled: false, + write (actual) { + checkLogObjects(same, actual, expected.shift()) + } + } + }) + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + + end() +}) + +test('disabled is not set in browser opts', ({ end, same }) => { + const expected = [ + { + level: 30, + msg: 'hello world' + }, + { + level: 50, + msg: 'this is an error' + }, + { + level: 60, + msg: 'this is fatal' + } + ] + const instance = pino({ + browser: { + write (actual) { + checkLogObjects(same, actual, expected.shift()) + } + } + }) + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + + end() +}) + +function checkLogObjects (same, actual, expected, is) { + const actualCopy = Object.assign({}, actual) + const expectedCopy = Object.assign({}, expected) + delete actualCopy.time + delete expectedCopy.time + + same(actualCopy, expectedCopy) +} diff --git a/node_modules/pino/test/browser-early-console-freeze.test.js b/node_modules/pino/test/browser-early-console-freeze.test.js new file mode 100644 index 0000000..942abfa --- /dev/null +++ b/node_modules/pino/test/browser-early-console-freeze.test.js @@ -0,0 +1,12 @@ +'use strict' +Object.freeze(console) +const test = require('tape') +const pino = require('../browser') + +test('silent level', ({ end, fail, pass }) => { + pino({ + level: 'silent', + browser: { } + }) + end() +}) diff --git a/node_modules/pino/test/browser-is-level-enabled.test.js b/node_modules/pino/test/browser-is-level-enabled.test.js new file mode 100644 index 0000000..045c613 --- /dev/null +++ b/node_modules/pino/test/browser-is-level-enabled.test.js @@ -0,0 +1,101 @@ +'use strict' + +const { describe, test } = require('node:test') +const assert = require('node:assert') +const pino = require('../browser') + +const customLevels = { + trace: 10, + debug: 20, + info: 30, + warn: 40, + error: 50, + fatal: 60 +} + +describe('Default 
levels suite', () => { + test('can check if current level enabled', async () => { + const log = pino({ level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if current level enabled when as object', async () => { + const log = pino({ asObject: true, level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if level enabled after level set', async () => { + const log = pino() + assert.equal(false, log.isLevelEnabled('debug')) + log.level = 'debug' + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if higher level enabled', async () => { + const log = pino({ level: 'debug' }) + assert.equal(true, log.isLevelEnabled('error')) + }) + + test('can check if lower level is disabled', async () => { + const log = pino({ level: 'error' }) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('ASC: can check if child has current level enabled', async () => { + const log = pino().child({}, { level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if custom level is enabled', async () => { + const log = pino({ + customLevels: { foo: 35 }, + level: 'debug' + }) + assert.equal(true, log.isLevelEnabled('foo')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) +}) + +describe('Custom levels suite', () => { + test('can check if current level enabled', async () => { + const log = pino({ level: 'debug', customLevels }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if level enabled after level set', async () => { + const log = pino({ customLevels }) + assert.equal(false, log.isLevelEnabled('debug')) + log.level = 'debug' + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if higher level enabled', async () => { + const log = pino({ level: 'debug', customLevels }) + assert.equal(true, log.isLevelEnabled('error')) + }) + + test('can check if lower level is disabled', async () => { + const log = pino({ level: 'error', customLevels }) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if child has current level enabled', async () => { + const log = pino().child({ customLevels }, { level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if custom level is enabled', async () => { + const log = pino({ + customLevels: { foo: 35, ...customLevels }, + level: 'debug' + }) + assert.equal(true, log.isLevelEnabled('foo')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) +}) diff --git a/node_modules/pino/test/browser-levels.test.js b/node_modules/pino/test/browser-levels.test.js new file mode 100644 index 0000000..a992905 --- /dev/null +++ b/node_modules/pino/test/browser-levels.test.js @@ -0,0 +1,241 @@ +'use strict' +const test = require('tape') +const pino = require('../browser') + +test('set the level by string', ({ end, same, is }) => { + const expected = [ + { + level: 50, + msg: 'this is an error' + }, + { + level: 60, + msg: 'this is fatal' + } + ] + const instance = pino({ + browser: { + write (actual) { + checkLogObjects(is, same, actual, expected.shift()) + } + } + }) + + instance.level = 'error' + instance.info('hello world') + instance.error('this 
is an error') + instance.fatal('this is fatal') + + end() +}) + +test('set the level by string. init with silent', ({ end, same, is }) => { + const expected = [ + { + level: 50, + msg: 'this is an error' + }, + { + level: 60, + msg: 'this is fatal' + } + ] + const instance = pino({ + level: 'silent', + browser: { + write (actual) { + checkLogObjects(is, same, actual, expected.shift()) + } + } + }) + + instance.level = 'error' + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + + end() +}) + +test('set the level by string. init with silent and transmit', ({ end, same, is }) => { + const expected = [ + { + level: 50, + msg: 'this is an error' + }, + { + level: 60, + msg: 'this is fatal' + } + ] + const instance = pino({ + level: 'silent', + browser: { + write (actual) { + checkLogObjects(is, same, actual, expected.shift()) + } + }, + transmit: { + send () {} + } + }) + + instance.level = 'error' + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + + end() +}) + +test('set the level via constructor', ({ end, same, is }) => { + const expected = [ + { + level: 50, + msg: 'this is an error' + }, + { + level: 60, + msg: 'this is fatal' + } + ] + const instance = pino({ + level: 'error', + browser: { + write (actual) { + checkLogObjects(is, same, actual, expected.shift()) + } + } + }) + + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + + end() +}) + +test('set custom level and use it', ({ end, same, is }) => { + const expected = [ + { + level: 31, + msg: 'this is a custom level' + } + ] + const instance = pino({ + customLevels: { + success: 31 + }, + browser: { + write (actual) { + checkLogObjects(is, same, actual, expected.shift()) + } + } + }) + + instance.success('this is a custom level') + + end() +}) + +test('the wrong level throws', ({ end, throws }) => { + const instance = pino() + throws(() => { + instance.level = 'kaboom' + }) + end() +}) + +test('the wrong level by number throws', ({ end, throws }) => { + const instance = pino() + throws(() => { + instance.levelVal = 55 + }) + end() +}) + +test('exposes level string mappings', ({ end, is }) => { + is(pino.levels.values.error, 50) + end() +}) + +test('exposes level number mappings', ({ end, is }) => { + is(pino.levels.labels[50], 'error') + end() +}) + +test('returns level integer', ({ end, is }) => { + const instance = pino({ level: 'error' }) + is(instance.levelVal, 50) + end() +}) + +test('silent level via constructor', ({ end, fail }) => { + const instance = pino({ + level: 'silent', + browser: { + write () { + fail('no data should be logged') + } + } + }) + + Object.keys(pino.levels.values).forEach((level) => { + instance[level]('hello world') + }) + + end() +}) + +test('silent level by string', ({ end, fail }) => { + const instance = pino({ + browser: { + write () { + fail('no data should be logged') + } + } + }) + + instance.level = 'silent' + + Object.keys(pino.levels.values).forEach((level) => { + instance[level]('hello world') + }) + + end() +}) + +test('exposed levels', ({ end, same }) => { + same(Object.keys(pino.levels.values), [ + 'fatal', + 'error', + 'warn', + 'info', + 'debug', + 'trace' + ]) + end() +}) + +test('exposed labels', ({ end, same }) => { + same(Object.keys(pino.levels.labels), [ + '10', + '20', + '30', + '40', + '50', + '60' + ]) + end() +}) + +function checkLogObjects (is, same, actual, expected) { + is(actual.time <= Date.now(), true, 'time is greater 
than Date.now()') + + const actualCopy = Object.assign({}, actual) + const expectedCopy = Object.assign({}, expected) + delete actualCopy.time + delete expectedCopy.time + + same(actualCopy, expectedCopy) +} diff --git a/node_modules/pino/test/browser-serializers.test.js b/node_modules/pino/test/browser-serializers.test.js new file mode 100644 index 0000000..07cfa60 --- /dev/null +++ b/node_modules/pino/test/browser-serializers.test.js @@ -0,0 +1,352 @@ +'use strict' +// eslint-disable-next-line +if (typeof $1 !== 'undefined') $1 = arguments.callee.caller.arguments[0] + +const test = require('tape') +const fresh = require('import-fresh') +const pino = require('../browser') + +const parentSerializers = { + test: () => 'parent' +} + +const childSerializers = { + test: () => 'child' +} + +test('serializers override values', ({ end, is }) => { + const parent = pino({ + serializers: parentSerializers, + browser: { + serialize: true, + write (o) { + is(o.test, 'parent') + end() + } + } + }) + + parent.fatal({ test: 'test' }) +}) + +test('without the serialize option, serializers do not override values', ({ end, is }) => { + const parent = pino({ + serializers: parentSerializers, + browser: { + write (o) { + is(o.test, 'test') + end() + } + } + }) + + parent.fatal({ test: 'test' }) +}) + +if (process.title !== 'browser') { + test('if serialize option is true, standard error serializer is auto enabled', ({ end, same }) => { + const err = Error('test') + err.code = 'test' + err.type = 'Error' // get that cov + const expect = pino.stdSerializers.err(err) + + const consoleError = console.error + console.error = function (err) { + same(err, expect) + } + + const logger = fresh('../browser')({ + browser: { serialize: true } + }) + + console.error = consoleError + + logger.fatal(err) + end() + }) + + test('if serialize option is array, standard error serializer is auto enabled', ({ end, same }) => { + const err = Error('test') + err.code = 'test' + const expect = pino.stdSerializers.err(err) + + const consoleError = console.error + console.error = function (err) { + same(err, expect) + } + + const logger = fresh('../browser', require)({ + browser: { serialize: [] } + }) + + console.error = consoleError + + logger.fatal(err) + end() + }) + + test('if serialize option is array containing !stdSerializers.err, standard error serializer is disabled', ({ end, is }) => { + const err = Error('test') + err.code = 'test' + const expect = err + + const consoleError = console.error + console.error = function (err) { + is(err, expect) + } + + const logger = fresh('../browser', require)({ + browser: { serialize: ['!stdSerializers.err'] } + }) + + console.error = consoleError + + logger.fatal(err) + end() + }) + + test('in browser, serializers apply to all objects', ({ end, is }) => { + const consoleError = console.error + console.error = function (test, test2, test3, test4, test5) { + is(test.key, 'serialized') + is(test2.key2, 'serialized2') + is(test5.key3, 'serialized3') + } + + const logger = fresh('../browser', require)({ + serializers: { + key: () => 'serialized', + key2: () => 'serialized2', + key3: () => 'serialized3' + }, + browser: { serialize: true } + }) + + console.error = consoleError + + logger.fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' }) + end() + }) + + test('serialize can be an array of selected serializers', ({ end, is }) => { + const consoleError = console.error + console.error = function (test, test2, test3, test4, test5) { + 
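+      // Editor's note: with browser.serialize restricted to ['key2'], only
+      // that serializer runs; `key` and `key3` pass through untouched, and
+      // the string and array arguments (test3, test4) are never serialized.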
is(test.key, 'test') + is(test2.key2, 'serialized2') + is(test5.key3, 'test') + } + + const logger = fresh('../browser', require)({ + serializers: { + key: () => 'serialized', + key2: () => 'serialized2', + key3: () => 'serialized3' + }, + browser: { serialize: ['key2'] } + }) + + console.error = consoleError + + logger.fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' }) + end() + }) + + test('serialize filter applies to child loggers', ({ end, is }) => { + const consoleError = console.error + console.error = function (binding, test, test2, test3, test4, test5) { + is(test.key, 'test') + is(test2.key2, 'serialized2') + is(test5.key3, 'test') + } + + const logger = fresh('../browser', require)({ + browser: { serialize: ['key2'] } + }) + + console.error = consoleError + + logger.child({ + aBinding: 'test' + }, { + serializers: { + key: () => 'serialized', + key2: () => 'serialized2', + key3: () => 'serialized3' + } + }).fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' }) + end() + }) + + test('serialize filter applies to child loggers through bindings', ({ end, is }) => { + const consoleError = console.error + console.error = function (binding, test, test2, test3, test4, test5) { + is(test.key, 'test') + is(test2.key2, 'serialized2') + is(test5.key3, 'test') + } + + const logger = fresh('../browser', require)({ + browser: { serialize: ['key2'] } + }) + + console.error = consoleError + + logger.child({ + aBinding: 'test', + serializers: { + key: () => 'serialized', + key2: () => 'serialized2', + key3: () => 'serialized3' + } + }).fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' }) + end() + }) + + test('parent serializers apply to child bindings', ({ end, is }) => { + const consoleError = console.error + console.error = function (binding) { + is(binding.key, 'serialized') + } + + const logger = fresh('../browser', require)({ + serializers: { + key: () => 'serialized' + }, + browser: { serialize: true } + }) + + console.error = consoleError + + logger.child({ key: 'test' }).fatal({ test: 'test' }) + end() + }) + + test('child serializers apply to child bindings', ({ end, is }) => { + const consoleError = console.error + console.error = function (binding) { + is(binding.key, 'serialized') + } + + const logger = fresh('../browser', require)({ + browser: { serialize: true } + }) + + console.error = consoleError + + logger.child({ + key: 'test' + }, { + serializers: { + key: () => 'serialized' + } + }).fatal({ test: 'test' }) + end() + }) +} + +test('child does not overwrite parent serializers', ({ end, is }) => { + let c = 0 + const parent = pino({ + serializers: parentSerializers, + browser: { + serialize: true, + write (o) { + c++ + if (c === 1) is(o.test, 'parent') + if (c === 2) { + is(o.test, 'child') + end() + } + } + } + }) + const child = parent.child({}, { serializers: childSerializers }) + + parent.fatal({ test: 'test' }) + child.fatal({ test: 'test' }) +}) + +test('children inherit parent serializers', ({ end, is }) => { + const parent = pino({ + serializers: parentSerializers, + browser: { + serialize: true, + write (o) { + is(o.test, 'parent') + } + } + }) + + const child = parent.child({ a: 'property' }) + child.fatal({ test: 'test' }) + end() +}) + +test('children serializers get called', ({ end, is }) => { + const parent = pino({ + browser: { + serialize: true, + write (o) { + is(o.test, 'child') + } + } + }) 
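+  // Editor's note: the parent logger defines no serializers of its own; the
+  // child below supplies one through its options, which is why the write
+  // stub above expects the serialized value 'child' rather than raw 'test'.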
+ + const child = parent.child({ a: 'property' }, { serializers: childSerializers }) + + child.fatal({ test: 'test' }) + end() +}) + +test('children serializers get called when inherited from parent', ({ end, is }) => { + const parent = pino({ + serializers: parentSerializers, + browser: { + serialize: true, + write: (o) => { + is(o.test, 'pass') + } + } + }) + + const child = parent.child({}, { serializers: { test: () => 'pass' } }) + + child.fatal({ test: 'fail' }) + end() +}) + +test('non overridden serializers are available in the children', ({ end, is }) => { + const pSerializers = { + onlyParent: () => 'parent', + shared: () => 'parent' + } + + const cSerializers = { + shared: () => 'child', + onlyChild: () => 'child' + } + + let c = 0 + + const parent = pino({ + serializers: pSerializers, + browser: { + serialize: true, + write (o) { + c++ + if (c === 1) is(o.shared, 'child') + if (c === 2) is(o.onlyParent, 'parent') + if (c === 3) is(o.onlyChild, 'child') + if (c === 4) is(o.onlyChild, 'test') + } + } + }) + + const child = parent.child({}, { serializers: cSerializers }) + + child.fatal({ shared: 'test' }) + child.fatal({ onlyParent: 'test' }) + child.fatal({ onlyChild: 'test' }) + parent.fatal({ onlyChild: 'test' }) + end() +}) diff --git a/node_modules/pino/test/browser-timestamp.test.js b/node_modules/pino/test/browser-timestamp.test.js new file mode 100644 index 0000000..994d835 --- /dev/null +++ b/node_modules/pino/test/browser-timestamp.test.js @@ -0,0 +1,88 @@ +'use strict' +const test = require('tape') +const pino = require('../browser') + +Date.now = () => 1599400603614 + +test('null timestamp', ({ end, is }) => { + const instance = pino({ + timestamp: pino.stdTimeFunctions.nullTime, + browser: { + asObject: true, + write: function (o) { + is(o.time, undefined) + } + } + }) + instance.info('hello world') + end() +}) + +test('iso timestamp', ({ end, is }) => { + const instance = pino({ + timestamp: pino.stdTimeFunctions.isoTime, + browser: { + asObject: true, + write: function (o) { + is(o.time, '2020-09-06T13:56:43.614Z') + } + } + }) + instance.info('hello world') + end() +}) + +test('epoch timestamp', ({ end, is }) => { + const instance = pino({ + timestamp: pino.stdTimeFunctions.epochTime, + browser: { + asObject: true, + write: function (o) { + is(o.time, 1599400603614) + } + } + }) + instance.info('hello world') + end() +}) + +test('unix timestamp', ({ end, is }) => { + const instance = pino({ + timestamp: pino.stdTimeFunctions.unixTime, + browser: { + asObject: true, + write: function (o) { + is(o.time, Math.round(1599400603614 / 1000.0)) + } + } + }) + instance.info('hello world') + end() +}) + +test('epoch timestamp by default', ({ end, is }) => { + const instance = pino({ + browser: { + asObject: true, + write: function (o) { + is(o.time, 1599400603614) + } + } + }) + instance.info('hello world') + end() +}) + +test('not print timestamp if the option is false', ({ end, is }) => { + const instance = pino({ + timestamp: false, + browser: { + asObject: true, + write: function (o) { + is(o.time, undefined) + } + } + }) + instance.info('hello world') + end() +}) diff --git a/node_modules/pino/test/browser-transmit.test.js b/node_modules/pino/test/browser-transmit.test.js new file mode 100644 index 0000000..d5063ca --- /dev/null +++ b/node_modules/pino/test/browser-transmit.test.js @@ -0,0 +1,417 @@ +'use strict' +const test = require('tape') +const pino = require('../browser') + +function noop () {} + +test('throws if transmit object does not have send function', ({ 
end, throws }) => { + throws(() => { + pino({ browser: { transmit: {} } }) + }) + + throws(() => { + pino({ browser: { transmit: { send: 'not a func' } } }) + }) + + end() +}) + +test('calls send function after write', ({ end, is }) => { + let c = 0 + const logger = pino({ + browser: { + write: () => { + c++ + }, + transmit: { + send () { is(c, 1) } + } + } + }) + + logger.fatal({ test: 'test' }) + end() +}) + +test('passes send function the logged level', ({ end, is }) => { + const logger = pino({ + browser: { + write () {}, + transmit: { + send (level) { + is(level, 'fatal') + } + } + } + }) + + logger.fatal({ test: 'test' }) + end() +}) + +test('passes send function message strings in logEvent object when asObject is not set', ({ end, same, is }) => { + const logger = pino({ + browser: { + write: noop, + transmit: { + send (level, { messages }) { + is(messages[0], 'test') + is(messages[1], 'another test') + } + } + } + }) + + logger.fatal('test', 'another test') + + end() +}) + +test('passes send function message objects in logEvent object when asObject is not set', ({ end, same, is }) => { + const logger = pino({ + browser: { + write: noop, + transmit: { + send (level, { messages }) { + same(messages[0], { test: 'test' }) + is(messages[1], 'another test') + } + } + } + }) + + logger.fatal({ test: 'test' }, 'another test') + + end() +}) + +test('passes send function message strings in logEvent object when asObject is set', ({ end, same, is }) => { + const logger = pino({ + browser: { + asObject: true, + write: noop, + transmit: { + send (level, { messages }) { + is(messages[0], 'test') + is(messages[1], 'another test') + } + } + } + }) + + logger.fatal('test', 'another test') + + end() +}) + +test('passes send function message objects in logEvent object when asObject is set', ({ end, same, is }) => { + const logger = pino({ + browser: { + asObject: true, + write: noop, + transmit: { + send (level, { messages }) { + same(messages[0], { test: 'test' }) + is(messages[1], 'another test') + } + } + } + }) + + logger.fatal({ test: 'test' }, 'another test') + + end() +}) + +test('supplies a timestamp (ts) in logEvent object which is exactly the same as the `time` property in asObject mode', ({ end, is }) => { + let expected + const logger = pino({ + browser: { + asObject: true, // implicit because `write`, but just to be explicit + write (o) { + expected = o.time + }, + transmit: { + send (level, logEvent) { + is(logEvent.ts, expected) + } + } + } + }) + + logger.fatal('test') + end() +}) + +test('passes send function child bindings via logEvent object', ({ end, same, is }) => { + const logger = pino({ + browser: { + write: noop, + transmit: { + send (level, logEvent) { + const messages = logEvent.messages + const bindings = logEvent.bindings + same(bindings[0], { first: 'binding' }) + same(bindings[1], { second: 'binding2' }) + same(messages[0], { test: 'test' }) + is(messages[1], 'another test') + } + } + } + }) + + logger + .child({ first: 'binding' }) + .child({ second: 'binding2' }) + .fatal({ test: 'test' }, 'another test') + end() +}) + +test('passes send function level:{label, value} via logEvent object', ({ end, is }) => { + const logger = pino({ + browser: { + write: noop, + transmit: { + send (level, logEvent) { + const label = logEvent.level.label + const value = logEvent.level.value + + is(label, 'fatal') + is(value, 60) + } + } + } + }) + + logger.fatal({ test: 'test' }, 'another test') + end() +}) + +test('calls send function according to transmit.level', ({ end, is }) => { + 
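+  // Editor's note: browser.transmit.level gates only the send() callback and
+  // is checked independently of the logger's own level, so the warn() call
+  // below never reaches send() while error() and fatal() do.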
let c = 0 + const logger = pino({ + browser: { + write: noop, + transmit: { + level: 'error', + send (level) { + c++ + if (c === 1) is(level, 'error') + if (c === 2) is(level, 'fatal') + } + } + } + }) + logger.warn('ignored') + logger.error('test') + logger.fatal('test') + end() +}) + +test('transmit.level defaults to logger level', ({ end, is }) => { + let c = 0 + const logger = pino({ + level: 'error', + browser: { + write: noop, + transmit: { + send (level) { + c++ + if (c === 1) is(level, 'error') + if (c === 2) is(level, 'fatal') + } + } + } + }) + logger.warn('ignored') + logger.error('test') + logger.fatal('test') + end() +}) + +test('transmit.level is effective even if lower than logger level', ({ end, is }) => { + let c = 0 + const logger = pino({ + level: 'error', + browser: { + write: noop, + transmit: { + level: 'info', + send (level) { + c++ + if (c === 1) is(level, 'warn') + if (c === 2) is(level, 'error') + if (c === 3) is(level, 'fatal') + } + } + } + }) + logger.warn('ignored') + logger.error('test') + logger.fatal('test') + end() +}) + +test('applies all serializers to messages and bindings (serialize:false - default)', ({ end, same, is }) => { + const logger = pino({ + serializers: { + first: () => 'first', + second: () => 'second', + test: () => 'serialize it' + }, + browser: { + write: noop, + transmit: { + send (level, logEvent) { + const messages = logEvent.messages + const bindings = logEvent.bindings + same(bindings[0], { first: 'first' }) + same(bindings[1], { second: 'second' }) + same(messages[0], { test: 'serialize it' }) + is(messages[1].type, 'Error') + } + } + } + }) + + logger + .child({ first: 'binding' }) + .child({ second: 'binding2' }) + .fatal({ test: 'test' }, Error()) + end() +}) + +test('applies all serializers to messages and bindings (serialize:true)', ({ end, same, is }) => { + const logger = pino({ + serializers: { + first: () => 'first', + second: () => 'second', + test: () => 'serialize it' + }, + browser: { + serialize: true, + write: noop, + transmit: { + send (level, logEvent) { + const messages = logEvent.messages + const bindings = logEvent.bindings + same(bindings[0], { first: 'first' }) + same(bindings[1], { second: 'second' }) + same(messages[0], { test: 'serialize it' }) + is(messages[1].type, 'Error') + } + } + } + }) + + logger + .child({ first: 'binding' }) + .child({ second: 'binding2' }) + .fatal({ test: 'test' }, Error()) + end() +}) + +test('extracts correct bindings and raw messages over multiple transmits', ({ end, same, is }) => { + let messages = null + let bindings = null + + const logger = pino({ + browser: { + write: noop, + transmit: { + send (level, logEvent) { + messages = logEvent.messages + bindings = logEvent.bindings + } + } + } + }) + + const child = logger.child({ child: true }) + const grandchild = child.child({ grandchild: true }) + + logger.fatal({ test: 'parent:test1' }) + logger.fatal({ test: 'parent:test2' }) + same([], bindings) + same([{ test: 'parent:test2' }], messages) + + child.fatal({ test: 'child:test1' }) + child.fatal({ test: 'child:test2' }) + same([{ child: true }], bindings) + same([{ test: 'child:test2' }], messages) + + grandchild.fatal({ test: 'grandchild:test1' }) + grandchild.fatal({ test: 'grandchild:test2' }) + same([{ child: true }, { grandchild: true }], bindings) + same([{ test: 'grandchild:test2' }], messages) + + end() +}) + +test('does not log below configured level', ({ end, is }) => { + let message = null + const logger = pino({ + level: 'info', + browser: { + write (o) { + 
message = o.msg + }, + transmit: { + send () { } + } + } + }) + + logger.debug('this message is silent') + is(message, null) + + end() +}) + +test('silent level prevents logging even with transmit', ({ end, fail }) => { + const logger = pino({ + level: 'silent', + browser: { + write () { + fail('no data should be logged by the write method') + }, + transmit: { + send () { + fail('no data should be logged by the send method') + } + } + } + }) + + Object.keys(pino.levels.values).forEach((level) => { + logger[level]('ignored') + }) + + end() +}) + +test('does not call send when transmit.level is set to silent', ({ end, fail, is }) => { + let c = 0 + const logger = pino({ + level: 'trace', + browser: { + write () { + c++ + }, + transmit: { + level: 'silent', + send () { + fail('no data should be logged by the transmit method') + } + } + } + }) + + const levels = Object.keys(pino.levels.values) + levels.forEach((level) => { + logger[level]('message') + }) + + is(c, levels.length, 'write must be called exactly once per level') + end() +}) diff --git a/node_modules/pino/test/browser.test.js b/node_modules/pino/test/browser.test.js new file mode 100644 index 0000000..0712e48 --- /dev/null +++ b/node_modules/pino/test/browser.test.js @@ -0,0 +1,679 @@ +'use strict' +const test = require('tape') +const fresh = require('import-fresh') +const pinoStdSerializers = require('pino-std-serializers') +const pino = require('../browser') + +levelTest('fatal') +levelTest('error') +levelTest('warn') +levelTest('info') +levelTest('debug') +levelTest('trace') + +test('silent level', ({ end, fail, pass }) => { + const instance = pino({ + level: 'silent', + browser: { write: fail } + }) + instance.info('test') + const child = instance.child({ test: 'test' }) + child.info('msg-test') + // use setTimeout because setImmediate isn't supported in most browsers + setTimeout(() => { + pass() + end() + }, 0) +}) + +test('enabled false', ({ end, fail, pass }) => { + const instance = pino({ + enabled: false, + browser: { write: fail } + }) + instance.info('test') + const child = instance.child({ test: 'test' }) + child.info('msg-test') + // use setTimeout because setImmediate isn't supported in most browsers + setTimeout(() => { + pass() + end() + }, 0) +}) + +test('throw if creating child without bindings', ({ end, throws }) => { + const instance = pino() + throws(() => instance.child()) + end() +}) + +test('stubs write, flush and ee methods on instance', ({ end, ok, is }) => { + const instance = pino() + + ok(isFunc(instance.setMaxListeners)) + ok(isFunc(instance.getMaxListeners)) + ok(isFunc(instance.emit)) + ok(isFunc(instance.addListener)) + ok(isFunc(instance.on)) + ok(isFunc(instance.prependListener)) + ok(isFunc(instance.once)) + ok(isFunc(instance.prependOnceListener)) + ok(isFunc(instance.removeListener)) + ok(isFunc(instance.removeAllListeners)) + ok(isFunc(instance.listeners)) + ok(isFunc(instance.listenerCount)) + ok(isFunc(instance.eventNames)) + ok(isFunc(instance.write)) + ok(isFunc(instance.flush)) + + is(instance.on(), undefined) + + end() +}) + +test('exposes levels object', ({ end, same }) => { + same(pino.levels, { + values: { + fatal: 60, + error: 50, + warn: 40, + info: 30, + debug: 20, + trace: 10 + }, + labels: { + 10: 'trace', + 20: 'debug', + 30: 'info', + 40: 'warn', + 50: 'error', + 60: 'fatal' + } + }) + + end() +}) + +test('exposes faux stdSerializers', ({ end, ok, same }) => { + ok(pino.stdSerializers) + // make sure faux stdSerializers match pino-std-serializers + for (const serializer in 
pinoStdSerializers) { + ok(pino.stdSerializers[serializer], `pino.stdSerializers.${serializer}`) + } + // confirm faux methods return empty objects + same(pino.stdSerializers.req(), {}) + same(pino.stdSerializers.mapHttpRequest(), {}) + same(pino.stdSerializers.mapHttpResponse(), {}) + same(pino.stdSerializers.res(), {}) + // confirm wrapping function is a passthrough + const noChange = { foo: 'bar', fuz: 42 } + same(pino.stdSerializers.wrapRequestSerializer(noChange), noChange) + same(pino.stdSerializers.wrapResponseSerializer(noChange), noChange) + end() +}) + +test('exposes err stdSerializer', ({ end, ok }) => { + ok(pino.stdSerializers.err) + ok(pino.stdSerializers.err(Error())) + end() +}) + +consoleMethodTest('error') +consoleMethodTest('fatal', 'error') +consoleMethodTest('warn') +consoleMethodTest('info') +consoleMethodTest('debug') +consoleMethodTest('trace') +absentConsoleMethodTest('error', 'log') +absentConsoleMethodTest('warn', 'error') +absentConsoleMethodTest('info', 'log') +absentConsoleMethodTest('debug', 'log') +absentConsoleMethodTest('trace', 'log') + +// do not run this with airtap +if (process.title !== 'browser') { + test('in absence of console, log methods become noops', ({ end, ok }) => { + const console = global.console + delete global.console + const instance = fresh('../browser')() + global.console = console + ok(fnName(instance.log).match(/noop/)) + ok(fnName(instance.fatal).match(/noop/)) + ok(fnName(instance.error).match(/noop/)) + ok(fnName(instance.warn).match(/noop/)) + ok(fnName(instance.info).match(/noop/)) + ok(fnName(instance.debug).match(/noop/)) + ok(fnName(instance.trace).match(/noop/)) + end() + }) +} + +test('opts.browser.asObject logs pino-like object to console', ({ end, ok, is }) => { + const info = console.info + console.info = function (o) { + is(o.level, 30) + is(o.msg, 'test') + ok(o.time) + console.info = info + } + const instance = require('../browser')({ + browser: { + asObject: true + } + }) + + instance.info('test') + end() +}) + +test('opts.browser.asObject uses opts.messageKey in logs', ({ end, ok, is }) => { + const messageKey = 'message' + const instance = require('../browser')({ + messageKey, + browser: { + asObject: true, + write: function (o) { + is(o.level, 30) + is(o[messageKey], 'test') + ok(o.time) + } + } + }) + + instance.info('test') + end() +}) + +test('opts.browser.asObjectBindingsOnly passes the bindings but keep the message unformatted', ({ end, ok, is, deepEqual }) => { + const messageKey = 'message' + const instance = require('../browser')({ + messageKey, + browser: { + asObjectBindingsOnly: true, + write: function (o, msg, ...args) { + is(o.level, 30) + ok(o.time) + is(msg, 'test %s') + deepEqual(args, ['foo']) + } + } + }) + + instance.info('test %s', 'foo') + end() +}) + +test('opts.browser.formatters (level) logs pino-like object to console', ({ end, ok, is }) => { + const info = console.info + console.info = function (o) { + is(o.level, 30) + is(o.label, 'info') + is(o.msg, 'test') + ok(o.time) + console.info = info + } + const instance = require('../browser')({ + browser: { + formatters: { + level (label, number) { + return { label, level: number } + } + } + } + }) + + instance.info('test') + end() +}) + +test('opts.browser.formatters (log) logs pino-like object to console', ({ end, ok, is }) => { + const info = console.info + console.info = function (o) { + is(o.level, 30) + is(o.msg, 'test') + is(o.hello, 'world') + is(o.newField, 'test') + ok(o.time, `Logged at ${o.time}`) + console.info = info + } + const 
instance = require('../browser')({ + browser: { + formatters: { + log (o) { + return { ...o, newField: 'test', time: `Logged at ${o.time}` } + } + } + } + }) + + instance.info({ hello: 'world' }, 'test') + end() +}) + +test('opts.browser.serialize and opts.browser.transmit only serializes log data once', ({ end, ok, is }) => { + const instance = require('../browser')({ + serializers: { + extras (data) { + return { serializedExtras: data } + } + }, + browser: { + serialize: ['extras'], + transmit: { + level: 'info', + send (level, o) { + is(o.messages[0].extras.serializedExtras, 'world') + } + } + } + }) + + instance.info({ extras: 'world' }, 'test') + end() +}) + +test('opts.browser.serialize and opts.asObject only serializes log data once', ({ end, ok, is }) => { + const instance = require('../browser')({ + serializers: { + extras (data) { + return { serializedExtras: data } + } + }, + browser: { + serialize: ['extras'], + asObject: true, + write: function (o) { + is(o.extras.serializedExtras, 'world') + } + } + }) + + instance.info({ extras: 'world' }, 'test') + end() +}) + +test('opts.browser.serialize, opts.asObject and opts.browser.transmit only serializes log data once', ({ end, ok, is }) => { + const instance = require('../browser')({ + serializers: { + extras (data) { + return { serializedExtras: data } + } + }, + browser: { + serialize: ['extras'], + asObject: true, + transmit: { + send (level, o) { + is(o.messages[0].extras.serializedExtras, 'world') + } + } + } + }) + + instance.info({ extras: 'world' }, 'test') + end() +}) + +test('opts.browser.write func log single string', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write: function (o) { + is(o.level, 30) + is(o.msg, 'test') + ok(o.time) + } + } + }) + instance.info('test') + + end() +}) + +test('opts.browser.write func string joining', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write: function (o) { + is(o.level, 30) + is(o.msg, 'test test2 test3') + ok(o.time) + } + } + }) + instance.info('test %s %s', 'test2', 'test3') + + end() +}) + +test('opts.browser.write func string joining when asObject is true', ({ end, ok, is }) => { + const instance = pino({ + browser: { + asObject: true, + write: function (o) { + is(o.level, 30) + is(o.msg, 'test test2 test3') + ok(o.time) + } + } + }) + instance.info('test %s %s', 'test2', 'test3') + + end() +}) + +test('opts.browser.write func string object joining', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write: function (o) { + is(o.level, 30) + is(o.msg, 'test {"test":"test2"} {"test":"test3"}') + ok(o.time) + } + } + }) + instance.info('test %j %j', { test: 'test2' }, { test: 'test3' }) + + end() +}) + +test('opts.browser.write func string object joining when asObject is true', ({ end, ok, is }) => { + const instance = pino({ + browser: { + asObject: true, + write: function (o) { + is(o.level, 30) + is(o.msg, 'test {"test":"test2"} {"test":"test3"}') + ok(o.time) + } + } + }) + instance.info('test %j %j', { test: 'test2' }, { test: 'test3' }) + + end() +}) + +test('opts.browser.write func string interpolation', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write: function (o) { + is(o.level, 30) + is(o.msg, 'test2 test ({"test":"test3"})') + ok(o.time) + } + } + }) + instance.info('%s test (%j)', 'test2', { test: 'test3' }) + + end() +}) + +test('opts.browser.write func number', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write: function (o) { + is(o.level, 30) + is(o.msg, 1) + ok(o.time) 
+ } + } + }) + instance.info(1) + + end() +}) + +test('opts.browser.write func log single object', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write: function (o) { + is(o.level, 30) + is(o.test, 'test') + ok(o.time) + } + } + }) + instance.info({ test: 'test' }) + + end() +}) + +test('opts.browser.write obj writes to methods corresponding to level', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write: { + error: function (o) { + is(o.level, 50) + is(o.test, 'test') + ok(o.time) + } + } + } + }) + instance.error({ test: 'test' }) + + end() +}) + +test('opts.browser.asObject/write supports child loggers', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write (o) { + is(o.level, 30) + is(o.test, 'test') + is(o.msg, 'msg-test') + ok(o.time) + } + } + }) + const child = instance.child({ test: 'test' }) + child.info('msg-test') + + end() +}) + +test('opts.browser.asObject/write supports child child loggers', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write (o) { + is(o.level, 30) + is(o.test, 'test') + is(o.foo, 'bar') + is(o.msg, 'msg-test') + ok(o.time) + } + } + }) + const child = instance.child({ test: 'test' }).child({ foo: 'bar' }) + child.info('msg-test') + + end() +}) + +test('opts.browser.asObject/write supports child child child loggers', ({ end, ok, is }) => { + const instance = pino({ + browser: { + write (o) { + is(o.level, 30) + is(o.test, 'test') + is(o.foo, 'bar') + is(o.baz, 'bop') + is(o.msg, 'msg-test') + ok(o.time) + } + } + }) + const child = instance.child({ test: 'test' }).child({ foo: 'bar' }).child({ baz: 'bop' }) + child.info('msg-test') + + end() +}) + +test('opts.browser.asObject defensively mitigates naughty numbers', ({ end, pass }) => { + const instance = pino({ + browser: { asObject: true, write: () => {} } + }) + const child = instance.child({ test: 'test' }) + child._childLevel = -10 + child.info('test') + pass() // if we reached here, there was no infinite loop, so, .. pass. 
+ + end() +}) + +test('opts.browser.write obj falls back to console where a method is not supplied', ({ end, ok, is }) => { + const info = console.info + console.info = (o) => { + is(o.level, 30) + is(o.msg, 'test') + ok(o.time) + console.info = info + } + const instance = require('../browser')({ + browser: { + write: { + error (o) { + is(o.level, 50) + is(o.test, 'test') + ok(o.time) + } + } + } + }) + instance.error({ test: 'test' }) + instance.info('test') + + end() +}) + +function levelTest (name) { + test(name + ' logs', ({ end, is }) => { + const msg = 'hello world' + sink(name, (args) => { + is(args[0], msg) + end() + }) + pino({ level: name })[name](msg) + }) + + test('passing objects at level ' + name, ({ end, is }) => { + const msg = { hello: 'world' } + sink(name, (args) => { + is(args[0], msg) + end() + }) + pino({ level: name })[name](msg) + }) + + test('passing an object and a string at level ' + name, ({ end, is }) => { + const a = { hello: 'world' } + const b = 'a string' + sink(name, (args) => { + is(args[0], a) + is(args[1], b) + end() + }) + pino({ level: name })[name](a, b) + }) + + test('formatting logs as ' + name, ({ end, is }) => { + sink(name, (args) => { + is(args[0], 'hello %d') + is(args[1], 42) + end() + }) + pino({ level: name })[name]('hello %d', 42) + }) + + test('passing error at level ' + name, ({ end, is }) => { + const err = new Error('myerror') + sink(name, (args) => { + is(args[0], err) + end() + }) + pino({ level: name })[name](err) + }) + + test('passing error with a serializer at level ' + name, ({ end, is }) => { + // in browser - should have no effect (should not crash) + const err = new Error('myerror') + sink(name, (args) => { + is(args[0].err, err) + end() + }) + const instance = pino({ + level: name, + serializers: { + err: pino.stdSerializers.err + } + }) + instance[name]({ err }) + }) + + test('child logger for level ' + name, ({ end, is }) => { + const msg = 'hello world' + const parent = { hello: 'world' } + sink(name, (args) => { + is(args[0], parent) + is(args[1], msg) + end() + }) + const instance = pino({ level: name }) + const child = instance.child(parent) + child[name](msg) + }) + + test('child-child logger for level ' + name, ({ end, is }) => { + const msg = 'hello world' + const grandParent = { hello: 'world' } + const parent = { hello: 'you' } + sink(name, (args) => { + is(args[0], grandParent) + is(args[1], parent) + is(args[2], msg) + end() + }) + const instance = pino({ level: name }) + const child = instance.child(grandParent).child(parent) + child[name](msg) + }) +} + +function consoleMethodTest (level, method) { + if (!method) method = level + test('pino().' + level + ' uses console.' + method, ({ end, is }) => { + sink(method, (args) => { + is(args[0], 'test') + end() + }) + const instance = require('../browser')({ level }) + instance[level]('test') + }) +} + +function absentConsoleMethodTest (method, fallback) { + test('in absence of console.' + method + ', console.' 
+ fallback + ' is used', ({ end, is }) => { + const fn = console[method] + console[method] = undefined + sink(fallback, function (args) { + is(args[0], 'test') + end() + console[method] = fn + }) + const instance = require('../browser')({ level: method }) + instance[method]('test') + }) +} + +function isFunc (fn) { return typeof fn === 'function' } +function fnName (fn) { + const rx = /^\s*function\s*([^(]*)/i + const match = rx.exec(fn) + return match && match[1] +} +function sink (method, fn) { + if (method === 'fatal') method = 'error' + const orig = console[method] + console[method] = function () { + console[method] = orig + fn(Array.prototype.slice.call(arguments)) + } +} diff --git a/node_modules/pino/test/complex-objects.test.js b/node_modules/pino/test/complex-objects.test.js new file mode 100644 index 0000000..0149531 --- /dev/null +++ b/node_modules/pino/test/complex-objects.test.js @@ -0,0 +1,36 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { PassThrough } = require('node:stream') + +const { sink, once } = require('./helper') +const pino = require('../') + +test('Proxy and stream objects', async () => { + const s = new PassThrough() + s.resume() + s.write('', () => {}) + const obj = { s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) } + const stream = sink() + const instance = pino(stream) + instance.info({ obj }) + + const result = await once(stream, 'data') + + assert.equal(result.obj, '[unable to serialize, circular reference is too complex to analyze]') +}) + +test('Proxy and stream objects', async () => { + const s = new PassThrough() + s.resume() + s.write('', () => {}) + const obj = { s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) } + const stream = sink() + const instance = pino(stream) + instance.info(obj) + + const result = await once(stream, 'data') + + assert.equal(result.p, '[unable to serialize, circular reference is too complex to analyze]') +}) diff --git a/node_modules/pino/test/crlf.test.js b/node_modules/pino/test/crlf.test.js new file mode 100644 index 0000000..56728a5 --- /dev/null +++ b/node_modules/pino/test/crlf.test.js @@ -0,0 +1,34 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') + +const writer = require('flush-write-stream') +const pino = require('../') + +function capture () { + const ws = writer((chunk, enc, cb) => { + ws.data += chunk.toString() + cb() + }) + ws.data = '' + return ws +} + +test('pino uses LF by default', async () => { + const stream = capture() + const logger = pino(stream) + logger.info('foo') + logger.error('bar') + assert.ok(/foo[^\r\n]+\n[^\r\n]+bar[^\r\n]+\n/.test(stream.data)) +}) + +test('pino can log CRLF', async () => { + const stream = capture() + const logger = pino({ + crlf: true + }, stream) + logger.info('foo') + logger.error('bar') + assert.ok(/foo[^\n]+\r\n[^\n]+bar[^\n]+\r\n/.test(stream.data)) +}) diff --git a/node_modules/pino/test/custom-levels.test.js b/node_modules/pino/test/custom-levels.test.js new file mode 100644 index 0000000..dae7292 --- /dev/null +++ b/node_modules/pino/test/custom-levels.test.js @@ -0,0 +1,267 @@ +'use strict' + +/* eslint no-prototype-builtins: 0 */ + +const test = require('node:test') +const assert = require('node:assert') + +const { sink, once } = require('./helper') +const pino = require('../') + +// Silence all warnings for this test +process.removeAllListeners('warning') +process.on('warning', () => {}) + +test('adds additional levels', async () => { + 
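+  // Editor's note: each customLevels entry generates a logger method named
+  // after its label, logging at the given numeric value (foo at 35 sits
+  // between info at 30 and warn at 40), alongside the default levels.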
const stream = sink() + const logger = pino({ + customLevels: { + foo: 35, + bar: 45 + } + }, stream) + + logger.foo('test') + const { level } = await once(stream, 'data') + assert.equal(level, 35) +}) + +test('custom levels does not override default levels', async () => { + const stream = sink() + const logger = pino({ + customLevels: { + foo: 35 + } + }, stream) + + logger.info('test') + const { level } = await once(stream, 'data') + assert.equal(level, 30) +}) + +test('default levels can be redefined using custom levels', async () => { + const stream = sink() + const logger = pino({ + customLevels: { + info: 35, + debug: 45 + }, + useOnlyCustomLevels: true + }, stream) + + assert.equal(logger.hasOwnProperty('info'), true) + + logger.info('test') + const { level } = await once(stream, 'data') + assert.equal(level, 35) +}) + +test('custom levels overrides default level label if use useOnlyCustomLevels', async () => { + const stream = sink() + const logger = pino({ + customLevels: { + foo: 35 + }, + useOnlyCustomLevels: true, + level: 'foo' + }, stream) + + assert.equal(logger.hasOwnProperty('info'), false) +}) + +test('custom levels overrides default level value if use useOnlyCustomLevels', async () => { + const stream = sink() + const logger = pino({ + customLevels: { + foo: 35 + }, + useOnlyCustomLevels: true, + level: 35 + }, stream) + + assert.equal(logger.hasOwnProperty('info'), false) +}) + +test('custom levels are inherited by children', async () => { + const stream = sink() + const logger = pino({ + customLevels: { + foo: 35 + } + }, stream) + + logger.child({ childMsg: 'ok' }).foo('test') + const { msg, childMsg, level } = await once(stream, 'data') + assert.equal(level, 35) + assert.equal(childMsg, 'ok') + assert.equal(msg, 'test') +}) + +test('custom levels can be specified on child bindings', async () => { + const stream = sink() + const logger = pino(stream).child({ + childMsg: 'ok' + }, { + customLevels: { + foo: 35 + } + }) + + logger.foo('test') + const { msg, childMsg, level } = await once(stream, 'data') + assert.equal(level, 35) + assert.equal(childMsg, 'ok') + assert.equal(msg, 'test') +}) + +test('customLevels property child bindings does not get logged', async () => { + const stream = sink() + const logger = pino(stream).child({ + childMsg: 'ok' + }, { + customLevels: { + foo: 35 + } + }) + + logger.foo('test') + const { customLevels } = await once(stream, 'data') + assert.equal(customLevels, undefined) +}) + +test('throws when specifying pre-existing parent labels via child bindings', async () => { + const stream = sink() + assert.throws( + () => pino({ + customLevels: { + foo: 35 + } + }, stream).child({}, { + customLevels: { + foo: 45 + } + }), + /levels cannot be overridden/ + ) +}) + +test('throws when specifying pre-existing parent values via child bindings', async () => { + const stream = sink() + assert.throws( + () => pino({ + customLevels: { + foo: 35 + } + }, stream).child({}, { + customLevels: { + bar: 35 + } + }), + /pre-existing level values cannot be used for new levels/ + ) +}) + +test('throws when specifying core values via child bindings', async () => { + const stream = sink() + assert.throws( + () => pino(stream).child({}, { + customLevels: { + foo: 30 + } + }), + /pre-existing level values cannot be used for new levels/ + ) +}) + +test('throws when useOnlyCustomLevels is set true without customLevels', async () => { + const stream = sink() + assert.throws( + () => pino({ + useOnlyCustomLevels: true + }, stream), + /customLevels is required if 
useOnlyCustomLevels is set true/ + ) +}) + +test('custom level on one instance does not affect other instances', async () => { + pino({ + customLevels: { + foo: 37 + } + }) + assert.equal(typeof pino().foo, 'undefined') +}) + +test('setting level below or at custom level will successfully log', async () => { + const stream = sink() + const instance = pino({ customLevels: { foo: 35 } }, stream) + instance.level = 'foo' + instance.info('nope') + instance.foo('bar') + const { msg } = await once(stream, 'data') + assert.equal(msg, 'bar') +}) + +test('custom level below level threshold will not log', async () => { + const stream = sink() + const instance = pino({ customLevels: { foo: 15 } }, stream) + instance.level = 'info' + instance.info('bar') + instance.foo('nope') + const { msg } = await once(stream, 'data') + assert.equal(msg, 'bar') +}) + +test('does not share custom level state across siblings', async () => { + const stream = sink() + const logger = pino(stream) + logger.child({}, { + customLevels: { foo: 35 } + }) + assert.doesNotThrow(() => { + logger.child({}, { + customLevels: { foo: 35 } + }) + }) +}) + +test('custom level does not affect the levels serializer', async () => { + const stream = sink() + const logger = pino({ + customLevels: { + foo: 35, + bar: 45 + }, + formatters: { + level (label, number) { + return { priority: number } + } + } + }, stream) + + logger.foo('test') + const { priority } = await once(stream, 'data') + assert.equal(priority, 35) +}) + +test('When useOnlyCustomLevels is set to true, the level formatter should only get custom levels', async () => { + const stream = sink() + const logger = pino({ + customLevels: { + answer: 42 + }, + useOnlyCustomLevels: true, + level: 42, + formatters: { + level (label, number) { + assert.equal(label, 'answer') + assert.equal(number, 42) + return { level: number } + } + } + }, stream) + + logger.answer('test') + const { level } = await once(stream, 'data') + assert.equal(level, 42) +}) diff --git a/node_modules/pino/test/diagnostics.test.js b/node_modules/pino/test/diagnostics.test.js new file mode 100644 index 0000000..2291fa8 --- /dev/null +++ b/node_modules/pino/test/diagnostics.test.js @@ -0,0 +1,107 @@ +'use strict' + +const test = require('node:test') +const os = require('node:os') +const diagChan = require('node:diagnostics_channel') +const { AsyncLocalStorage } = require('node:async_hooks') +const { Writable } = require('node:stream') +const tspl = require('@matteo.collina/tspl') +const pino = require('../pino') + +const hostname = os.hostname() +const { pid } = process +const AS_JSON_START = 'tracing:pino_asJson:start' +const AS_JSON_END = 'tracing:pino_asJson:end' + +// Skip tests if diagnostics_channel.tracingChannel is not available (Node < 18.19) +const skip = typeof diagChan.tracingChannel !== 'function' + +test.beforeEach(ctx => { + ctx.pino = { + ts: 1757512800000, // 2025-09-10T10:00:00.000-05:00 + now: Date.now + } + + Date.now = () => ctx.pino.ts + + ctx.pino.dest = new Writable({ + objectMode: true, + write (data, enc, cb) { + cb() + } + }) +}) + +test.afterEach(ctx => { + Date.now = ctx.pino.now +}) + +test('asJson emits events', { skip }, async (t) => { + const plan = tspl(t, { plan: 8 }) + const { dest } = t.pino + const logger = pino({}, dest) + const expectedArguments = [ + {}, + 'testing', + 30, + `,"time":${t.pino.ts}` + ] + + let startEvent + diagChan.subscribe(AS_JSON_START, startHandler) + diagChan.subscribe(AS_JSON_END, endHandler) + + logger.info('testing') + await plan + + 
diagChan.unsubscribe(AS_JSON_START, startHandler) + diagChan.unsubscribe(AS_JSON_END, endHandler) + + function startHandler (event) { + startEvent = event + plan.equal(Object.prototype.toString.call(event.instance), '[object Pino]') + plan.equal(event.instance === logger, true) + plan.deepStrictEqual(Array.from(event.arguments ?? []), expectedArguments) + } + + function endHandler (event) { + plan.equal(Object.prototype.toString.call(event.instance), '[object Pino]') + plan.equal(event.instance === logger, true) + plan.deepStrictEqual(Array.from(event.arguments ?? []), expectedArguments) + plan.equal( + event.result, + `{"level":30,"time":${t.pino.ts},"pid":${pid},"hostname":"${hostname}","msg":"testing"}\n` + ) + + plan.equal(event.arguments === startEvent.arguments, true, 'same event object is supplied to both events') + } +}) + +test('asJson context is not lost', { skip }, async (t) => { + const plan = tspl(t, { plan: 2 }) + const { dest } = t.pino + const logger = pino({}, dest) + const asyncLocalStorage = new AsyncLocalStorage() + const localStore = { foo: 'bar' } + + diagChan.subscribe(AS_JSON_START, startHandler) + diagChan.subscribe(AS_JSON_END, endHandler) + + asyncLocalStorage.run(localStore, () => { + logger.info('testing') + }) + await plan + + diagChan.unsubscribe(AS_JSON_START, startHandler) + diagChan.unsubscribe(AS_JSON_END, endHandler) + + function startHandler () { + const store = asyncLocalStorage.getStore() + plan.equal(store === localStore, true) + } + + function endHandler () { + const store = asyncLocalStorage.getStore() + plan.equal(store === localStore, true) + } +}) diff --git a/node_modules/pino/test/error-key.test.js b/node_modules/pino/test/error-key.test.js new file mode 100644 index 0000000..532d5e4 --- /dev/null +++ b/node_modules/pino/test/error-key.test.js @@ -0,0 +1,37 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') + +const { sink, once } = require('./helper') +const stdSerializers = require('pino-std-serializers') +const pino = require('../') + +test('set the errorKey with error serializer', async () => { + const stream = sink() + const errorKey = 'error' + const instance = pino({ + errorKey, + serializers: { [errorKey]: stdSerializers.err } + }, stream) + instance.error(new ReferenceError('test')) + const o = await once(stream, 'data') + assert.equal(typeof o[errorKey], 'object') + assert.equal(o[errorKey].type, 'ReferenceError') + assert.equal(o[errorKey].message, 'test') + assert.equal(typeof o[errorKey].stack, 'string') +}) + +test('set the errorKey without error serializer', async () => { + const stream = sink() + const errorKey = 'error' + const instance = pino({ + errorKey + }, stream) + instance.error(new ReferenceError('test')) + const o = await once(stream, 'data') + assert.equal(typeof o[errorKey], 'object') + assert.equal(o[errorKey].type, 'ReferenceError') + assert.equal(o[errorKey].message, 'test') + assert.equal(typeof o[errorKey].stack, 'string') +}) diff --git a/node_modules/pino/test/error.test.js b/node_modules/pino/test/error.test.js new file mode 100644 index 0000000..9f0a94c --- /dev/null +++ b/node_modules/pino/test/error.test.js @@ -0,0 +1,403 @@ +'use strict' + +/* eslint no-prototype-builtins: 0 */ + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const tspl = require('@matteo.collina/tspl') + +const { sink, once } = require('./helper') +const pino = require('../') + +const { pid } = process +const hostname = os.hostname() 
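+// the first group of tests below logs via instance[name](...) at 'error', whose default numeric value in pino is 50,
+// and deletes the non-deterministic `time` field before deep-comparing the parsed output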
+const level = 50 +const name = 'error' + +test('err is serialized with additional properties set on the Error object', async () => { + const stream = sink() + const err = Object.assign(new Error('myerror'), { foo: 'bar' }) + const instance = pino(stream) + instance.level = name + instance[name](err) + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + err: { + type: 'Error', + message: err.message, + stack: err.stack, + foo: err.foo + }, + msg: err.message + }) +}) + +test('type should be detected based on constructor', async () => { + class Bar extends Error {} + const stream = sink() + const err = new Bar('myerror') + const instance = pino(stream) + instance.level = name + instance[name](err) + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + err: { + type: 'Bar', + message: err.message, + stack: err.stack + }, + msg: err.message + }) +}) + +test('type, message and stack should be first level properties', async () => { + const stream = sink() + const err = Object.assign(new Error('foo'), { foo: 'bar' }) + const instance = pino(stream) + instance.level = name + instance[name](err) + + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + err: { + type: 'Error', + message: err.message, + stack: err.stack, + foo: err.foo + }, + msg: err.message + }) +}) + +test('err serializer', async () => { + const stream = sink() + const err = Object.assign(new Error('myerror'), { foo: 'bar' }) + const instance = pino({ + serializers: { + err: pino.stdSerializers.err + } + }, stream) + + instance.level = name + instance[name]({ err }) + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + err: { + type: 'Error', + message: err.message, + stack: err.stack, + foo: err.foo + }, + msg: err.message + }) +}) + +test('an error with statusCode property is not confused for a http response', async () => { + const stream = sink() + const err = Object.assign(new Error('StatusCodeErr'), { statusCode: 500 }) + const instance = pino(stream) + + instance.level = name + instance[name](err) + const result = await once(stream, 'data') + + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + err: { + type: 'Error', + message: err.message, + stack: err.stack, + statusCode: err.statusCode + }, + msg: err.message + }) +}) + +test('stack is omitted if it is not set on err', async (t) => { + const plan = tspl(t, { plan: 2 }) + const err = new Error('myerror') + delete err.stack + const instance = pino(sink(function (chunk, enc, cb) { + plan.ok(new Date(chunk.time) <= new Date(), 'time is greater than Date.now()') + delete chunk.time + plan.equal(chunk.hasOwnProperty('stack'), false) + cb() + })) + + instance.level = name + instance[name](err) + + await plan +}) + +test('correctly ignores toString on errors', async () => { + const err = new Error('myerror') + err.toString = () => undefined + const stream = sink() + 
const instance = pino({ + test: 'this' + }, stream) + instance.fatal(err) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + err: { + type: 'Error', + message: err.message, + stack: err.stack + }, + msg: err.message + }) +}) + 
+test('assign mixin()', async () => { + const err = new Error('myerror') + const stream = sink() + const instance = pino({ + mixin () { + return { hello: 'world' } + } + }, stream) + instance.fatal(err) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + hello: 'world', + err: { + type: 'Error', + message: err.message, + stack: err.stack + }, + msg: err.message + }) +}) + 
+test('no err serializer', async () => { + const err = new Error('myerror') + const stream = sink() + const instance = pino({ + serializers: {} + }, stream) + instance.fatal(err) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + err: { + type: 'Error', + message: err.message, + stack: err.stack + }, + msg: err.message + }) +}) + 
+test('empty serializer', async () => { + const err = new Error('myerror') + const stream = sink() + const instance = pino({ + serializers: { + err () {} + } + }, stream) + instance.fatal(err) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + msg: err.message + }) +}) + 
+test('correctly adds error information when nestedKey is used', async () => { + const err = new Error('myerror') + err.toString = () => undefined + const stream = sink() + const instance = pino({ + test: 'this', + nestedKey: 'obj' + }, stream) + instance.fatal(err) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + obj: { + err: { + type: 'Error', + stack: err.stack, + message: err.message + } + }, + msg: err.message + }) +}) + 
+test('correctly adds msg on error when nestedKey is used', async () => { + const err = new Error('myerror') + err.toString = () => undefined + const stream = sink() + const instance = pino({ + test: 'this', + nestedKey: 'obj' + }, stream) + instance.fatal(err, 'msg message') + const result = await once(stream, 'data') + 
delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + obj: { + err: { + type: 'Error', + stack: err.stack, + message: err.message + } + }, + msg: 'msg message' + }) +}) + +test('msg should take precedence over error message on mergingObject', async () => { + const err = new Error('myerror') + const stream = sink() + const instance = pino(stream) + instance.error({ msg: 'my message', err }) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 50, + err: { + type: 'Error', + stack: err.stack, + message: err.message + }, + msg: 'my message' + }) +}) + +test('considers messageKey when giving msg precedence over error', async () => { + const err = new Error('myerror') + const stream = sink() + const instance = pino({ messageKey: 'message' }, stream) + instance.error({ message: 'my message', err }) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 50, + err: { + type: 'Error', + stack: err.stack, + message: err.message + }, + message: 'my message' + }) +}) diff --git a/node_modules/pino/test/escaping.test.js b/node_modules/pino/test/escaping.test.js new file mode 100644 index 0000000..ea7db20 --- /dev/null +++ b/node_modules/pino/test/escaping.test.js @@ -0,0 +1,93 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') + +const { sink, once } = require('./helper') +const pino = require('../') + +const { pid } = process +const hostname = os.hostname() + +function testEscape (ch, key) { + test('correctly escape ' + ch, async () => { + const stream = sink() + const instance = pino({ + name: 'hello' + }, stream) + instance.fatal('this contains ' + key) + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + name: 'hello', + msg: 'this contains ' + key + }) + }) +} + +testEscape('\\n', '\n') +testEscape('\\/', '/') +testEscape('\\\\', '\\') +testEscape('\\r', '\r') +testEscape('\\t', '\t') +testEscape('\\b', '\b') + +const toEscape = [ + '\u0000', // NUL Null character + '\u0001', // SOH Start of Heading + '\u0002', // STX Start of Text + '\u0003', // ETX End-of-text character + '\u0004', // EOT End-of-transmission character + '\u0005', // ENQ Enquiry character + '\u0006', // ACK Acknowledge character + '\u0007', // BEL Bell character + '\u0008', // BS Backspace + '\u0009', // HT Horizontal tab + '\u000A', // LF Line feed + '\u000B', // VT Vertical tab + '\u000C', // FF Form feed + '\u000D', // CR Carriage return + '\u000E', // SO Shift Out + '\u000F', // SI Shift In + '\u0010', // DLE Data Link Escape + '\u0011', // DC1 Device Control 1 + '\u0012', // DC2 Device Control 2 + '\u0013', // DC3 Device Control 3 + '\u0014', // DC4 Device Control 4 + '\u0015', // NAK Negative-acknowledge character + '\u0016', // SYN Synchronous Idle + '\u0017', // ETB End of Transmission Block + '\u0018', // CAN Cancel character + '\u0019', // EM End of Medium + '\u001A', // SUB Substitute character + '\u001B', // ESC Escape character + '\u001C', // FS File Separator + '\u001D', // GS Group Separator + '\u001E', // RS Record Separator + '\u001F' // US Unit Separator +] + +toEscape.forEach((key) => { + testEscape(JSON.stringify(key), key) +}) + +test('correctly escape `hello \\u001F world \\n \\u0022`', async () => { + const stream = sink() + const instance = pino({ + name: 'hello' + }, stream) + instance.fatal('hello 
\u001F world \n \u0022') + const result = await once(stream, 'data') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 60, + name: 'hello', + msg: 'hello \u001F world \n \u0022' + }) +}) 
diff --git a/node_modules/pino/test/esm/esm.mjs b/node_modules/pino/test/esm/esm.mjs new file mode 100644 index 0000000..e32c2a9 --- /dev/null +++ b/node_modules/pino/test/esm/esm.mjs @@ -0,0 +1,14 @@ +import test from 'node:test' +import assert from 'node:assert' + +import pino from '../../pino.js' +import helper from '../helper.js' + +const { sink, check, once } = helper + +test('esm support', async () => { + const stream = sink() + const instance = pino(stream) + instance.info('hello world') + check(assert.equal, await once(stream, 'data'), 30, 'hello world') +}) 
diff --git a/node_modules/pino/test/esm/index.test.js b/node_modules/pino/test/esm/index.test.js new file mode 100644 index 0000000..f627b77 --- /dev/null +++ b/node_modules/pino/test/esm/index.test.js @@ -0,0 +1,21 @@ +'use strict' + +// Node v8 throws a `SyntaxError: Unexpected token import` +// even if this branch is never touched in the code; +// building the import call dynamically avoids this issue. +// eslint-disable-next-line + new Function('module', 'return import(module)')('./esm.mjs').catch((err) => { + process.nextTick(() => { + throw err + }) +}) + +// Node v8 throws a `SyntaxError: Unexpected token import` +// even if this branch is never touched in the code; +// building the import call dynamically avoids this issue. +// eslint-disable-next-line + new Function('module', 'return import(module)')('./named-exports.mjs').catch((err) => { + process.nextTick(() => { + throw err + }) +}) 
diff --git a/node_modules/pino/test/esm/named-exports.mjs b/node_modules/pino/test/esm/named-exports.mjs new file mode 100644 index 0000000..c338b03 --- /dev/null +++ b/node_modules/pino/test/esm/named-exports.mjs @@ -0,0 +1,29 @@ +import test from 'node:test' +import assert from 'node:assert' +import { hostname } from 'node:os' +import { readFileSync } from 'node:fs' + +import { sink, check, once, watchFileCreated, file } from '../helper.js' +import { pino, destination } from '../../pino.js' + +test('named exports support', async () => { + const stream = sink() + const instance = pino(stream) + instance.info('hello world') + check(assert.equal, await once(stream, 'data'), 30, 'hello world') +}) + +test('destination', async () => { + const tmp = file() + const instance = pino(destination(tmp)) + instance.info('hello') + await watchFileCreated(tmp) + const result = JSON.parse(readFileSync(tmp).toString()) + delete result.time + assert.deepEqual(result, { + pid: process.pid, + hostname, + level: 30, + msg: 'hello' + }) +}) 
diff --git a/node_modules/pino/test/exit.test.js b/node_modules/pino/test/exit.test.js new file mode 100644 index 0000000..4e3f6b6 --- /dev/null +++ b/node_modules/pino/test/exit.test.js @@ -0,0 +1,79 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { join } = require('node:path') + +const execa = require('execa') +const writer = require('flush-write-stream') +const { once } = require('./helper') + +// https://github.com/pinojs/pino/issues/542 +test('pino.destination logs everything when calling process.exit(0)', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'destination-exit.js')]) + + child.stdout.pipe(writer((s, enc, cb) => { + actual += s + cb() + })) + + await once(child, 'close') + + assert.equal(actual.match(/hello/) != null, true) 
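+ // 'world' was logged immediately before process.exit(0), so it must have been flushed as well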
+ assert.equal(actual.match(/world/) != null, true) +}) + 
+test('pino with no args logs everything when calling process.exit(0)', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'default-exit.js')]) + + child.stdout.pipe(writer((s, enc, cb) => { + actual += s + cb() + })) + + await once(child, 'close') + + assert.equal(actual.match(/hello/) != null, true) + assert.equal(actual.match(/world/) != null, true) +}) + 
+test('sync false logs everything when calling process.exit(0)', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'syncfalse-exit.js')]) + + child.stdout.pipe(writer((s, enc, cb) => { + actual += s + cb() + })) + + await once(child, 'close') + + assert.equal(actual.match(/hello/) != null, true) + assert.equal(actual.match(/world/) != null, true) +}) + 
+test('sync false logs everything when calling flushSync', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'syncfalse-flush-exit.js')]) + + child.stdout.pipe(writer((s, enc, cb) => { + actual += s + cb() + })) + + await once(child, 'close') + + assert.equal(actual.match(/hello/) != null, true) + assert.equal(actual.match(/world/) != null, true) +}) + 
+test('transport exits gracefully when logging in exit', async () => { + const child = execa(process.argv[0], [join(__dirname, 'fixtures', 'transport-with-on-exit.js')]) + child.stdout.resume() + + const code = await once(child, 'close') + + assert.equal(code, 0) +}) 
diff --git a/node_modules/pino/test/fixtures/broken-pipe/basic.js b/node_modules/pino/test/fixtures/broken-pipe/basic.js new file mode 100644 index 0000000..cc33c9b --- /dev/null +++ b/node_modules/pino/test/fixtures/broken-pipe/basic.js @@ -0,0 +1,9 @@ +'use strict' + +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } + +const pino = require('../../..')() + +pino.info('hello world') 
diff --git a/node_modules/pino/test/fixtures/broken-pipe/destination.js b/node_modules/pino/test/fixtures/broken-pipe/destination.js new file mode 100644 index 0000000..701f686 --- /dev/null +++ b/node_modules/pino/test/fixtures/broken-pipe/destination.js @@ -0,0 +1,10 @@ +'use strict' + +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } + +const pino = require('../../..') +const logger = pino(pino.destination()) + +logger.info('hello world') 
diff --git a/node_modules/pino/test/fixtures/broken-pipe/syncfalse.js b/node_modules/pino/test/fixtures/broken-pipe/syncfalse.js new file mode 100644 index 0000000..de71431 --- /dev/null +++ b/node_modules/pino/test/fixtures/broken-pipe/syncfalse.js @@ -0,0 +1,12 @@ +'use strict' + +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } + +const pino = require('../../..') +const logger = pino(pino.destination({ sync: false })) + +for (var i = 0; i < 1000; i++) { + logger.info('hello world') +} 
diff --git a/node_modules/pino/test/fixtures/console-transport.js b/node_modules/pino/test/fixtures/console-transport.js new file mode 100644 index 0000000..9974ebc --- /dev/null +++ b/node_modules/pino/test/fixtures/console-transport.js @@ -0,0 +1,13 @@ +const { Writable } = require('node:stream')
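+// a minimal legacy-style transport fixture: pino writes each serialized log line
+// into the returned Writable, which re-emits it upper-cased through console.log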
+ +module.exports = (options) => { + const myTransportStream = new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + // apply a transform and send to stdout + console.log(chunk.toString().toUpperCase()) + cb() + } + }) + return myTransportStream +} diff --git a/node_modules/pino/test/fixtures/crashing-transport.js b/node_modules/pino/test/fixtures/crashing-transport.js new file mode 100644 index 0000000..1f3d46e --- /dev/null +++ b/node_modules/pino/test/fixtures/crashing-transport.js @@ -0,0 +1,13 @@ +const { Writable } = require('node:stream') + +module.exports = () => + new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + setImmediate(() => { + /* eslint-disable no-empty */ + for (let i = 0; i < 1e3; i++) {} + process.exit(0) + }) + } + }) diff --git a/node_modules/pino/test/fixtures/default-exit.js b/node_modules/pino/test/fixtures/default-exit.js new file mode 100644 index 0000000..3fd2a0e --- /dev/null +++ b/node_modules/pino/test/fixtures/default-exit.js @@ -0,0 +1,8 @@ +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('./../../')) +const logger = pino() +logger.info('hello') +logger.info('world') +process.exit(0) diff --git a/node_modules/pino/test/fixtures/destination-exit.js b/node_modules/pino/test/fixtures/destination-exit.js new file mode 100644 index 0000000..63c6d69 --- /dev/null +++ b/node_modules/pino/test/fixtures/destination-exit.js @@ -0,0 +1,8 @@ +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('./../../')) +const logger = pino({}, pino.destination(1)) +logger.info('hello') +logger.info('world') +process.exit(0) diff --git a/node_modules/pino/test/fixtures/eval/index.js b/node_modules/pino/test/fixtures/eval/index.js new file mode 100644 index 0000000..1d45ad0 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/index.js @@ -0,0 +1,13 @@ +/* eslint-disable no-eval */ + +eval(` +const pino = require('../../../') + +const logger = pino( + pino.transport({ + target: 'pino/file' + }) +) + +logger.info('done!') +`) diff --git a/node_modules/pino/test/fixtures/eval/node_modules/14-files.js b/node_modules/pino/test/fixtures/eval/node_modules/14-files.js new file mode 100644 index 0000000..32a20da --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/14-files.js @@ -0,0 +1,3 @@ +const file1 = require("./file1.js") + +file1() diff --git a/node_modules/pino/test/fixtures/eval/node_modules/2-files.js b/node_modules/pino/test/fixtures/eval/node_modules/2-files.js new file mode 100644 index 0000000..8c665ed --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/2-files.js @@ -0,0 +1,3 @@ +const file12 = require("./file12.js") + +file12() diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file1.js b/node_modules/pino/test/fixtures/eval/node_modules/file1.js new file mode 100644 index 0000000..4ce13fb --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file1.js @@ -0,0 +1,5 @@ +const file2 = require("./file2.js") + +module.exports = function () { + file2() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file10.js b/node_modules/pino/test/fixtures/eval/node_modules/file10.js new file mode 100644 index 0000000..136f0e0 --- /dev/null +++ 
b/node_modules/pino/test/fixtures/eval/node_modules/file10.js @@ -0,0 +1,5 @@ +const file11 = require("./file11.js") + +module.exports = function () { + file11() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file11.js b/node_modules/pino/test/fixtures/eval/node_modules/file11.js new file mode 100644 index 0000000..f8a731b --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file11.js @@ -0,0 +1,5 @@ +const file12 = require("./file12.js") + +module.exports = function () { + file12() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file12.js b/node_modules/pino/test/fixtures/eval/node_modules/file12.js new file mode 100644 index 0000000..e8e330f --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file12.js @@ -0,0 +1,5 @@ +const file13 = require("./file13.js") + +module.exports = function () { + file13() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file13.js b/node_modules/pino/test/fixtures/eval/node_modules/file13.js new file mode 100644 index 0000000..6db9a61 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file13.js @@ -0,0 +1,5 @@ +const file14 = require("./file14.js") + +module.exports = function () { + file14() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file14.js b/node_modules/pino/test/fixtures/eval/node_modules/file14.js new file mode 100644 index 0000000..443ca7f --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file14.js @@ -0,0 +1,11 @@ +const pino = require("../../../../"); + +module.exports = function() { + const logger = pino( + pino.transport({ + target: 'pino/file' + }) + ) + + logger.info('done!') +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file2.js b/node_modules/pino/test/fixtures/eval/node_modules/file2.js new file mode 100644 index 0000000..46877d5 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file2.js @@ -0,0 +1,5 @@ +const file3 = require("./file3.js") + +module.exports = function () { + file3() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file3.js b/node_modules/pino/test/fixtures/eval/node_modules/file3.js new file mode 100644 index 0000000..3a6ac78 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file3.js @@ -0,0 +1,5 @@ +const file4 = require("./file4.js") + +module.exports = function () { + file4() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file4.js b/node_modules/pino/test/fixtures/eval/node_modules/file4.js new file mode 100644 index 0000000..b679e24 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file4.js @@ -0,0 +1,5 @@ +const file5 = require("./file5.js") + +module.exports = function () { + file5() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file5.js b/node_modules/pino/test/fixtures/eval/node_modules/file5.js new file mode 100644 index 0000000..06cd045 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file5.js @@ -0,0 +1,5 @@ +const file6 = require("./file6.js") + +module.exports = function () { + file6() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file6.js b/node_modules/pino/test/fixtures/eval/node_modules/file6.js new file mode 100644 index 0000000..3abf1dc --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file6.js @@ -0,0 +1,5 @@ +const file7 = require("./file7.js") + +module.exports = function () { + file7() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file7.js 
b/node_modules/pino/test/fixtures/eval/node_modules/file7.js new file mode 100644 index 0000000..4d2f488 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file7.js @@ -0,0 +1,5 @@ +const file8 = require("./file8.js") + +module.exports = function () { + file8() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file8.js b/node_modules/pino/test/fixtures/eval/node_modules/file8.js new file mode 100644 index 0000000..e87f177 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file8.js @@ -0,0 +1,5 @@ +const file9 = require("./file9.js") + +module.exports = function () { + file9() +} diff --git a/node_modules/pino/test/fixtures/eval/node_modules/file9.js b/node_modules/pino/test/fixtures/eval/node_modules/file9.js new file mode 100644 index 0000000..0164926 --- /dev/null +++ b/node_modules/pino/test/fixtures/eval/node_modules/file9.js @@ -0,0 +1,5 @@ +const file10 = require("./file10.js") + +module.exports = function () { + file10() +} diff --git a/node_modules/pino/test/fixtures/noop-transport.js b/node_modules/pino/test/fixtures/noop-transport.js new file mode 100644 index 0000000..745504a --- /dev/null +++ b/node_modules/pino/test/fixtures/noop-transport.js @@ -0,0 +1,10 @@ +const { Writable } = require('node:stream') + +module.exports = () => { + return new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + cb() + } + }) +} diff --git a/node_modules/pino/test/fixtures/pretty/null-prototype.js b/node_modules/pino/test/fixtures/pretty/null-prototype.js new file mode 100644 index 0000000..c88e686 --- /dev/null +++ b/node_modules/pino/test/fixtures/pretty/null-prototype.js @@ -0,0 +1,8 @@ +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('./../../../')) +const log = pino({ prettyPrint: true }) +const obj = Object.create(null) +Object.assign(obj, { foo: 'bar' }) +log.info(obj, 'hello') diff --git a/node_modules/pino/test/fixtures/stdout-hack-protection.js b/node_modules/pino/test/fixtures/stdout-hack-protection.js new file mode 100644 index 0000000..525ef62 --- /dev/null +++ b/node_modules/pino/test/fixtures/stdout-hack-protection.js @@ -0,0 +1,11 @@ +global.process = { __proto__: process, pid: 123456 } + +const write = process.stdout.write.bind(process.stdout) +process.stdout.write = function (chunk) { + write('hack ' + chunk) +} + +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('../../'))() +pino.info('me') diff --git a/node_modules/pino/test/fixtures/syncfalse-child.js b/node_modules/pino/test/fixtures/syncfalse-child.js new file mode 100644 index 0000000..f858b3d --- /dev/null +++ b/node_modules/pino/test/fixtures/syncfalse-child.js @@ -0,0 +1,6 @@ +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('./../../')) +const asyncLogger = pino(pino.destination({ sync: false })).child({ hello: 'world' }) +asyncLogger.info('h') diff --git a/node_modules/pino/test/fixtures/syncfalse-exit.js b/node_modules/pino/test/fixtures/syncfalse-exit.js new file mode 100644 index 0000000..fb09eab --- /dev/null +++ b/node_modules/pino/test/fixtures/syncfalse-exit.js @@ -0,0 +1,9 @@ +global.process = { __proto__: 
process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('./../../')) +const dest = pino.destination({ dest: 1, minLength: 4096, sync: false }) +const logger = pino({}, dest) +logger.info('hello') +logger.info('world') +process.exit(0) diff --git a/node_modules/pino/test/fixtures/syncfalse-flush-exit.js b/node_modules/pino/test/fixtures/syncfalse-flush-exit.js new file mode 100644 index 0000000..bf9cb4f --- /dev/null +++ b/node_modules/pino/test/fixtures/syncfalse-flush-exit.js @@ -0,0 +1,10 @@ +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('./../../')) +const dest = pino.destination({ dest: 1, minLength: 4096, sync: false }) +const logger = pino({}, dest) +logger.info('hello') +logger.info('world') +dest.flushSync() +process.exit(0) diff --git a/node_modules/pino/test/fixtures/syncfalse.js b/node_modules/pino/test/fixtures/syncfalse.js new file mode 100644 index 0000000..4d36752 --- /dev/null +++ b/node_modules/pino/test/fixtures/syncfalse.js @@ -0,0 +1,6 @@ +global.process = { __proto__: process, pid: 123456 } +Date.now = function () { return 1459875739796 } +require('node:os').hostname = function () { return 'abcdefghijklmnopqr' } +const pino = require(require.resolve('./../../')) +const asyncLogger = pino(pino.destination({ minLength: 4096, sync: false })) +asyncLogger.info('h') diff --git a/node_modules/pino/test/fixtures/syntax-error-esm.mjs b/node_modules/pino/test/fixtures/syntax-error-esm.mjs new file mode 100644 index 0000000..021d53b --- /dev/null +++ b/node_modules/pino/test/fixtures/syntax-error-esm.mjs @@ -0,0 +1,2 @@ +// This is a syntax error +import diff --git a/node_modules/pino/test/fixtures/to-file-transport-with-transform.js b/node_modules/pino/test/fixtures/to-file-transport-with-transform.js new file mode 100644 index 0000000..89cf465 --- /dev/null +++ b/node_modules/pino/test/fixtures/to-file-transport-with-transform.js @@ -0,0 +1,20 @@ +'use strict' + +const fs = require('node:fs') +const { once } = require('node:events') +const { Transform } = require('node:stream') + +async function run (opts) { + if (!opts.destination) throw new Error('kaboom') + const stream = fs.createWriteStream(opts.destination) + await once(stream, 'open') + const t = new Transform({ + transform (chunk, enc, cb) { + setImmediate(cb, null, chunk.toString().toUpperCase()) + } + }) + t.pipe(stream) + return t +} + +module.exports = run diff --git a/node_modules/pino/test/fixtures/to-file-transport.js b/node_modules/pino/test/fixtures/to-file-transport.js new file mode 100644 index 0000000..09f1274 --- /dev/null +++ b/node_modules/pino/test/fixtures/to-file-transport.js @@ -0,0 +1,13 @@ +'use strict' + +const fs = require('node:fs') +const { once } = require('node:events') + +async function run (opts) { + if (!opts.destination) throw new Error('kaboom') + const stream = fs.createWriteStream(opts.destination) + await once(stream, 'open') + return stream +} + +module.exports = run diff --git a/node_modules/pino/test/fixtures/to-file-transport.mjs b/node_modules/pino/test/fixtures/to-file-transport.mjs new file mode 100644 index 0000000..4925d3b --- /dev/null +++ b/node_modules/pino/test/fixtures/to-file-transport.mjs @@ -0,0 +1,8 @@ +import { createWriteStream } from 'node:fs' +import { once } from 
'node:events' + +export default async function run (opts) { + const stream = createWriteStream(opts.destination) + await once(stream, 'open') + return stream +} diff --git a/node_modules/pino/test/fixtures/transport-exit-immediately-with-async-dest.js b/node_modules/pino/test/fixtures/transport-exit-immediately-with-async-dest.js new file mode 100644 index 0000000..9837e33 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-exit-immediately-with-async-dest.js @@ -0,0 +1,16 @@ +'use strict' + +const pino = require('../..') +const transport = pino.transport({ + target: './to-file-transport-with-transform.js', + options: { + destination: process.argv[2] + } +}) +const logger = pino(transport) + +logger.info('Hello') + +logger.info('World') + +process.exit(0) diff --git a/node_modules/pino/test/fixtures/transport-exit-immediately.js b/node_modules/pino/test/fixtures/transport-exit-immediately.js new file mode 100644 index 0000000..5be55e4 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-exit-immediately.js @@ -0,0 +1,11 @@ +'use strict' + +const pino = require('../..') +const transport = pino.transport({ + target: 'pino/file' +}) +const logger = pino(transport) + +logger.info('Hello') + +process.exit(0) diff --git a/node_modules/pino/test/fixtures/transport-exit-on-ready.js b/node_modules/pino/test/fixtures/transport-exit-on-ready.js new file mode 100644 index 0000000..1520db5 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-exit-on-ready.js @@ -0,0 +1,12 @@ +'use strict' + +const pino = require('../..') +const transport = pino.transport({ + target: 'pino/file' +}) +const logger = pino(transport) + +transport.on('ready', function () { + logger.info('Hello') + process.exit(0) +}) diff --git a/node_modules/pino/test/fixtures/transport-main.js b/node_modules/pino/test/fixtures/transport-main.js new file mode 100644 index 0000000..cb02005 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-main.js @@ -0,0 +1,9 @@ +'use strict' + +const { join } = require('node:path') +const pino = require('../..') +const transport = pino.transport({ + target: join(__dirname, 'transport-worker.js') +}) +const logger = pino(transport) +logger.info('Hello') diff --git a/node_modules/pino/test/fixtures/transport-many-lines.js b/node_modules/pino/test/fixtures/transport-many-lines.js new file mode 100644 index 0000000..d8bb5e3 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-many-lines.js @@ -0,0 +1,29 @@ +'use strict' + +const pino = require('../..') +const transport = pino.transport({ + targets: [{ + level: 'info', + target: 'pino/file', + options: { + destination: process.argv[2] + } + }] +}) +const logger = pino(transport) + +const toWrite = 1000000 +transport.on('ready', run) + +let total = 0 + +function run () { + if (total++ === 8) { + return + } + + for (let i = 0; i < toWrite; i++) { + logger.info(`hello ${i}`) + } + transport.once('drain', run) +} diff --git a/node_modules/pino/test/fixtures/transport-string-stdout.js b/node_modules/pino/test/fixtures/transport-string-stdout.js new file mode 100644 index 0000000..64d8ac1 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-string-stdout.js @@ -0,0 +1,9 @@ +'use strict' + +const pino = require('../..') +const transport = pino.transport({ + target: 'pino/file', + options: { destination: '1' } +}) +const logger = pino(transport) +logger.info('Hello') diff --git a/node_modules/pino/test/fixtures/transport-transform.js b/node_modules/pino/test/fixtures/transport-transform.js new file mode 100644 
index 0000000..4950236 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-transform.js @@ -0,0 +1,21 @@ +'use strict' + +const build = require('pino-abstract-transport') +const { pipeline, Transform } = require('node:stream') +module.exports = (options) => { + return build(function (source) { + const myTransportStream = new Transform({ + autoDestroy: true, + objectMode: true, + transform (chunk, enc, cb) { + chunk.service = 'pino' + this.push(JSON.stringify(chunk)) + cb() + } + }) + pipeline(source, myTransportStream, () => {}) + return myTransportStream + }, { + enablePipelining: true + }) +} diff --git a/node_modules/pino/test/fixtures/transport-uses-pino-config.js b/node_modules/pino/test/fixtures/transport-uses-pino-config.js new file mode 100644 index 0000000..0c87c94 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-uses-pino-config.js @@ -0,0 +1,33 @@ +'use strict' + +const build = require('pino-abstract-transport') +const { pipeline, Transform } = require('node:stream') +module.exports = () => { + return build(function (source) { + const myTransportStream = new Transform({ + autoDestroy: true, + objectMode: true, + transform (chunk, enc, cb) { + const { + time, + level, + [source.messageKey]: body, + [source.errorKey]: error, + ...attributes + } = chunk + this.push(JSON.stringify({ + severityText: source.levels.labels[level], + body, + attributes, + ...(error && { error }) + })) + cb() + } + }) + pipeline(source, myTransportStream, () => {}) + return myTransportStream + }, { + enablePipelining: true, + expectPinoConfig: true + }) +} diff --git a/node_modules/pino/test/fixtures/transport-with-on-exit.js b/node_modules/pino/test/fixtures/transport-with-on-exit.js new file mode 100644 index 0000000..655a173 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-with-on-exit.js @@ -0,0 +1,12 @@ +'use strict' +const pino = require('../..') +const log = pino({ + transport: { + target: 'pino/file', + options: { destination: 1 } + } +}) +log.info('hello world!') +process.on('exit', (code) => { + log.info('Exiting peacefully') +}) diff --git a/node_modules/pino/test/fixtures/transport-worker-data.js b/node_modules/pino/test/fixtures/transport-worker-data.js new file mode 100644 index 0000000..1e0e7a8 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-worker-data.js @@ -0,0 +1,19 @@ +'use strict' + +const { parentPort, workerData } = require('worker_threads') +const { Writable } = require('node:stream') + +module.exports = (options) => { + const myTransportStream = new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + parentPort.postMessage({ + code: 'EVENT', + name: 'workerData', + args: [workerData] + }) + cb() + } + }) + return myTransportStream +} diff --git a/node_modules/pino/test/fixtures/transport-worker.js b/node_modules/pino/test/fixtures/transport-worker.js new file mode 100644 index 0000000..8964b26 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport-worker.js @@ -0,0 +1,15 @@ +'use strict' + +const { Writable } = require('node:stream') +const fs = require('node:fs') +module.exports = (options) => { + const myTransportStream = new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + // Bypass console.log() to avoid flakiness + fs.writeSync(1, chunk.toString()) + cb() + } + }) + return myTransportStream +} diff --git a/node_modules/pino/test/fixtures/transport-wrong-export-type.js b/node_modules/pino/test/fixtures/transport-wrong-export-type.js new file mode 100644 index 0000000..ed0affd --- /dev/null +++ 
b/node_modules/pino/test/fixtures/transport-wrong-export-type.js @@ -0,0 +1,3 @@ +module.exports = { + completelyUnrelatedProperty: 'Just a very incorrect transport worker implementation' +} 
diff --git a/node_modules/pino/test/fixtures/transport/index.js b/node_modules/pino/test/fixtures/transport/index.js new file mode 100644 index 0000000..f255858 --- /dev/null +++ b/node_modules/pino/test/fixtures/transport/index.js @@ -0,0 +1,12 @@ +'use strict' + +const fs = require('node:fs') +const { once } = require('node:events') + +async function run (opts) { + const stream = fs.createWriteStream(opts.destination) + await once(stream, 'open') + return stream +} + +module.exports = run 
diff --git a/node_modules/pino/test/fixtures/transport/package.json b/node_modules/pino/test/fixtures/transport/package.json new file mode 100644 index 0000000..26beeaa --- /dev/null +++ b/node_modules/pino/test/fixtures/transport/package.json @@ -0,0 +1,5 @@ +{ + "name": "transport", + "version": "0.0.1", + "main": "./index.js" +} 
diff --git a/node_modules/pino/test/fixtures/ts/to-file-transport-with-transform.ts b/node_modules/pino/test/fixtures/ts/to-file-transport-with-transform.ts new file mode 100644 index 0000000..aa56b3d --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/to-file-transport-with-transform.ts @@ -0,0 +1,18 @@ +import * as fs from 'node:fs' +import { once } from 'node:events' +import { Transform } from 'node:stream' + +async function run (opts: { destination?: fs.PathLike }): Promise<Transform> { + if (!opts.destination) throw new Error('kaboom') + const stream = fs.createWriteStream(opts.destination) + await once(stream, 'open') + const t = new Transform({ + transform (chunk, enc, cb) { + setImmediate(cb, null, chunk.toString().toUpperCase()) + } + }) + t.pipe(stream) + return t +} + +export default run 
diff --git a/node_modules/pino/test/fixtures/ts/to-file-transport.ts b/node_modules/pino/test/fixtures/ts/to-file-transport.ts new file mode 100644 index 0000000..1860606 --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/to-file-transport.ts @@ -0,0 +1,11 @@ +import * as fs from 'node:fs' +import { once } from 'node:events' + +async function run (opts: { destination?: fs.PathLike }): Promise<fs.WriteStream> { + if (!opts.destination) throw new Error('kaboom') + const stream = fs.createWriteStream(opts.destination, { encoding: 'utf8' }) + await once(stream, 'open') + return stream +} + +export default run 
diff --git a/node_modules/pino/test/fixtures/ts/transpile.cjs b/node_modules/pino/test/fixtures/ts/transpile.cjs new file mode 100644 index 0000000..6c2af67 --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/transpile.cjs @@ -0,0 +1,36 @@ +#!/usr/bin/env node + +const execa = require('execa') +const fs = require('node:fs') + +const existsSync = fs.existsSync +const stat = fs.promises.stat + +// Hardcoded parameters +const esVersions = ['es5', 'es6', 'es2017', 'esnext'] +const filesToTranspile = ['to-file-transport.ts'] + +async function transpile () { + process.chdir(__dirname) + + for (const sourceFileName of filesToTranspile) { + const sourceStat = await stat(sourceFileName) + + for (const esVersion of esVersions) { + const intermediateFileName = sourceFileName.replace(/\.ts$/, '.js') + const targetFileName = sourceFileName.replace(/\.ts$/, `.${esVersion}.cjs`) + + const shouldTranspile = !existsSync(targetFileName) || (await stat(targetFileName)).mtimeMs < sourceStat.mtimeMs + + if (shouldTranspile) { + await execa('tsc', ['--target', esVersion, '--module', 'commonjs', sourceFileName]) + await execa('mv', 
[intermediateFileName, targetFileName]) + } + } + } +} + +transpile().catch(err => { + process.exitCode = 1 + throw err +}) diff --git a/node_modules/pino/test/fixtures/ts/transport-exit-immediately-with-async-dest.ts b/node_modules/pino/test/fixtures/ts/transport-exit-immediately-with-async-dest.ts new file mode 100644 index 0000000..f3e6f2e --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/transport-exit-immediately-with-async-dest.ts @@ -0,0 +1,15 @@ +import pino from '../../..' +import { join } from 'node:path' + +const transport = pino.transport({ + target: join(__dirname, 'to-file-transport-with-transform.ts'), + options: { + destination: process.argv[2] + } +}) +const logger = pino(transport) + +logger.info('Hello') +logger.info('World') + +process.exit(0) diff --git a/node_modules/pino/test/fixtures/ts/transport-exit-immediately.ts b/node_modules/pino/test/fixtures/ts/transport-exit-immediately.ts new file mode 100644 index 0000000..21f2ab7 --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/transport-exit-immediately.ts @@ -0,0 +1,10 @@ +import pino from '../../..' + +const transport = pino.transport({ + target: 'pino/file' +}) +const logger = pino(transport) + +logger.info('Hello') + +process.exit(0) diff --git a/node_modules/pino/test/fixtures/ts/transport-exit-on-ready.ts b/node_modules/pino/test/fixtures/ts/transport-exit-on-ready.ts new file mode 100644 index 0000000..a1f6a84 --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/transport-exit-on-ready.ts @@ -0,0 +1,11 @@ +import pino from '../../..' + +const transport = pino.transport({ + target: 'pino/file' +}) +const logger = pino(transport) + +transport.on('ready', function () { + logger.info('Hello') + process.exit(0) +}) diff --git a/node_modules/pino/test/fixtures/ts/transport-main.ts b/node_modules/pino/test/fixtures/ts/transport-main.ts new file mode 100644 index 0000000..f31f88c --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/transport-main.ts @@ -0,0 +1,8 @@ +import { join } from 'node:path' +import pino from '../../..' + +const transport = pino.transport({ + target: join(__dirname, 'transport-worker.ts') +}) +const logger = pino(transport) +logger.info('Hello') diff --git a/node_modules/pino/test/fixtures/ts/transport-string-stdout.ts b/node_modules/pino/test/fixtures/ts/transport-string-stdout.ts new file mode 100644 index 0000000..0c9cfa7 --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/transport-string-stdout.ts @@ -0,0 +1,8 @@ +import pino from '../../..' 
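+// the string '1' names file descriptor 1, so the pino/file target writes straight to stdout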
+ +const transport = pino.transport({ + target: 'pino/file', + options: { destination: '1' } +}) +const logger = pino(transport) +logger.info('Hello') diff --git a/node_modules/pino/test/fixtures/ts/transport-worker.ts b/node_modules/pino/test/fixtures/ts/transport-worker.ts new file mode 100644 index 0000000..8061291 --- /dev/null +++ b/node_modules/pino/test/fixtures/ts/transport-worker.ts @@ -0,0 +1,14 @@ +import { Writable } from 'node:stream' + +export default (): Writable => { + const myTransportStream = new Writable({ + autoDestroy: true, + write (chunk, _enc, cb) { + console.log(chunk.toString()) + cb() + }, + defaultEncoding: 'utf8' + }) + + return myTransportStream +} diff --git a/node_modules/pino/test/formatters.test.js b/node_modules/pino/test/formatters.test.js new file mode 100644 index 0000000..bb7ae00 --- /dev/null +++ b/node_modules/pino/test/formatters.test.js @@ -0,0 +1,364 @@ +'use strict' +/* eslint no-prototype-builtins: 0 */ + +const test = require('node:test') +const assert = require('node:assert') +const { hostname } = require('node:os') +const { join } = require('node:path') +const { readFile } = require('node:fs').promises +const tspl = require('@matteo.collina/tspl') + +const { sink, match, once, watchFileCreated, file } = require('./helper') +const pino = require('../') + +test('level formatter', async () => { + const stream = sink() + const logger = pino({ + formatters: { + level (label, number) { + return { + log: { + level: label + } + } + } + } + }, stream) + + const o = once(stream, 'data') + logger.info('hello world') + match(await o, { + log: { + level: 'info' + } + }) +}) + +test('bindings formatter', async () => { + const stream = sink() + const logger = pino({ + formatters: { + bindings (bindings) { + return { + process: { + pid: bindings.pid + }, + host: { + name: bindings.hostname + } + } + } + } + }, stream) + + const o = once(stream, 'data') + logger.info('hello world') + match(await o, { + process: { + pid: process.pid + }, + host: { + name: hostname() + } + }) +}) + +test('no bindings formatter', async () => { + const stream = sink() + const logger = pino({ + formatters: { + bindings (bindings) { + return null + } + } + }, stream) + + const o = once(stream, 'data') + logger.info('hello world') + const log = await o + assert.equal(log.hasOwnProperty('pid'), false) + assert.equal(log.hasOwnProperty('hostname'), false) + match(log, { msg: 'hello world' }) +}) + +test('log formatter', async (t) => { + const plan = tspl(t, { plan: 1 }) + const stream = sink() + const logger = pino({ + formatters: { + log (obj) { + plan.equal(obj.hasOwnProperty('msg'), false) + return { hello: 'world', ...obj } + } + } + }, stream) + + const o = once(stream, 'data') + logger.info({ foo: 'bar', nested: { object: true } }, 'hello world') + match(await o, { + hello: 'world', + foo: 'bar', + nested: { object: true } + }) + + await plan +}) + +test('Formatters combined', async () => { + const stream = sink() + const logger = pino({ + formatters: { + level (label, number) { + return { + log: { + level: label + } + } + }, + bindings (bindings) { + return { + process: { + pid: bindings.pid + }, + host: { + name: bindings.hostname + } + } + }, + log (obj) { + return { hello: 'world', ...obj } + } + } + }, stream) + + const o = once(stream, 'data') + logger.info({ foo: 'bar', nested: { object: true } }, 'hello world') + match(await o, { + log: { + level: 'info' + }, + process: { + pid: process.pid + }, + host: { + name: hostname() + }, + hello: 'world', + foo: 'bar', + nested: 
{ object: true } + }) +}) + +test('Formatters in child logger', async () => { + const stream = sink() + const logger = pino({ + formatters: { + level (label, number) { + return { + log: { + level: label + } + } + }, + bindings (bindings) { + return { + process: { + pid: bindings.pid + }, + host: { + name: bindings.hostname + } + } + }, + log (obj) { + return { hello: 'world', ...obj } + } + } + }, stream) + + const child = logger.child({ + foo: 'bar', + nested: { object: true } + }, { + formatters: { + bindings (bindings) { + return { ...bindings, faz: 'baz' } + } + } + }) + + const o = once(stream, 'data') + child.info('hello world') + match(await o, { + log: { + level: 'info' + }, + process: { + pid: process.pid + }, + host: { + name: hostname() + }, + hello: 'world', + foo: 'bar', + nested: { object: true }, + faz: 'baz' + }) +}) + +test('Formatters without bindings in child logger', async () => { + const stream = sink() + const logger = pino({ + formatters: { + level (label, number) { + return { + log: { + level: label + } + } + }, + bindings (bindings) { + return { + process: { + pid: bindings.pid + }, + host: { + name: bindings.hostname + } + } + }, + log (obj) { + return { hello: 'world', ...obj } + } + } + }, stream) + + const child = logger.child({ + foo: 'bar', + nested: { object: true } + }, { + formatters: { + log (obj) { + return { other: 'stuff', ...obj } + } + } + }) + + const o = once(stream, 'data') + child.info('hello world') + match(await o, { + log: { + level: 'info' + }, + process: { + pid: process.pid + }, + host: { + name: hostname() + }, + foo: 'bar', + other: 'stuff', + nested: { object: true } + }) +}) + +test('elastic common schema format', async () => { + const stream = sink() + const ecs = { + formatters: { + level (label, number) { + return { + log: { + level: label, + logger: 'pino' + } + } + }, + bindings (bindings) { + return { + process: { + pid: bindings.pid + }, + host: { + name: bindings.hostname + } + } + }, + log (obj) { + return { ecs: { version: '1.4.0' }, ...obj } + } + }, + messageKey: 'message', + timestamp: () => `,"@timestamp":"${new Date(Date.now()).toISOString()}"` + } + + const logger = pino({ ...ecs }, stream) + + const o = once(stream, 'data') + logger.info({ foo: 'bar' }, 'hello world') + const log = await o + assert.equal(typeof log['@timestamp'], 'string') + match(log, { + log: { level: 'info', logger: 'pino' }, + process: { pid: process.pid }, + host: { name: hostname() }, + ecs: { version: '1.4.0' }, + foo: 'bar', + message: 'hello world' + }) +}) + +test('formatter with transport', async (t) => { + const plan = tspl(t, { plan: 1 }) + const destination = file() + const logger = pino({ + formatters: { + log (obj) { + plan.equal(obj.hasOwnProperty('msg'), false) + return { hello: 'world', ...obj } + } + }, + transport: { + targets: [ + { + target: join(__dirname, 'fixtures', 'to-file-transport.js'), + options: { destination } + } + ] + } + }) + + logger.info({ foo: 'bar', nested: { object: true } }, 'hello world') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + match(result, { + hello: 'world', + foo: 'bar', + nested: { object: true } + }) +}) + +test('throws when custom level formatter is used with transport.targets', async () => { + assert.throws( + () => { + pino({ + formatters: { + level (label) { + return label + } + }, + transport: { + targets: [ + { + target: 'pino/file', + options: { destination: 'foo.log' } + } + ] + } + } + ) + }, + 
Error('option.transport.targets do not allow custom level formatters') + ) +}) diff --git a/node_modules/pino/test/helper.d.ts b/node_modules/pino/test/helper.d.ts new file mode 100644 index 0000000..5798d04 --- /dev/null +++ b/node_modules/pino/test/helper.d.ts @@ -0,0 +1,4 @@ +import { PathLike } from 'node:fs' + +export declare function watchFileCreated(filename: PathLike): Promise<void> +export declare function watchForWrite(filename: PathLike, testString: string): Promise<void> diff --git a/node_modules/pino/test/helper.js b/node_modules/pino/test/helper.js new file mode 100644 index 0000000..23c1803 --- /dev/null +++ b/node_modules/pino/test/helper.js @@ -0,0 +1,155 @@ +'use strict' + +const crypto = require('node:crypto') +const { join } = require('node:path') +const os = require('node:os') +const { existsSync, readFileSync, statSync, unlinkSync } = require('node:fs') +const writer = require('flush-write-stream') +const split = require('split2') + +const pid = process.pid +const hostname = os.hostname() +const { tmpdir } = os + +const isWin = process.platform === 'win32' +const isYarnPnp = process.versions.pnp !== undefined + +function getPathToNull () { + return isWin ? '\\\\.\\NUL' : '/dev/null' +} + +function once (emitter, name) { + return new Promise((resolve, reject) => { + if (name !== 'error') emitter.once('error', reject) + emitter.once(name, (...args) => { + emitter.removeListener('error', reject) + resolve(...args) + }) + }) +} + +function sink (func) { + const result = split((data) => { + try { + return JSON.parse(data) + } catch (err) { + console.log(err) + console.log(data) + } + }) + if (func) result.pipe(writer.obj(func)) + return result +} + +function check (is, chunk, level, msg) { + is(new Date(chunk.time) <= new Date(), true, 'time is greater than Date.now()') + delete chunk.time + is(chunk.pid, pid) + is(chunk.hostname, hostname) + is(chunk.level, level) + is(chunk.msg, msg) +} + +function sleep (ms) { + return new Promise((resolve) => { + setTimeout(resolve, ms) + }) +} + +function watchFileCreated (filename) { + return new Promise((resolve, reject) => { + const TIMEOUT = process.env.PINO_TEST_WAIT_WATCHFILE_TIMEOUT || 10000 + const INTERVAL = 100 + const threshold = TIMEOUT / INTERVAL + let counter = 0 + const interval = setInterval(() => { + const exists = existsSync(filename) + // On some CI runs file is created but not filled + if (exists && statSync(filename).size !== 0) { + clearInterval(interval) + resolve() + } else if (counter <= threshold) { + counter++ + } else { + clearInterval(interval) + reject(new Error( + `${filename} hasn't been created within ${TIMEOUT} ms. ` + + (exists ? 'File exists, but is still empty.'
: 'File not yet created.') + )) + } + }, INTERVAL) + }) +} + +function watchForWrite (filename, testString) { + return new Promise((resolve, reject) => { + const TIMEOUT = process.env.PINO_TEST_WAIT_WRITE_TIMEOUT || 10000 + const INTERVAL = 100 + const threshold = TIMEOUT / INTERVAL + let counter = 0 + const interval = setInterval(() => { + if (readFileSync(filename).includes(testString)) { + clearInterval(interval) + resolve() + } else if (counter <= threshold) { + counter++ + } else { + clearInterval(interval) + reject(new Error(`'${testString}' hasn't been written to ${filename} within ${TIMEOUT} ms.`)) + } + }, INTERVAL) + }) +} + +let files = [] + +function file () { + const hash = crypto.randomBytes(12).toString('hex') + const file = join(tmpdir(), `pino-${pid}-${hash}`) + files.push(file) + return file +} + +process.on('beforeExit', () => { + if (files.length === 0) return + for (const file of files) { + try { + unlinkSync(file) + } catch (e) { + } + } + files = [] +}) + +/** + * match is a bare-bones object shape matcher. We should be able to replace + * this with `assert.partialDeepStrictEqual` when v22 is our minimum. + * + * @param {object} found + * @param {object} expected + */ +function match (found, expected) { + for (const [key, value] of Object.entries(expected)) { + if (Object.prototype.toString.call(value) === '[object Object]') { + match(found[key], value) + continue + } + if (value !== found[key]) { + throw Error(`expected "${value}" but found "${found[key]}"`) + } + } +} + +module.exports = { + check, + file, + getPathToNull, + isWin, + isYarnPnp, + match, + once, + sink, + sleep, + watchFileCreated, + watchForWrite +} diff --git a/node_modules/pino/test/hooks.test.js b/node_modules/pino/test/hooks.test.js new file mode 100644 index 0000000..a54df03 --- /dev/null +++ b/node_modules/pino/test/hooks.test.js @@ -0,0 +1,114 @@ +'use strict' + +const { describe, test } = require('node:test') +const tspl = require('@matteo.collina/tspl') + +const { sink, match, once } = require('./helper') +const pino = require('../') + +describe('log method hook', () => { + test('gets invoked', async t => { + const plan = tspl(t, { plan: 7 }) + + const stream = sink() + const logger = pino({ + hooks: { + logMethod (args, method, level) { + plan.equal(Array.isArray(args), true) + plan.equal(typeof level, 'number') + plan.equal(args.length, 3) + plan.equal(level, this.levels.values.info) + plan.deepEqual(args, ['a', 'b', 'c']) + + plan.equal(typeof method, 'function') + plan.equal(method.name, 'LOG') + + method.apply(this, [args.join('-')]) + } + } + }, stream) + + const o = once(stream, 'data') + logger.info('a', 'b', 'c') + match(await o, { msg: 'a-b-c' }) + }) + + test('fatal method invokes hook', async t => { + const plan = tspl(t, { plan: 1 }) + + const stream = sink() + const logger = pino({ + hooks: { + logMethod (args, method) { + plan.ok(true) + method.apply(this, [args.join('-')]) + } + } + }, stream) + + const o = once(stream, 'data') + logger.fatal('a') + match(await o, { msg: 'a' }) + }) + + test('children get the hook', async t => { + const plan = tspl(t, { plan: 2 }) + + const stream = sink() + const root = pino({ + hooks: { + logMethod (args, method) { + plan.ok(true) + method.apply(this, [args.join('-')]) + } + } + }, stream) + const child = root.child({ child: 'one' }) + const grandchild = child.child({ child: 'two' }) + + let o = once(stream, 'data') + child.info('a', 'b') + match(await o, { msg: 'a-b' }) + + o = once(stream, 'data') + grandchild.info('c', 'd') + 
match(await o, { msg: 'c-d' }) + }) + + test('get log level', async t => { + const plan = tspl(t, { plan: 2 }) + + const stream = sink() + const logger = pino({ + hooks: { + logMethod (args, method, level) { + plan.equal(typeof level, 'number') + plan.equal(level, this.levels.values.error) + + method.apply(this, [args.join('-')]) + } + } + }, stream) + + const o = once(stream, 'data') + logger.error('a') + match(await o, { msg: 'a' }) + }) +}) + +describe('streamWrite hook', () => { + test('gets invoked', async () => { + const stream = sink() + const logger = pino({ + hooks: { + streamWrite (s) { + return s.replaceAll('redact-me', 'XXX') + } + } + }, stream) + + const o = once(stream, 'data') + logger.info('hide redact-me in this string') + match(await o, { msg: 'hide XXX in this string' }) + }) +}) diff --git a/node_modules/pino/test/http.test.js b/node_modules/pino/test/http.test.js new file mode 100644 index 0000000..72b400b --- /dev/null +++ b/node_modules/pino/test/http.test.js @@ -0,0 +1,214 @@ +'use strict' + +const test = require('node:test') +const http = require('node:http') +const os = require('node:os') +const tspl = require('@matteo.collina/tspl') + +const { sink, once } = require('./helper') +const pino = require('../') + +const { pid } = process +const hostname = os.hostname() + +test('http request support', async (t) => { + const plan = tspl(t, { plan: 3 }) + let originalReq + const instance = pino(sink((chunk, enc) => { + plan.ok(new Date(chunk.time) <= new Date(), 'time is greater than Date.now()') + delete chunk.time + plan.deepEqual(chunk, { + pid, + hostname, + level: 30, + msg: 'my request', + req: { + method: originalReq.method, + url: originalReq.url, + headers: originalReq.headers, + remoteAddress: originalReq.socket.remoteAddress, + remotePort: originalReq.socket.remotePort + } + }) + })) + + const server = http.createServer((req, res) => { + originalReq = req + instance.info(req, 'my request') + res.end('hello') + }) + server.unref() + server.listen() + const err = await once(server, 'listening') + plan.equal(err, undefined) + const res = await once(http.get('http://localhost:' + server.address().port), 'response') + res.resume() + server.close() + + await plan +}) + +test('http request support via serializer', async (t) => { + const plan = tspl(t, { plan: 3 }) + let originalReq + const instance = pino({ + serializers: { + req: pino.stdSerializers.req + } + }, sink((chunk, enc) => { + plan.ok(new Date(chunk.time) <= new Date(), 'time is greater than Date.now()') + delete chunk.time + plan.deepEqual(chunk, { + pid, + hostname, + level: 30, + msg: 'my request', + req: { + method: originalReq.method, + url: originalReq.url, + headers: originalReq.headers, + remoteAddress: originalReq.socket.remoteAddress, + remotePort: originalReq.socket.remotePort + } + }) + })) + + const server = http.createServer(function (req, res) { + originalReq = req + instance.info({ req }, 'my request') + res.end('hello') + }) + server.unref() + server.listen() + const err = await once(server, 'listening') + plan.equal(err, undefined) + + const res = await once(http.get('http://localhost:' + server.address().port), 'response') + res.resume() + server.close() + + await plan +}) + +test('http response support', async (t) => { + const plan = tspl(t, { plan: 3 }) + let originalRes + const instance = pino(sink((chunk, enc) => { + plan.ok(new Date(chunk.time) <= new Date(), 'time is greater than Date.now()') + delete chunk.time + plan.deepEqual(chunk, { + pid, + hostname, + level: 30, + msg: 'my 
response', + res: { + statusCode: originalRes.statusCode, + headers: originalRes.getHeaders() + } + }) + })) + + const server = http.createServer(function (req, res) { + originalRes = res + res.end('hello') + instance.info(res, 'my response') + }) + server.unref() + server.listen() + const err = await once(server, 'listening') + + plan.equal(err, undefined) + + const res = await once(http.get('http://localhost:' + server.address().port), 'response') + res.resume() + server.close() + + await plan +}) + +test('http response support via a serializer', async (t) => { + const plan = tspl(t, { plan: 3 }) + const instance = pino({ + serializers: { + res: pino.stdSerializers.res + } + }, sink((chunk, enc) => { + plan.ok(new Date(chunk.time) <= new Date(), 'time is greater than Date.now()') + delete chunk.time + plan.deepEqual(chunk, { + pid, + hostname, + level: 30, + msg: 'my response', + res: { + statusCode: 200, + headers: { + 'x-single': 'y', + 'x-multi': [1, 2] + } + } + }) + })) + + const server = http.createServer(function (req, res) { + res.setHeader('x-single', 'y') + res.setHeader('x-multi', [1, 2]) + res.end('hello') + instance.info({ res }, 'my response') + }) + + server.unref() + server.listen() + const err = await once(server, 'listening') + plan.equal(err, undefined) + + const res = await once(http.get('http://localhost:' + server.address().port), 'response') + res.resume() + server.close() + + await plan +}) + +test('http request support via serializer in a child', async (t) => { + const plan = tspl(t, { plan: 3 }) + let originalReq + const instance = pino({ + serializers: { + req: pino.stdSerializers.req + } + }, sink((chunk, enc) => { + plan.ok(new Date(chunk.time) <= new Date(), 'time is greater than Date.now()') + delete chunk.time + plan.deepEqual(chunk, { + pid, + hostname, + level: 30, + msg: 'my request', + req: { + method: originalReq.method, + url: originalReq.url, + headers: originalReq.headers, + remoteAddress: originalReq.socket.remoteAddress, + remotePort: originalReq.socket.remotePort + } + }) + })) + + const server = http.createServer(function (req, res) { + originalReq = req + const child = instance.child({ req }) + child.info('my request') + res.end('hello') + }) + + server.unref() + server.listen() + const err = await once(server, 'listening') + plan.equal(err, undefined) + + const res = await once(http.get('http://localhost:' + server.address().port), 'response') + res.resume() + server.close() + + await plan +}) diff --git a/node_modules/pino/test/internals/version.test.js b/node_modules/pino/test/internals/version.test.js new file mode 100644 index 0000000..8d260bd --- /dev/null +++ b/node_modules/pino/test/internals/version.test.js @@ -0,0 +1,17 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const fs = require('node:fs') +const path = require('node:path') + +const pino = require('../..')() + +test('should be the same as package.json', () => { + const json = JSON.parse( + fs.readFileSync(path.join(__dirname, '..', '..', 'package.json')) + .toString('utf8') + ) + + assert.equal(pino.version, json.version) +}) diff --git a/node_modules/pino/test/is-level-enabled.test.js b/node_modules/pino/test/is-level-enabled.test.js new file mode 100644 index 0000000..d55bfaf --- /dev/null +++ b/node_modules/pino/test/is-level-enabled.test.js @@ -0,0 +1,179 @@ +'use strict' + +const { describe, test } = require('node:test') +const assert = require('node:assert') + +const pino = require('../') + +const descLevels = { + trace: 60, 
+ debug: 50, + info: 40, + warn: 30, + error: 20, + fatal: 10 +} + +const ascLevels = { + trace: 10, + debug: 20, + info: 30, + warn: 40, + error: 50, + fatal: 60 +} + +describe('Default levels suite', () => { + test('can check if current level enabled', async () => { + const log = pino({ level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if level enabled after level set', async () => { + const log = pino() + assert.equal(false, log.isLevelEnabled('debug')) + log.level = 'debug' + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if higher level enabled', async () => { + const log = pino({ level: 'debug' }) + assert.equal(true, log.isLevelEnabled('error')) + }) + + test('can check if lower level is disabled', async () => { + const log = pino({ level: 'error' }) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('ASC: can check if child has current level enabled', async () => { + const log = pino().child({}, { level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if custom level is enabled', async () => { + const log = pino({ + customLevels: { foo: 35 }, + level: 'debug' + }) + assert.equal(true, log.isLevelEnabled('foo')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) +}) + +describe('Ascending levels suite', () => { + const customLevels = ascLevels + const levelComparison = 'ASC' + + test('can check if current level enabled', async () => { + const log = pino({ level: 'debug', levelComparison, customLevels, useOnlyCustomLevels: true }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if level enabled after level set', async () => { + const log = pino({ levelComparison, customLevels, useOnlyCustomLevels: true }) + assert.equal(false, log.isLevelEnabled('debug')) + log.level = 'debug' + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if higher level enabled', async () => { + const log = pino({ level: 'debug', levelComparison, customLevels, useOnlyCustomLevels: true }) + assert.equal(true, log.isLevelEnabled('error')) + }) + + test('can check if lower level is disabled', async () => { + const log = pino({ level: 'error', customLevels, useOnlyCustomLevels: true }) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if child has current level enabled', async () => { + const log = pino().child({ levelComparison, customLevels, useOnlyCustomLevels: true }, { level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if custom level is enabled', async () => { + const log = pino({ + levelComparison, + useOnlyCustomLevels: true, + customLevels: { foo: 35, ...customLevels }, + level: 'debug' + }) + assert.equal(true, log.isLevelEnabled('foo')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) +}) + +describe('Descending levels suite', () => { + const customLevels = descLevels + const levelComparison = 'DESC' + + test('can check if current level enabled', async () => { + const log = pino({ level: 'debug', levelComparison, customLevels, useOnlyCustomLevels: true }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if level enabled after level 
set', async () => { + const log = pino({ levelComparison, customLevels, useOnlyCustomLevels: true }) + assert.equal(false, log.isLevelEnabled('debug')) + log.level = 'debug' + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('can check if higher level enabled', async () => { + const log = pino({ level: 'debug', levelComparison, customLevels, useOnlyCustomLevels: true }) + assert.equal(true, log.isLevelEnabled('error')) + }) + + test('can check if lower level is disabled', async () => { + const log = pino({ level: 'error', levelComparison, customLevels, useOnlyCustomLevels: true }) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if child has current level enabled', async () => { + const log = pino({ levelComparison, customLevels, useOnlyCustomLevels: true }).child({}, { level: 'debug' }) + assert.equal(true, log.isLevelEnabled('debug')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) + + test('can check if custom level is enabled', async () => { + const log = pino({ + levelComparison, + customLevels: { foo: 35, ...customLevels }, + useOnlyCustomLevels: true, + level: 'debug' + }) + assert.equal(true, log.isLevelEnabled('foo')) + assert.equal(true, log.isLevelEnabled('error')) + assert.equal(false, log.isLevelEnabled('trace')) + }) +}) + +describe('Custom levels comparison', () => { + test('Custom comparison returns true cause level is enabled', async () => { + const log = pino({ level: 'error', levelComparison: () => true }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('Custom comparison returns false cause level is disabled', async () => { + const log = pino({ level: 'error', levelComparison: () => false }) + assert.equal(false, log.isLevelEnabled('debug')) + }) + + test('Custom comparison returns true cause child level is enabled', async () => { + const log = pino({ levelComparison: () => true }).child({ level: 'error' }) + assert.equal(true, log.isLevelEnabled('debug')) + }) + + test('Custom comparison returns false cause child level is disabled', async () => { + const log = pino({ levelComparison: () => false }).child({ level: 'error' }) + assert.equal(false, log.isLevelEnabled('debug')) + }) +}) diff --git a/node_modules/pino/test/jest/basic.spec.js b/node_modules/pino/test/jest/basic.spec.js new file mode 100644 index 0000000..46f381b --- /dev/null +++ b/node_modules/pino/test/jest/basic.spec.js @@ -0,0 +1,10 @@ +/* global test */ +const pino = require('../../pino') + +test('transport should work in jest', function () { + pino({ + transport: { + target: 'pino-pretty' + } + }) +}) diff --git a/node_modules/pino/test/levels.test.js b/node_modules/pino/test/levels.test.js new file mode 100644 index 0000000..904aef1 --- /dev/null +++ b/node_modules/pino/test/levels.test.js @@ -0,0 +1,810 @@ +'use strict' + +const { describe, test } = require('node:test') +const assert = require('node:assert') +const tspl = require('@matteo.collina/tspl') + +const { sink, once, check } = require('./helper') +const pino = require('../') + +const levelsLib = require('../lib/levels') + +// Silence all warnings for this test +process.removeAllListeners('warning') +process.on('warning', () => {}) + +test('set the level by string', async () => { + const expected = [{ + level: 50, + msg: 'this is an error' + }, { + level: 60, + msg: 'this is fatal' + }] + const stream = sink() + const instance = pino(stream) + instance.level = 'error' + instance.info('hello world') + instance.error('this is an 
error') + instance.fatal('this is fatal') + const result = await once(stream, 'data') + const current = expected.shift() + check(assert.equal, result, current.level, current.msg) +}) + +test('the wrong level throws', async () => { + const instance = pino() + assert.throws(() => { + instance.level = 'kaboom' + }) +}) + +test('set the level by number', async () => { + const expected = [{ + level: 50, + msg: 'this is an error' + }, { + level: 60, + msg: 'this is fatal' + }] + const stream = sink() + const instance = pino(stream) + + instance.level = 50 + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + const result = await once(stream, 'data') + const current = expected.shift() + check(assert.equal, result, current.level, current.msg) +}) + +test('exposes level string mappings', async () => { + assert.equal(pino.levels.values.error, 50) +}) + +test('exposes level number mappings', async () => { + assert.equal(pino.levels.labels[50], 'error') +}) + +test('returns level integer', async () => { + const instance = pino({ level: 'error' }) + assert.equal(instance.levelVal, 50) +}) + +test('child returns level integer', async () => { + const parent = pino({ level: 'error' }) + const child = parent.child({ foo: 'bar' }) + assert.equal(child.levelVal, 50) +}) + +test('set the level via exported pino function', async () => { + const expected = [{ + level: 50, + msg: 'this is an error' + }, { + level: 60, + msg: 'this is fatal' + }] + const stream = sink() + const instance = pino({ level: 'error' }, stream) + + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + const result = await once(stream, 'data') + const current = expected.shift() + check(assert.equal, result, current.level, current.msg) +}) + +test('level-change event', async (t) => { + const plan = tspl(t, { plan: 8 }) + const instance = pino() + function handle (lvl, val, prevLvl, prevVal, logger) { + plan.equal(lvl, 'trace') + plan.equal(val, 10) + plan.equal(prevLvl, 'info') + plan.equal(prevVal, 30) + plan.equal(logger, instance) + } + instance.on('level-change', handle) + instance.level = 'trace' + instance.removeListener('level-change', handle) + instance.level = 'info' + + let count = 0 + + const l1 = () => count++ + const l2 = () => count++ + const l3 = () => count++ + instance.on('level-change', l1) + instance.on('level-change', l2) + instance.on('level-change', l3) + + instance.level = 'trace' + instance.removeListener('level-change', l3) + instance.level = 'fatal' + instance.removeListener('level-change', l1) + instance.level = 'debug' + instance.removeListener('level-change', l2) + instance.level = 'info' + + plan.equal(count, 6) + + instance.once('level-change', (lvl, val, prevLvl, prevVal, logger) => plan.equal(logger, instance)) + instance.level = 'info' + const child = instance.child({}) + instance.once('level-change', (lvl, val, prevLvl, prevVal, logger) => plan.equal(logger, child)) + child.level = 'trace' + + await plan +}) + +test('enable', async (t) => { + const instance = pino({ + level: 'trace', + enabled: false + }, sink((result, enc) => { + throw Error('no data should be logged') + })) + + Object.keys(pino.levels.values).forEach((level) => { + instance[level]('hello world') + }) +}) + +test('silent level', async () => { + const instance = pino({ + level: 'silent' + }, sink((result, enc) => { + throw Error('no data should be logged') + })) + + Object.keys(pino.levels.values).forEach((level) => { + 
instance[level]('hello world') + }) +}) + +test('set silent via Infinity', async () => { + const instance = pino({ + level: Infinity + }, sink((result, enc) => { + throw Error('no data should be logged') + })) + + Object.keys(pino.levels.values).forEach((level) => { + instance[level]('hello world') + }) +}) + +test('exposed levels', async () => { + assert.deepEqual(Object.keys(pino.levels.values), [ + 'trace', + 'debug', + 'info', + 'warn', + 'error', + 'fatal' + ]) +}) + +test('exposed labels', async () => { + assert.deepEqual(Object.keys(pino.levels.labels), [ + '10', + '20', + '30', + '40', + '50', + '60' + ]) +}) + +test('setting level in child', async (t) => { + const plan = tspl(t, { plan: 10 }) + const expected = [{ + level: 50, + msg: 'this is an error' + }, { + level: 60, + msg: 'this is fatal' + }] + const instance = pino(sink((result, enc, cb) => { + const current = expected.shift() + check(plan.equal, result, current.level, current.msg) + cb() + })).child({ level: 30 }) + + instance.level = 'error' + instance.info('hello world') + instance.error('this is an error') + instance.fatal('this is fatal') + + await plan +}) + +test('setting level by assigning a number to level', async () => { + const instance = pino() + assert.equal(instance.levelVal, 30) + assert.equal(instance.level, 'info') + instance.level = 50 + assert.equal(instance.levelVal, 50) + assert.equal(instance.level, 'error') +}) + +test('setting level by number to unknown value results in a throw', async () => { + const instance = pino() + assert.throws(() => { instance.level = 973 }) +}) + +test('setting level by assigning a known label to level', async () => { + const instance = pino() + assert.equal(instance.levelVal, 30) + assert.equal(instance.level, 'info') + instance.level = 'error' + assert.equal(instance.levelVal, 50) + assert.equal(instance.level, 'error') +}) + +test('levelVal is read only', async () => { + const instance = pino() + assert.throws(() => { instance.levelVal = 20 }) +}) + +test('produces labels when told to', async (t) => { + const plan = tspl(t, { plan: 5 }) + const expected = [{ + level: 'info', + msg: 'hello world' + }] + const instance = pino({ + formatters: { + level (label, number) { + return { level: label } + } + } + }, sink((result, enc, cb) => { + const current = expected.shift() + check(plan.equal, result, current.level, current.msg) + cb() + })) + + instance.info('hello world') + + await plan +}) + +test('resets levels from labels to numbers', async (t) => { + const plan = tspl(t, { plan: 5 }) + const expected = [{ + level: 30, + msg: 'hello world' + }] + pino({ useLevelLabels: true }) + const instance = pino({ useLevelLabels: false }, sink((result, enc, cb) => { + const current = expected.shift() + check(plan.equal, result, current.level, current.msg) + cb() + })) + + instance.info('hello world') + + await plan +}) + +test('changes label naming when told to', async (t) => { + const plan = tspl(t, { plan: 2 }) + const expected = [{ + priority: 30, + msg: 'hello world' + }] + const instance = pino({ + formatters: { + level (label, number) { + return { priority: number } + } + } + }, sink((result, enc, cb) => { + const current = expected.shift() + plan.equal(result.priority, current.priority) + plan.equal(result.msg, current.msg) + cb() + })) + + instance.info('hello world') + + await plan +}) + +test('children produce labels when told to', async (t) => { + const plan = tspl(t, { plan: 10 }) + const expected = [ + { + level: 'info', + msg: 'child 1' + }, + { + level: 'info', + msg: 
'child 2' + } + ] + const instance = pino({ + formatters: { + level (label, number) { + return { level: label } + } + } + }, sink((result, enc, cb) => { + const current = expected.shift() + check(plan.equal, result, current.level, current.msg) + cb() + })) + + const child1 = instance.child({ name: 'child1' }) + const child2 = child1.child({ name: 'child2' }) + + child1.info('child 1') + child2.info('child 2') + + await plan +}) + +test('produces labels for custom levels', async (t) => { + const plan = tspl(t, { plan: 10 }) + const expected = [ + { + level: 'info', + msg: 'hello world' + }, + { + level: 'foo', + msg: 'foobar' + } + ] + const opts = { + formatters: { + level (label, number) { + return { level: label } + } + }, + customLevels: { + foo: 35 + } + } + const instance = pino(opts, sink((result, enc, cb) => { + const current = expected.shift() + check(plan.equal, result, current.level, current.msg) + cb() + })) + + instance.info('hello world') + instance.foo('foobar') + + await plan +}) + +test('setting levelKey does not affect labels when told to', async (t) => { + const plan = tspl(t, { plan: 1 }) + const instance = pino( + { + formatters: { + level (label, number) { + return { priority: label } + } + } + }, + sink((result, enc, cb) => { + plan.equal(result.priority, 'info') + cb() + }) + ) + + instance.info('hello world') + + await plan +}) + +test('throws when creating a default label that does not exist in logger levels', async () => { + const defaultLevel = 'foo' + assert.throws( + () => { + pino({ + customLevels: { + bar: 5 + }, + level: defaultLevel + }) + }, + Error(`default level:${defaultLevel} must be included in custom levels`) + ) +}) + +test('throws when creating a default value that does not exist in logger levels', async () => { + const defaultLevel = 15 + assert.throws( + () => { + pino({ + customLevels: { + bar: 5 + }, + level: defaultLevel + }) + }, + Error(`default level:${defaultLevel} must be included in custom levels`) + ) +}) + +test('throws when creating a default value that does not exist in logger levels', async ({ equal, throws }) => { + assert.throws( + () => { + pino({ + customLevels: { + foo: 5 + }, + useOnlyCustomLevels: true + }) + }, + /default level:info must be included in custom levels/ + ) +}) + +test('passes when creating a default value that exists in logger levels', async () => { + pino({ + level: 30 + }) +}) + +test('log null value when message is null', async () => { + const expected = { + msg: null, + level: 30 + } + + const stream = sink() + const instance = pino(stream) + instance.level = 'info' + instance.info(null) + + const result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) +}) + +test('formats when base param is null', async () => { + const expected = { + msg: 'a string', + level: 30 + } + + const stream = sink() + const instance = pino(stream) + instance.level = 'info' + instance.info(null, 'a %s', 'string') + + const result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) +}) + +test('fatal method sync-flushes the destination if sync flushing is available', async (t) => { + const plan = tspl(t, { plan: 2 }) + const stream = sink() + stream.flushSync = () => { + plan.ok('destination flushed') + } + const instance = pino(stream) + instance.fatal('this is fatal') + await once(stream, 'data') + plan.doesNotThrow(() => { + stream.flushSync = undefined + instance.fatal('this is fatal') + }) + + await plan +}) + +test('fatal method should call async 
when sync-flushing fails', async (t) => { + const plan = tspl(t, { plan: 1 }) + const messages = [ + 'this is fatal 1' + ] + const stream = sink((result) => assert.equal(result.msg, messages.shift())) + stream.flushSync = () => { throw new Error('Error') } + stream.flush = () => { throw Error('flush should be called') } + + const instance = pino(stream) + plan.doesNotThrow(() => instance.fatal(messages[0])) + + await plan +}) + +test('calling silent method on logger instance', async () => { + const instance = pino({ level: 'silent' }, sink((result, enc) => { + throw Error('no data should be logged') + })) + instance.silent('hello world') +}) + +test('calling silent method on child logger', async () => { + const child = pino({ level: 'silent' }, sink((result, enc) => { + throw Error('no data should be logged') + })).child({}) + child.silent('hello world') +}) + +test('changing level from info to silent and back to info', async () => { + const expected = { + level: 30, + msg: 'hello world' + } + const stream = sink() + const instance = pino({ level: 'info' }, stream) + + instance.level = 'silent' + instance.info('hello world') + let result = stream.read() + assert.equal(result, null) + + instance.level = 'info' + instance.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) +}) + +test('changing level from info to silent and back to info in child logger', async () => { + const expected = { + level: 30, + msg: 'hello world' + } + const stream = sink() + const child = pino({ level: 'info' }, stream).child({}) + + child.level = 'silent' + child.info('hello world') + let result = stream.read() + assert.equal(result, null) + + child.level = 'info' + child.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) +}) + +describe('changing level respects level comparison set to', () => { + const ascLevels = { + debug: 1, + info: 2, + warn: 3 + } + + const descLevels = { + debug: 3, + info: 2, + warn: 1 + } + + const expected = { + level: 2, + msg: 'hello world' + } + + test('ASC in parent logger', async () => { + const customLevels = ascLevels + const levelComparison = 'ASC' + + const stream = sink() + const logger = pino({ levelComparison, customLevels, useOnlyCustomLevels: true, level: 'info' }, stream) + + logger.level = 'warn' + logger.info('hello world') + let result = stream.read() + assert.equal(result, null) + + logger.level = 'debug' + logger.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) + }) + + test('DESC in parent logger', async () => { + const customLevels = descLevels + const levelComparison = 'DESC' + + const stream = sink() + const logger = pino({ levelComparison, customLevels, useOnlyCustomLevels: true, level: 'info' }, stream) + + logger.level = 'warn' + logger.info('hello world') + let result = stream.read() + assert.equal(result, null) + + logger.level = 'debug' + logger.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) + }) + + test('custom function in parent logger', async () => { + const customLevels = { + info: 2, + debug: 345, + warn: 789 + } + const levelComparison = (current, expected) => { + if (expected === customLevels.warn) return false + return true + } + + const stream = sink() + const logger = pino({ levelComparison, customLevels, useOnlyCustomLevels: true, level: 'info' }, stream) + + logger.level = 
'warn' + logger.info('hello world') + let result = stream.read() + assert.equal(result, null) + + logger.level = 'debug' + logger.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) + }) + + test('ASC in child logger', async () => { + const customLevels = ascLevels + const levelComparison = 'ASC' + + const stream = sink() + const logger = pino({ levelComparison, customLevels, useOnlyCustomLevels: true, level: 'info' }, stream).child({ }) + + logger.level = 'warn' + logger.info('hello world') + let result = stream.read() + assert.equal(result, null) + + logger.level = 'debug' + logger.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) + }) + + test('DESC in parent logger', async () => { + const customLevels = descLevels + const levelComparison = 'DESC' + + const stream = sink() + const logger = pino({ levelComparison, customLevels, useOnlyCustomLevels: true, level: 'info' }, stream).child({ }) + + logger.level = 'warn' + logger.info('hello world') + let result = stream.read() + assert.equal(result, null) + + logger.level = 'debug' + logger.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) + }) + + test('custom function in child logger', async () => { + const customLevels = { + info: 2, + debug: 345, + warn: 789 + } + const levelComparison = (current, expected) => { + if (expected === customLevels.warn) return false + return true + } + + const stream = sink() + const logger = pino({ levelComparison, customLevels, useOnlyCustomLevels: true, level: 'info' }, stream).child({ }) + + logger.level = 'warn' + logger.info('hello world') + let result = stream.read() + assert.equal(result, null) + + logger.level = 'debug' + logger.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) + }) +}) + +test('changing level respects level comparison DESC', async () => { + const customLevels = { + warn: 1, + info: 2, + debug: 3 + } + + const levelComparison = 'DESC' + + const expected = { + level: 2, + msg: 'hello world' + } + + const stream = sink() + const logger = pino({ levelComparison, customLevels, useOnlyCustomLevels: true, level: 'info' }, stream) + + logger.level = 'warn' + logger.info('hello world') + let result = stream.read() + assert.equal(result, null) + + logger.level = 'debug' + logger.info('hello world') + result = await once(stream, 'data') + check(assert.equal, result, expected.level, expected.msg) +}) + +// testing for potential loss of Pino constructor scope from serializers - an edge case with circular refs see: https://github.com/pinojs/pino/issues/833 +test('trying to get levels when `this` is no longer a Pino instance returns an empty string', async () => { + const notPinoInstance = { some: 'object', getLevel: levelsLib.getLevel } + const blankedLevelValue = notPinoInstance.getLevel() + assert.equal(blankedLevelValue, '') +}) + +test('accepts capital letter for INFO level', async () => { + const stream = sink() + const logger = pino({ + level: 'INFO' + }, stream) + + logger.info('test') + const { level } = await once(stream, 'data') + assert.equal(level, 30) +}) + +test('accepts capital letter for FATAL level', async () => { + const stream = sink() + const logger = pino({ + level: 'FATAL' + }, stream) + + logger.fatal('test') + const { level } = await once(stream, 'data') + assert.equal(level, 60) +}) + +test('accepts capital 
letter for ERROR level', async () => { + const stream = sink() + const logger = pino({ + level: 'ERROR' + }, stream) + + logger.error('test') + const { level } = await once(stream, 'data') + assert.equal(level, 50) +}) + +test('accepts capital letter for WARN level', async () => { + const stream = sink() + const logger = pino({ + level: 'WARN' + }, stream) + + logger.warn('test') + const { level } = await once(stream, 'data') + assert.equal(level, 40) +}) + +test('accepts capital letter for DEBUG level', async () => { + const stream = sink() + const logger = pino({ + level: 'DEBUG' + }, stream) + + logger.debug('test') + const { level } = await once(stream, 'data') + assert.equal(level, 20) +}) + +test('accepts capital letter for TRACE level', async () => { + const stream = sink() + const logger = pino({ + level: 'TRACE' + }, stream) + + logger.trace('test') + const { level } = await once(stream, 'data') + assert.equal(level, 10) +}) diff --git a/node_modules/pino/test/metadata.test.js b/node_modules/pino/test/metadata.test.js new file mode 100644 index 0000000..f4f088b --- /dev/null +++ b/node_modules/pino/test/metadata.test.js @@ -0,0 +1,120 @@ +'use strict' + +const test = require('node:test') +const os = require('node:os') +const tspl = require('@matteo.collina/tspl') + +const pino = require('../') + +const { pid } = process +const hostname = os.hostname() + +test('metadata works', async (t) => { + const plan = tspl(t, { plan: 7 }) + const now = Date.now() + const instance = pino({}, { + [Symbol.for('pino.metadata')]: true, + write (chunk) { + plan.equal(instance, this.lastLogger) + plan.equal(30, this.lastLevel) + plan.equal('a msg', this.lastMsg) + plan.ok(Number(this.lastTime) >= now) + plan.deepEqual(this.lastObj, { hello: 'world' }) + const result = JSON.parse(chunk) + plan.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + plan.deepEqual(result, { + pid, + hostname, + level: 30, + hello: 'world', + msg: 'a msg' + }) + } + }) + + instance.info({ hello: 'world' }, 'a msg') + + await plan +}) + +test('child loggers works', async (t) => { + const plan = tspl(t, { plan: 6 }) + const instance = pino({}, { + [Symbol.for('pino.metadata')]: true, + write (chunk) { + plan.equal(child, this.lastLogger) + plan.equal(30, this.lastLevel) + plan.equal('a msg', this.lastMsg) + plan.deepEqual(this.lastObj, { from: 'child' }) + const result = JSON.parse(chunk) + plan.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + plan.deepEqual(result, { + pid, + hostname, + level: 30, + hello: 'world', + from: 'child', + msg: 'a msg' + }) + } + }) + + const child = instance.child({ hello: 'world' }) + child.info({ from: 'child' }, 'a msg') + + await plan +}) + +test('without object', async (t) => { + const plan = tspl(t, { plan: 6 }) + const instance = pino({}, { + [Symbol.for('pino.metadata')]: true, + write (chunk) { + plan.equal(instance, this.lastLogger) + plan.equal(30, this.lastLevel) + plan.equal('a msg', this.lastMsg) + plan.deepEqual({ }, this.lastObj) + const result = JSON.parse(chunk) + plan.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + plan.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'a msg' + }) + } + }) + + instance.info('a msg') + + await plan +}) + +test('without msg', async (t) => { + const plan = tspl(t, { plan: 6 }) + const instance = pino({}, { + [Symbol.for('pino.metadata')]: true, + write (chunk) { + plan.equal(instance, 
this.lastLogger) + plan.equal(30, this.lastLevel) + plan.equal(undefined, this.lastMsg) + plan.deepEqual({ hello: 'world' }, this.lastObj) + const result = JSON.parse(chunk) + plan.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + plan.deepEqual(result, { + pid, + hostname, + level: 30, + hello: 'world' + }) + } + }) + + instance.info({ hello: 'world' }) + + await plan +}) diff --git a/node_modules/pino/test/mixin-merge-strategy.test.js b/node_modules/pino/test/mixin-merge-strategy.test.js new file mode 100644 index 0000000..d78cbe5 --- /dev/null +++ b/node_modules/pino/test/mixin-merge-strategy.test.js @@ -0,0 +1,57 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') + +const { sink, once } = require('./helper') +const pino = require('../') + +const level = 50 +const name = 'error' + +test('default merge strategy', async () => { + const stream = sink() + const instance = pino({ + base: {}, + mixin () { + return { tag: 'k8s' } + } + }, stream) + instance.level = name + instance[name]({ + tag: 'local' + }, 'test') + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + level, + msg: 'test', + tag: 'local' + }) +}) + +test('custom merge strategy with mixin priority', async () => { + const stream = sink() + const instance = pino({ + base: {}, + mixin () { + return { tag: 'k8s' } + }, + mixinMergeStrategy (mergeObject, mixinObject) { + return Object.assign(mergeObject, mixinObject) + } + }, stream) + instance.level = name + instance[name]({ + tag: 'local' + }, 'test') + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + level, + msg: 'test', + tag: 'k8s' + }) +}) diff --git a/node_modules/pino/test/mixin.test.js b/node_modules/pino/test/mixin.test.js new file mode 100644 index 0000000..b944683 --- /dev/null +++ b/node_modules/pino/test/mixin.test.js @@ -0,0 +1,241 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const tspl = require('@matteo.collina/tspl') + +const { sink, once } = require('./helper') +const pino = require('../') + +const { pid } = process +const hostname = os.hostname() +const level = 50 +const name = 'error' + +test('mixin object is included', async () => { + let n = 0 + const stream = sink() + const instance = pino({ + mixin () { + return { hello: ++n } + } + }, stream) + instance.level = name + instance[name]('test') + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + msg: 'test', + hello: 1 + }) +}) + +test('mixin object is new every time', async (t) => { + const plan = tspl(t, { plan: 6 }) + + let n = 0 + const stream = sink() + const instance = pino({ + mixin () { + return { hello: n } + } + }, stream) + instance.level = name + + while (++n < 4) { + const msg = `test #${n}` + stream.pause() + instance[name](msg) + stream.resume() + const result = await once(stream, 'data') + plan.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + plan.deepEqual(result, { + pid, + hostname, + level, + msg, + hello: n + }) + } + + await plan +}) + +test('mixin object is not called 
if below log level', async () => { + const stream = sink() + const instance = pino({ + mixin () { + throw Error('should not call mixin function') + } + }, stream) + instance.level = 'error' + instance.info('test') +}) + +test('mixin object + logged object', async () => { + const stream = sink() + const instance = pino({ + mixin () { + return { foo: 1, bar: 2 } + } + }, stream) + instance.level = name + instance[name]({ bar: 3, baz: 4 }) + const result = await once(stream, 'data') + assert.ok(new Date(result.time) <= new Date(), 'time is greater than Date.now()') + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level, + foo: 1, + bar: 3, + baz: 4 + }) +}) + +test('mixin not a function', async () => { + const stream = sink() + assert.throws(function () { + pino({ mixin: 'not a function' }, stream) + }) +}) + +test('mixin can use context', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = sink() + const instance = pino({ + mixin (context) { + plan.ok(context !== null, 'context should be defined') + plan.ok(context !== undefined, 'context should be defined') + plan.deepEqual(context, { + message: '123', + stack: 'stack' + }) + return Object.assign({ + error: context.message, + stack: context.stack + }) + } + }, stream) + instance.level = name + instance[name]({ + message: '123', + stack: 'stack' + }, 'test') + + await plan +}) + +test('mixin works without context', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = sink() + const instance = pino({ + mixin (context) { + plan.ok(context !== null, 'context is still defined w/o passing mergeObject') + plan.ok(context !== undefined, 'context is still defined w/o passing mergeObject') + plan.deepEqual(context, {}) + return { + something: true + } + } + }, stream) + instance.level = name + instance[name]('test') + + await plan +}) + +test('mixin can use level number', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = sink() + const instance = pino({ + mixin (context, num) { + plan.ok(num !== null, 'level should be defined') + plan.ok(num !== undefined, 'level should be defined') + plan.deepEqual(num, level) + return Object.assign({ + error: context.message, + stack: context.stack + }) + } + }, stream) + instance.level = name + instance[name]({ + message: '123', + stack: 'stack' + }, 'test') + + await plan +}) + +test('mixin receives logger as third parameter', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = sink() + const instance = pino({ + mixin (context, num, logger) { + plan.ok(logger !== null, 'logger should be defined') + plan.ok(logger !== undefined, 'logger should be defined') + plan.deepEqual(logger, instance) + return { ...context, num } + } + }, stream) + instance.level = name + instance[name]({ + message: '123' + }, 'test') + + await plan +}) + +test('mixin receives child logger', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = sink() + let child = null + const instance = pino({ + mixin (context, num, logger) { + plan.ok(logger !== null, 'logger should be defined') + plan.ok(logger !== undefined, 'logger should be defined') + plan.deepEqual(logger.expected, child.expected) + return { ...context, num } + } + }, stream) + instance.level = name + instance.expected = false + child = instance.child({}) + child.expected = true + child[name]({ + message: '123' + }, 'test') + + await plan +}) + +test('mixin receives logger even if child exists', async (t) => { + const plan = tspl(t, { plan: 3 }) + const stream = sink() + let child = 
null + const instance = pino({ + mixin (context, num, logger) { + plan.ok(logger !== null, 'logger should be defined') + plan.ok(logger !== undefined, 'logger should be defined') + plan.deepEqual(logger.expected, instance.expected) + return { ...context, num } + } + }, stream) + instance.level = name + instance.expected = false + child = instance.child({}) + child.expected = true + instance[name]({ + message: '123' + }, 'test') + + await plan +}) diff --git a/node_modules/pino/test/multistream.test.js b/node_modules/pino/test/multistream.test.js new file mode 100644 index 0000000..178cfbb --- /dev/null +++ b/node_modules/pino/test/multistream.test.js @@ -0,0 +1,729 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { readFileSync } = require('node:fs') +const { join } = require('node:path') +const proxyquire = require('proxyquire') +const strip = require('strip-ansi') +const tspl = require('@matteo.collina/tspl') + +const writeStream = require('flush-write-stream') +const pino = require('../') +const multistream = pino.multistream +const { file, sink } = require('./helper') + +test('sends to multiple streams using string levels', async () => { + let messageCount = 0 + const stream = writeStream(function (data, enc, cb) { + messageCount += 1 + cb() + }) + const streams = [ + { stream }, + { level: 'debug', stream }, + { level: 'trace', stream }, + { level: 'fatal', stream }, + { level: 'silent', stream } + ] + const log = pino({ + level: 'trace' + }, multistream(streams)) + log.info('info stream') + log.debug('debug stream') + log.fatal('fatal stream') + assert.equal(messageCount, 9) +}) + +test('sends to multiple streams using custom levels', async () => { + let messageCount = 0 + const stream = writeStream(function (data, enc, cb) { + messageCount += 1 + cb() + }) + const streams = [ + { stream }, + { level: 'debug', stream }, + { level: 'trace', stream }, + { level: 'fatal', stream }, + { level: 'silent', stream } + ] + const log = pino({ + level: 'trace' + }, multistream(streams)) + log.info('info stream') + log.debug('debug stream') + log.fatal('fatal stream') + assert.equal(messageCount, 9) +}) + +test('sends to multiple streams using optionally predefined levels', async () => { + let messageCount = 0 + const stream = writeStream(function (data, enc, cb) { + messageCount += 1 + cb() + }) + const opts = { + levels: { + silent: Infinity, + fatal: 60, + error: 50, + warn: 50, + info: 30, + debug: 20, + trace: 10 + } + } + const streams = [ + { stream }, + { level: 'trace', stream }, + { level: 'debug', stream }, + { level: 'info', stream }, + { level: 'warn', stream }, + { level: 'error', stream }, + { level: 'fatal', stream }, + { level: 'silent', stream } + ] + const mstream = multistream(streams, opts) + const log = pino({ + level: 'trace' + }, mstream) + log.trace('trace stream') + log.debug('debug stream') + log.info('info stream') + log.warn('warn stream') + log.error('error stream') + log.fatal('fatal stream') + log.silent('silent stream') + assert.equal(messageCount, 24) +}) + +test('sends to multiple streams using number levels', async () => { + let messageCount = 0 + const stream = writeStream(function (data, enc, cb) { + messageCount += 1 + cb() + }) + const streams = [ + { stream }, + { level: 20, stream }, + { level: 60, stream } + ] + const log = pino({ + level: 'debug' + }, multistream(streams)) + log.info('info stream') + log.debug('debug stream') + log.fatal('fatal stream') + assert.equal(messageCount, 6) +}) + 
+test('level includes higher levels', async () => {
+  let messageCount = 0
+  const stream = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+  const log = pino({}, multistream([{ level: 'info', stream }]))
+  log.fatal('message')
+  assert.equal(messageCount, 1)
+})
+
+test('supports multiple arguments', async (t) => {
+  const plan = tspl(t, { plan: 2 })
+  const messages = []
+  const stream = writeStream(function (data, enc, cb) {
+    messages.push(JSON.parse(data))
+    if (messages.length === 2) {
+      const msg1 = messages[0]
+      plan.equal(msg1.msg, 'foo bar baz foobar')
+
+      const msg2 = messages[1]
+      plan.equal(msg2.msg, 'foo bar baz foobar barfoo foofoo')
+    }
+    cb()
+  })
+  const log = pino({}, multistream({ stream }))
+  log.info('%s %s %s %s', 'foo', 'bar', 'baz', 'foobar') // apply not invoked
+  log.info('%s %s %s %s %s %s', 'foo', 'bar', 'baz', 'foobar', 'barfoo', 'foofoo') // apply invoked
+
+  await plan
+})
+
+test('supports children', async (t) => {
+  const plan = tspl(t, { plan: 2 })
+  const stream = writeStream(function (data, enc, cb) {
+    const input = JSON.parse(data)
+    plan.equal(input.msg, 'child stream')
+    plan.equal(input.child, 'one')
+    cb()
+  })
+  const streams = [
+    { stream }
+  ]
+  const log = pino({}, multistream(streams)).child({ child: 'one' })
+  log.info('child stream')
+
+  await plan
+})
+
+test('supports grandchildren', async (t) => {
+  const plan = tspl(t, { plan: 9 })
+  const messages = []
+  const stream = writeStream(function (data, enc, cb) {
+    messages.push(JSON.parse(data))
+    if (messages.length === 3) {
+      const msg1 = messages[0]
+      plan.equal(msg1.msg, 'grandchild stream')
+      plan.equal(msg1.child, 'one')
+      plan.equal(msg1.grandchild, 'two')
+
+      const msg2 = messages[1]
+      plan.equal(msg2.msg, 'grandchild stream')
+      plan.equal(msg2.child, 'one')
+      plan.equal(msg2.grandchild, 'two')
+
+      const msg3 = messages[2]
+      plan.equal(msg3.msg, 'debug grandchild')
+      plan.equal(msg3.child, 'one')
+      plan.equal(msg3.grandchild, 'two')
+    }
+    cb()
+  })
+  const streams = [
+    { stream },
+    { level: 'debug', stream }
+  ]
+  const log = pino({
+    level: 'debug'
+  }, multistream(streams)).child({ child: 'one' }).child({ grandchild: 'two' })
+  log.info('grandchild stream')
+  log.debug('debug grandchild')
+
+  await plan
+})
+
+test('supports custom levels', (t, end) => {
+  const stream = writeStream(function (data, enc, cb) {
+    assert.equal(JSON.parse(data).msg, 'bar')
+    end()
+  })
+  const log = pino({
+    customLevels: {
+      foo: 35
+    }
+  }, multistream([{ level: 35, stream }]))
+  log.foo('bar')
+})
+
+test('supports pretty print', async (t) => {
+  const plan = tspl(t, { plan: 2 })
+  const stream = writeStream(function (data, enc, cb) {
+    plan.equal(strip(data.toString()).match(/INFO.*: pretty print/) != null, true)
+    cb()
+  })
+
+  const safeBoom = proxyquire('pino-pretty/lib/utils/build-safe-sonic-boom.js', {
+    'sonic-boom': function () {
+      plan.ok('sonic created')
+      stream.flushSync = () => {}
+      stream.flush = () => {}
+      return stream
+    }
+  })
+  const nested = proxyquire('pino-pretty/lib/utils/index.js', {
+    './build-safe-sonic-boom.js': safeBoom
+  })
+  const pretty = proxyquire('pino-pretty', {
+    './lib/utils/index.js': nested
+  })
+
+  const log = pino({
+    level: 'debug',
+    name: 'helloName'
+  }, multistream([
+    { stream: pretty() }
+  ]))
+
+  log.info('pretty print')
+
+  await plan
+})
+
+test('emit propagates events to each stream', async (t) => {
+  const plan = tspl(t, { plan: 3 })
+  const handler = function (data) {
+    plan.equal(data.msg, 'world')
+  }
+  const streams = [sink(), sink(), sink()]
+  streams.forEach(function (s) {
+    s.once('hello', handler)
+  })
+  const stream = multistream(streams)
+  stream.emit('hello', { msg: 'world' })
+
+  await plan
+})
+
+test('children support custom levels', async (t) => {
+  const plan = tspl(t, { plan: 1 })
+  const stream = writeStream(function (data, enc, cb) {
+    plan.equal(JSON.parse(data).msg, 'bar')
+  })
+  const parent = pino({
+    customLevels: {
+      foo: 35
+    }
+  }, multistream([{ level: 35, stream }]))
+  const child = parent.child({ child: 'yes' })
+  child.foo('bar')
+
+  await plan
+})
+
+test('levelVal overrides level', async () => {
+  let messageCount = 0
+  const stream = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+  const streams = [
+    { stream },
+    { level: 'blabla', levelVal: 15, stream },
+    { level: 60, stream }
+  ]
+  const log = pino({
+    level: 'debug'
+  }, multistream(streams))
+  log.info('info stream')
+  log.debug('debug stream')
+  log.fatal('fatal stream')
+  assert.equal(messageCount, 6)
+})
+
+test('forwards metadata', async (t) => {
+  const plan = tspl(t, { plan: 4 })
+  const streams = [
+    {
+      stream: {
+        [Symbol.for('pino.metadata')]: true,
+        write (chunk) {
+          plan.equal(log, this.lastLogger)
+          plan.equal(30, this.lastLevel)
+          plan.deepEqual({ hello: 'world' }, this.lastObj)
+          plan.deepEqual('a msg', this.lastMsg)
+        }
+      }
+    }
+  ]
+
+  const log = pino({
+    level: 'debug'
+  }, multistream(streams))
+
+  log.info({ hello: 'world' }, 'a msg')
+
+  await plan
+})
+
+test('forward name', async (t) => {
+  const plan = tspl(t, { plan: 2 })
+  const streams = [
+    {
+      stream: {
+        [Symbol.for('pino.metadata')]: true,
+        write (chunk) {
+          const line = JSON.parse(chunk)
+          plan.equal(line.name, 'helloName')
+          plan.equal(line.hello, 'world')
+        }
+      }
+    }
+  ]
+
+  const log = pino({
+    level: 'debug',
+    name: 'helloName'
+  }, multistream(streams))
+
+  log.info({ hello: 'world' }, 'a msg')
+
+  await plan
+})
+
+test('forward name with child', async (t) => {
+  const plan = tspl(t, { plan: 3 })
+  const streams = [
+    {
+      stream: {
+        write (chunk) {
+          const line = JSON.parse(chunk)
+          plan.equal(line.name, 'helloName')
+          plan.equal(line.hello, 'world')
+          plan.equal(line.component, 'aComponent')
+        }
+      }
+    }
+  ]
+
+  const log = pino({
+    level: 'debug',
+    name: 'helloName'
+  }, multistream(streams)).child({ component: 'aComponent' })
+
+  log.info({ hello: 'world' }, 'a msg')
+
+  await plan
+})
+
+test('clone generates a new multistream with all streams at the same level', async (t) => {
+  const plan = tspl(t, { plan: 14 })
+  let messageCount = 0
+  const stream = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+  const streams = [
+    { stream },
+    { level: 'debug', stream },
+    { level: 'trace', stream },
+    { level: 'fatal', stream }
+  ]
+  const ms = multistream(streams)
+  const clone = ms.clone(30)
+
+  // eslint-disable-next-line eqeqeq
+  plan.equal(clone != ms, true)
+
+  clone.streams.forEach((s, i) => {
+    // eslint-disable-next-line eqeqeq
+    plan.equal(s != streams[i], true)
+    plan.equal(s.stream, streams[i].stream)
+    plan.equal(s.level, 30)
+  })
+
+  const log = pino({
+    level: 'trace'
+  }, clone)
+
+  log.info('info stream')
+  log.debug('debug message not counted')
+  log.fatal('fatal stream')
+  plan.equal(messageCount, 8)
+
+  await plan
+})
+
+test('one stream', async () => {
+  let messageCount = 0
+  const stream = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+  const log = pino({
+    level: 'trace'
+  }, multistream({ stream, level: 'fatal' }))
+  log.info('info stream')
+  log.debug('debug stream')
+  log.fatal('fatal stream')
+  assert.equal(messageCount, 1)
+})
+
+test('dedupe', async () => {
+  let messageCount = 0
+  const stream1 = writeStream(function (data, enc, cb) {
+    messageCount -= 1
+    cb()
+  })
+
+  const stream2 = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+
+  const streams = [
+    {
+      stream: stream1,
+      level: 'info'
+    },
+    {
+      stream: stream2,
+      level: 'fatal'
+    }
+  ]
+
+  const log = pino({
+    level: 'trace'
+  }, multistream(streams, { dedupe: true }))
+  log.info('info stream')
+  log.fatal('fatal stream')
+  log.fatal('fatal stream')
+  assert.equal(messageCount, 1)
+})
+
+test('dedupe when logs have different levels', async () => {
+  let messageCount = 0
+  const stream1 = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+
+  const stream2 = writeStream(function (data, enc, cb) {
+    messageCount += 2
+    cb()
+  })
+
+  const streams = [
+    {
+      stream: stream1,
+      level: 'info'
+    },
+    {
+      stream: stream2,
+      level: 'error'
+    }
+  ]
+
+  const log = pino({
+    level: 'trace'
+  }, multistream(streams, { dedupe: true }))
+
+  log.info('info stream')
+  log.warn('warn stream')
+  log.error('error streams')
+  log.fatal('fatal streams')
+  assert.equal(messageCount, 6)
+})
+
+test('dedupe when some streams have the same level', async () => {
+  let messageCount = 0
+  const stream1 = writeStream(function (data, enc, cb) {
+    messageCount -= 1
+    cb()
+  })
+
+  const stream2 = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+
+  const stream3 = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+
+  const streams = [
+    {
+      stream: stream1,
+      level: 'info'
+    },
+    {
+      stream: stream2,
+      level: 'fatal'
+    },
+    {
+      stream: stream3,
+      level: 'fatal'
+    }
+  ]
+
+  const log = pino({
+    level: 'trace'
+  }, multistream(streams, { dedupe: true }))
+  log.info('info stream')
+  log.fatal('fatal streams')
+  log.fatal('fatal streams')
+  assert.equal(messageCount, 3)
+})
+
+test('no stream', async () => {
+  const log = pino({
+    level: 'trace'
+  }, multistream())
+  log.info('info stream')
+  log.debug('debug stream')
+  log.fatal('fatal stream')
+})
+
+test('one stream (bare stream argument)', async () => {
+  let messageCount = 0
+  const stream = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+  const log = pino({
+    level: 'trace'
+  }, multistream(stream))
+  log.info('info stream')
+  log.debug('debug stream')
+  log.fatal('fatal stream')
+  assert.equal(messageCount, 2)
+})
+
+test('add a stream', async () => {
+  let messageCount = 0
+  const stream = writeStream(function (data, enc, cb) {
+    messageCount += 1
+    cb()
+  })
+
+  const log = pino({
+    level: 'trace'
+  }, multistream().add(stream))
+  log.info('info stream')
+  log.debug('debug stream')
+  log.fatal('fatal stream')
+  assert.equal(messageCount, 2)
+})
+
+test('remove a stream', async () => {
+  let messageCount1 = 0
+  let messageCount2 = 0
+  let messageCount3 = 0
+
+  const stream1 = writeStream(function (data, enc, cb) {
+    messageCount1 += 1
+    cb()
+  })
+
+  const stream2 = writeStream(function (data, enc, cb) {
+    messageCount2 += 1
+    cb()
+  })
+
+  const stream3 = writeStream(function (data, enc, cb) {
+    messageCount3 += 1
+    cb()
+  })
+
+  const multi = multistream()
+  const log = pino({ level: 'trace', sync: true }, multi)
+
+  multi.add(stream1)
+  const id1 = multi.lastId
+
+  multi.add(stream2)
+  const id2 = multi.lastId
+
+  multi.add(stream3)
+  const id3 = multi.lastId
+
+  log.info('line')
+  multi.remove(id1)
+
+  log.info('line')
+  multi.remove(id2)
+
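+  // only stream3 is still attached at this point, so the next line is counted once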
+ log.info('line') + multi.remove(id3) + + log.info('line') + multi.remove(Math.floor(Math.random() * 1000)) // non-existing id + + assert.equal(messageCount1, 1) + assert.equal(messageCount2, 2) + assert.equal(messageCount3, 3) +}) + +test('multistream.add throws if not a stream', async () => { + try { + pino({ + level: 'trace' + }, multistream().add({})) + } catch (_) { + } +}) + +test('multistream throws if not a stream', async () => { + try { + pino({ + level: 'trace' + }, multistream({})) + } catch (_) { + } +}) + +test('multistream.write should not throw if one stream fails', async () => { + let messageCount = 0 + const stream = writeStream(function (data, enc, cb) { + messageCount += 1 + cb() + }) + const noopStream = pino.transport({ + target: join(__dirname, 'fixtures', 'noop-transport.js') + }) + // eslint-disable-next-line + noopStream.on('error', function (err) { + // something went wrong while writing to noop stream, ignoring! + }) + const log = pino({ + level: 'trace' + }, + multistream([ + { + level: 'trace', + stream + }, + { + level: 'debug', + stream: noopStream + } + ]) + ) + log.debug('0') + noopStream.end() + // noop stream is ending, should emit an error but not throw + log.debug('1') + log.debug('2') + assert.equal(messageCount, 3) +}) + +test('flushSync', async (t) => { + const plan = tspl(t, { plan: 2 }) + const tmp = file() + const destination = pino.destination({ dest: tmp, sync: false, minLength: 4096 }) + const stream = multistream([{ level: 'info', stream: destination }]) + const log = pino({ level: 'info' }, stream) + destination.on('ready', () => { + log.info('foo') + log.info('bar') + stream.flushSync() + plan.equal(readFileSync(tmp, { encoding: 'utf-8' }).split('\n').length - 1, 2) + log.info('biz') + stream.flushSync() + plan.equal(readFileSync(tmp, { encoding: 'utf-8' }).split('\n').length - 1, 3) + }) + + await plan +}) + +test('ends all streams', async (t) => { + const plan = tspl(t, { plan: 7 }) + const stream = writeStream(function (data, enc, cb) { + plan.ok('message') + cb() + }) + stream.flushSync = function () { + plan.ok('flushSync') + } + // stream2 has no flushSync + const stream2 = writeStream(function (data, enc, cb) { + plan.ok('message2') + cb() + }) + const streams = [ + { stream }, + { level: 'debug', stream }, + { level: 'trace', stream: stream2 }, + { level: 'fatal', stream }, + { level: 'silent', stream } + ] + const multi = multistream(streams) + const log = pino({ + level: 'trace' + }, multi) + log.info('info stream') + multi.end() + + await plan +}) diff --git a/node_modules/pino/test/redact.test.js b/node_modules/pino/test/redact.test.js new file mode 100644 index 0000000..456a7f5 --- /dev/null +++ b/node_modules/pino/test/redact.test.js @@ -0,0 +1,893 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') + +const { sink, once } = require('./helper') +const pino = require('../') + +test('redact option – throws if not array', async () => { + assert.throws(() => { + pino({ redact: 'req.headers.cookie' }) + }) +}) + +test('redact option – throws if array does not only contain strings', async () => { + assert.throws(() => { + pino({ redact: ['req.headers.cookie', {}] }) + }) +}) + +test('redact option – throws if array contains an invalid path', async () => { + assert.throws(() => { + pino({ redact: ['req,headers.cookie'] }) + }) +}) + +test('redact.paths option – throws if not array', async () => { + assert.throws(() => { + pino({ redact: { paths: 'req.headers.cookie' } }) + }) +}) + 
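+// The `redact` option accepts either a flat array of dot-notation paths or an
+// object of the form { paths, censor, remove }, as the tests below exercise.
+// A minimal illustrative sketch of the two equivalent shapes:
+//
+//   pino({ redact: ['req.headers.cookie'] })
+//   pino({ redact: { paths: ['req.headers.cookie'], censor: '[Redacted]' } })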
+test('redact.paths option – throws if array does not only contain strings', async () => { + assert.throws(() => { + pino({ redact: { paths: ['req.headers.cookie', {}] } }) + }) +}) + +test('redact.paths option – throws if array contains an invalid path', async () => { + assert.throws(() => { + pino({ redact: { paths: ['req,headers.cookie'] } }) + }) +}) + +test('redact option – top level key', async () => { + const stream = sink() + const instance = pino({ redact: ['key'] }, stream) + instance.info({ + key: { redact: 'me' } + }) + const { key } = await once(stream, 'data') + assert.equal(key, '[Redacted]') +}) + +test('redact option – top level key next level key', async () => { + const stream = sink() + const instance = pino({ redact: ['key', 'key.foo'] }, stream) + instance.info({ + key: { redact: 'me' } + }) + const { key } = await once(stream, 'data') + assert.equal(key, '[Redacted]') +}) + +test('redact option – next level key then top level key', async () => { + const stream = sink() + const instance = pino({ redact: ['key.foo', 'key'] }, stream) + instance.info({ + key: { redact: 'me' } + }) + const { key } = await once(stream, 'data') + assert.equal(key, '[Redacted]') +}) + +test('redact option – object', async () => { + const stream = sink() + const instance = pino({ redact: ['req.headers.cookie'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') +}) + +test('redact option – child object', async () => { + const stream = sink() + const instance = pino({ redact: ['req.headers.cookie'] }, stream) + instance.child({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }).info('message completed') + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') +}) + +test('redact option – interpolated object', async () => { + const stream = sink() + const instance = pino({ redact: ['req.headers.cookie'] }, stream) + + instance.info('test %j', { + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { msg } = await once(stream, 'data') + assert.equal(JSON.parse(msg.replace(/test /, '')).req.headers.cookie, '[Redacted]') +}) + +test('redact.paths option – object', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'] } }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') +}) + +test('redact.paths option – child object', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'] } }, stream) + instance.child({ + 
req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }).info('message completed') + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') +}) + +test('redact.paths option – interpolated object', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'] } }, stream) + + instance.info('test %j', { + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { msg } = await once(stream, 'data') + assert.equal(JSON.parse(msg.replace(/test /, '')).req.headers.cookie, '[Redacted]') +}) + +test('redact.censor option – sets the redact value', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'], censor: 'test' } }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, 'test') +}) + +test('redact.censor option – can be a function that accepts value and path arguments', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['topLevel'], censor: (value, path) => value + ' ' + path.join('.') } }, stream) + instance.info({ + topLevel: 'test' + }) + const { topLevel } = await once(stream, 'data') + assert.equal(topLevel, 'test topLevel') +}) + +test('redact.censor option – can be a function that accepts value and path arguments (nested path)', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'], censor: (value, path) => value + ' ' + path.join('.') } }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1; req.headers.cookie') +}) + +test('redact.remove option – removes both key and value', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'], remove: true } }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal('cookie' in req.headers, false) +}) + +test('redact.remove – top level key - object value', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['key'], remove: true } }, stream) + instance.info({ + key: { redact: 'me' } + }) + const o = await once(stream, 'data') + assert.equal('key' in o, false) +}) + +test('redact.remove – top level key - number value', 
async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['key'], remove: true } }, stream) + instance.info({ + key: 1 + }) + const o = await once(stream, 'data') + assert.equal('key' in o, false) +}) + +test('redact.remove – top level key - boolean value', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['key'], remove: true } }, stream) + instance.info({ + key: false + }) + const o = await once(stream, 'data') + assert.equal('key' in o, false) +}) + +test('redact.remove – top level key in child logger', async () => { + const stream = sink() + const opts = { redact: { paths: ['key'], remove: true } } + const instance = pino(opts, stream).child({ key: { redact: 'me' } }) + instance.info('test') + const o = await once(stream, 'data') + assert.equal('key' in o, false) +}) + +test('redact.paths preserves original object values after the log write', async () => { + const stream = sink() + const instance = pino({ redact: ['req.headers.cookie'] }, stream) + const obj = { + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.req.headers.cookie, '[Redacted]') + assert.equal(obj.req.headers.cookie, 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;') +}) + +test('redact.paths preserves original object values after the log write', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'] } }, stream) + const obj = { + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.req.headers.cookie, '[Redacted]') + assert.equal(obj.req.headers.cookie, 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;') +}) + +test('redact.censor preserves original object values after the log write', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'], censor: 'test' } }, stream) + const obj = { + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.req.headers.cookie, 'test') + assert.equal(obj.req.headers.cookie, 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;') +}) + +test('redact.remove preserves original object values after the log write', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['req.headers.cookie'], remove: true } }, stream) + const obj = { + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal('cookie' in o.req.headers, false) + assert.equal('cookie' in obj.req.headers, true) +}) + +test('redact – 
supports last position wildcard paths', async () => { + const stream = sink() + const instance = pino({ redact: ['req.headers.*'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') + assert.equal(req.headers.host, '[Redacted]') + assert.equal(req.headers.connection, '[Redacted]') +}) + +test('redact – supports first position wildcard paths', async () => { + const stream = sink() + const instance = pino({ redact: ['*.headers'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers, '[Redacted]') +}) + +test('redact – supports first position wildcards before other paths', async () => { + const stream = sink() + const instance = pino({ redact: ['*.headers.cookie', 'req.id'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') + assert.equal(req.id, '[Redacted]') +}) + +test('redact – supports first position wildcards after other paths', async () => { + const stream = sink() + const instance = pino({ redact: ['req.id', '*.headers.cookie'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') + assert.equal(req.id, '[Redacted]') +}) + +test('redact – supports first position wildcards after top level keys', async () => { + const stream = sink() + const instance = pino({ redact: ['key', '*.headers.cookie'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') +}) + +test('redact – supports top level wildcard', async () => { + const stream = sink() + const instance = pino({ redact: ['*'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req, '[Redacted]') +}) + +test('redact – supports top level wildcard with a censor function', async () => { + const stream = sink() + const instance = pino({ + redact: { + paths: ['*'], + 
censor: () => '[Redacted]' + } + }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req, '[Redacted]') +}) + +test('redact – supports top level wildcard and leading wildcard', async () => { + const stream = sink() + const instance = pino({ redact: ['*', '*.req'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req, '[Redacted]') +}) + +test('redact – supports intermediate wildcard paths', async () => { + const stream = sink() + const instance = pino({ redact: ['req.*.cookie'] }, stream) + instance.info({ + req: { + id: 7915, + method: 'GET', + url: '/', + headers: { + host: 'localhost:3000', + connection: 'keep-alive', + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + }, + remoteAddress: '::ffff:127.0.0.1', + remotePort: 58022 + } + }) + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') +}) + +test('redacts numbers at the top level', async () => { + const stream = sink() + const instance = pino({ redact: ['id'] }, stream) + const obj = { + id: 7915 + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.id, '[Redacted]') +}) + +test('redacts booleans at the top level', async () => { + const stream = sink() + const instance = pino({ redact: ['maybe'] }, stream) + const obj = { + maybe: true + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.maybe, '[Redacted]') +}) + +test('redacts strings at the top level', async () => { + const stream = sink() + const instance = pino({ redact: ['s'] }, stream) + const obj = { + s: 's' + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.s, '[Redacted]') +}) + +test('does not redact primitives if not objects', async () => { + const stream = sink() + const instance = pino({ redact: ['a.b'] }, stream) + const obj = { + a: 42 + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.a, 42) +}) + +test('redacts null at the top level', async () => { + const stream = sink() + const instance = pino({ redact: ['n'] }, stream) + const obj = { + n: null + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.n, '[Redacted]') +}) + +test('supports bracket notation', async () => { + const stream = sink() + const instance = pino({ redact: ['a["b.b"]'] }, stream) + const obj = { + a: { 'b.b': 'c' } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.a['b.b'], '[Redacted]') +}) + +test('supports bracket notation with further nesting', async () => { + const stream = sink() + const instance = pino({ redact: ['a["b.b"].c'] }, stream) + const obj = { + a: { 'b.b': { c: 'd' } } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.a['b.b'].c, '[Redacted]') +}) + +test('supports bracket notation with empty string as path segment', async () => { + const stream = sink() + const instance = pino({ redact: ['a[""].c'] }, stream) + const obj = { + a: { 
'': { c: 'd' } } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o.a[''].c, '[Redacted]') +}) + +test('supports leading bracket notation (single quote)', async () => { + const stream = sink() + const instance = pino({ redact: ['[\'a.a\'].b'] }, stream) + const obj = { + 'a.a': { b: 'c' } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o['a.a'].b, '[Redacted]') +}) + +test('supports leading bracket notation (double quote)', async () => { + const stream = sink() + const instance = pino({ redact: ['["a.a"].b'] }, stream) + const obj = { + 'a.a': { b: 'c' } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o['a.a'].b, '[Redacted]') +}) + +test('supports leading bracket notation (backtick quote)', async () => { + const stream = sink() + const instance = pino({ redact: ['[`a.a`].b'] }, stream) + const obj = { + 'a.a': { b: 'c' } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o['a.a'].b, '[Redacted]') +}) + +test('supports leading bracket notation (single-segment path)', async () => { + const stream = sink() + const instance = pino({ redact: ['[`a.a`]'] }, stream) + const obj = { + 'a.a': { b: 'c' } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o['a.a'], '[Redacted]') +}) + +test('supports leading bracket notation (single-segment path, wildcard)', async () => { + const stream = sink() + const instance = pino({ redact: ['[*]'] }, stream) + const obj = { + 'a.a': { b: 'c' } + } + instance.info(obj) + const o = await once(stream, 'data') + assert.equal(o['a.a'], '[Redacted]') +}) + +test('child bindings are redacted using wildcard path', async () => { + const stream = sink() + const instance = pino({ redact: ['*.headers.cookie'] }, stream) + instance.child({ + req: { + method: 'GET', + url: '/', + headers: { + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + } + } + }).info('message completed') + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') +}) + +test('child bindings are redacted using wildcard and plain path keys', async () => { + const stream = sink() + const instance = pino({ redact: ['req.method', '*.headers.cookie'] }, stream) + instance.child({ + req: { + method: 'GET', + url: '/', + headers: { + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + } + } + }).info('message completed') + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, '[Redacted]') + assert.equal(req.method, '[Redacted]') +}) + +test('redacts boolean at the top level', async () => { + const stream = sink() + const instance = pino({ redact: ['msg'] }, stream) + const obj = { + s: 's' + } + instance.info(obj, true) + const o = await once(stream, 'data') + assert.equal(o.s, 's') + assert.equal(o.msg, '[Redacted]') +}) + +test('child can customize redact', async () => { + const stream = sink() + const instance = pino({ redact: ['req.method', '*.headers.cookie'] }, stream) + instance.child({ + req: { + method: 'GET', + url: '/', + headers: { + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + } + } + }, { + redact: ['req.url'] + }).info('message completed') + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;') + assert.equal(req.method, 'GET') + assert.equal(req.url, '[Redacted]') +}) + +test('child can remove parent redact by array', async () => { + const 
stream = sink() + const instance = pino({ redact: ['req.method', '*.headers.cookie'] }, stream) + instance.child({ + req: { + method: 'GET', + url: '/', + headers: { + cookie: 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;' + } + } + }, { + redact: [] + }).info('message completed') + const { req } = await once(stream, 'data') + assert.equal(req.headers.cookie, 'SESSID=298zf09hf012fh2; csrftoken=u32t4o3tb3gg43; _gat=1;') + assert.equal(req.method, 'GET') +}) + +test('redact safe stringify', async () => { + const stream = sink() + const instance = pino({ redact: { paths: ['that.secret'] } }, stream) + + instance.info({ + that: { + secret: 'please hide me', + myBigInt: 123n + }, + other: { + mySecondBigInt: 222n + } + }) + const { that, other } = await once(stream, 'data') + assert.equal(that.secret, '[Redacted]') + assert.equal(that.myBigInt, 123) + assert.equal(other.mySecondBigInt, 222) +}) + +test('censor function should not be called for non-existent nested paths (issue #2313)', async () => { + const stream = sink() + const censorCalls = [] + + const instance = pino({ + redact: { + paths: ['a.b.c', 'req.authorization', 'url'], + censor (value, path) { + censorCalls.push({ value, path: path.join('.') }) + if (typeof value !== 'string') { + return '***' + } + return '***' + } + } + }, stream) + + // Test case 1: parent exists but nested path doesn't + censorCalls.length = 0 + instance.info({ req: { id: 'test' } }, 'test message') + await once(stream, 'data') + assert.equal(censorCalls.length, 0, 'censor should not be called when req.authorization does not exist') + + // Test case 2: parent exists but deeply nested path doesn't + censorCalls.length = 0 + instance.info({ a: { d: 'test' } }, 'test message') + await once(stream, 'data') + assert.equal(censorCalls.length, 0, 'censor should not be called when a.b.c does not exist') + + // Test case 3: multiple parent keys exist but nested paths don't + censorCalls.length = 0 + instance.info({ a: { c: 'should-not-show-me' }, req: { id: 'test' } }, 'test message') + await once(stream, 'data') + assert.equal(censorCalls.length, 0, 'censor should not be called when neither a.b.c nor req.authorization exist') + + // Test case 4: verify censor IS called when path exists + censorCalls.length = 0 + instance.info({ req: { authorization: 'bearer token' } }, 'test message') + await once(stream, 'data') + assert.equal(censorCalls.length, 1, 'censor should be called when req.authorization exists') + assert.equal(censorCalls[0].path, 'req.authorization') + assert.equal(censorCalls[0].value, 'bearer token') +}) diff --git a/node_modules/pino/test/serializers.test.js b/node_modules/pino/test/serializers.test.js new file mode 100644 index 0000000..b840071 --- /dev/null +++ b/node_modules/pino/test/serializers.test.js @@ -0,0 +1,257 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const stdSerializers = require('pino-std-serializers') + +const { sink, once } = require('./helper') +const pino = require('../') + +const parentSerializers = { + test: () => 'parent' +} + +const childSerializers = { + test: () => 'child' +} + +test('default err namespace error serializer', async () => { + const stream = sink() + const parent = pino(stream) + + parent.info({ err: ReferenceError('test') }) + const o = await once(stream, 'data') + assert.equal(typeof o.err, 'object') + assert.equal(o.err.type, 'ReferenceError') + assert.equal(o.err.message, 'test') + assert.equal(typeof o.err.stack, 'string') +}) + +test('custom 
serializer overrides default err namespace error serializer', async () => { + const stream = sink() + const parent = pino({ + serializers: { + err: (e) => ({ + t: e.constructor.name, + m: e.message, + s: e.stack + }) + } + }, stream) + + parent.info({ err: ReferenceError('test') }) + const o = await once(stream, 'data') + assert.equal(typeof o.err, 'object') + assert.equal(o.err.t, 'ReferenceError') + assert.equal(o.err.m, 'test') + assert.equal(typeof o.err.s, 'string') +}) + +test('custom serializer overrides default err namespace error serializer when nestedKey is on', async () => { + const stream = sink() + const parent = pino({ + nestedKey: 'obj', + serializers: { + err: (e) => { + return { + t: e.constructor.name, + m: e.message, + s: e.stack + } + } + } + }, stream) + + parent.info({ err: ReferenceError('test') }) + const o = await once(stream, 'data') + assert.equal(typeof o.obj.err, 'object') + assert.equal(o.obj.err.t, 'ReferenceError') + assert.equal(o.obj.err.m, 'test') + assert.equal(typeof o.obj.err.s, 'string') +}) + +test('null overrides default err namespace error serializer', async () => { + const stream = sink() + const parent = pino({ serializers: { err: null } }, stream) + + parent.info({ err: ReferenceError('test') }) + const o = await once(stream, 'data') + assert.equal(typeof o.err, 'object') + assert.equal(typeof o.err.type, 'undefined') + assert.equal(typeof o.err.message, 'undefined') + assert.equal(typeof o.err.stack, 'undefined') +}) + +test('undefined overrides default err namespace error serializer', async () => { + const stream = sink() + const parent = pino({ serializers: { err: undefined } }, stream) + + parent.info({ err: ReferenceError('test') }) + const o = await once(stream, 'data') + assert.equal(typeof o.err, 'object') + assert.equal(typeof o.err.type, 'undefined') + assert.equal(typeof o.err.message, 'undefined') + assert.equal(typeof o.err.stack, 'undefined') +}) + +test('serializers override values', async () => { + const stream = sink() + const parent = pino({ serializers: parentSerializers }, stream) + parent.child({}, { serializers: childSerializers }) + + parent.fatal({ test: 'test' }) + const o = await once(stream, 'data') + assert.equal(o.test, 'parent') +}) + +test('child does not overwrite parent serializers', async () => { + const stream = sink() + const parent = pino({ serializers: parentSerializers }, stream) + const child = parent.child({}, { serializers: childSerializers }) + + parent.fatal({ test: 'test' }) + + const o = once(stream, 'data') + assert.equal((await o).test, 'parent') + const o2 = once(stream, 'data') + child.fatal({ test: 'test' }) + assert.equal((await o2).test, 'child') +}) + +test('Symbol.for(\'pino.serializers\')', async () => { + const stream = sink() + const expected = Object.assign({ + err: stdSerializers.err + }, parentSerializers) + const parent = pino({ serializers: parentSerializers }, stream) + const child = parent.child({ a: 'property' }) + + assert.deepEqual(parent[Symbol.for('pino.serializers')], expected) + assert.deepEqual(child[Symbol.for('pino.serializers')], expected) + assert.equal(parent[Symbol.for('pino.serializers')], child[Symbol.for('pino.serializers')]) + + const child2 = parent.child({}, { + serializers: { + a + } + }) + + function a () { + return 'hello' + } + + // eslint-disable-next-line eqeqeq + assert.equal(child2[Symbol.for('pino.serializers')] != parentSerializers, true) + assert.equal(child2[Symbol.for('pino.serializers')].a, a) + 
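+  // child2 received a fresh serializer map: its own 'a' plus the parent's inherited 'test'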
assert.equal(child2[Symbol.for('pino.serializers')].test, parentSerializers.test) +}) + +test('children inherit parent serializers', async () => { + const stream = sink() + const parent = pino({ serializers: parentSerializers }, stream) + + const child = parent.child({ a: 'property' }) + child.fatal({ test: 'test' }) + const o = await once(stream, 'data') + assert.equal(o.test, 'parent') +}) + +test('children inherit parent Symbol serializers', async () => { + const stream = sink() + const symbolSerializers = { + [Symbol.for('b')]: b + } + const expected = Object.assign({ + err: stdSerializers.err + }, symbolSerializers) + const parent = pino({ serializers: symbolSerializers }, stream) + + assert.deepEqual(parent[Symbol.for('pino.serializers')], expected) + + const child = parent.child({}, { + serializers: { + [Symbol.for('a')]: a, + a + } + }) + + function a () { + return 'hello' + } + + function b () { + return 'world' + } + + assert.deepEqual(child[Symbol.for('pino.serializers')].a, a) + assert.deepEqual(child[Symbol.for('pino.serializers')][Symbol.for('b')], b) + assert.deepEqual(child[Symbol.for('pino.serializers')][Symbol.for('a')], a) +}) + +test('children serializers get called', async () => { + const stream = sink() + const parent = pino({ + test: 'this' + }, stream) + + const child = parent.child({ a: 'property' }, { serializers: childSerializers }) + + child.fatal({ test: 'test' }) + const o = await once(stream, 'data') + assert.equal(o.test, 'child') +}) + +test('children serializers get called when inherited from parent', async () => { + const stream = sink() + const parent = pino({ + test: 'this', + serializers: parentSerializers + }, stream) + + const child = parent.child({}, { serializers: { test: function () { return 'pass' } } }) + + child.fatal({ test: 'fail' }) + const o = await once(stream, 'data') + assert.equal(o.test, 'pass') +}) + +test('non-overridden serializers are available in the children', async () => { + const stream = sink() + const pSerializers = { + onlyParent: function () { return 'parent' }, + shared: function () { return 'parent' } + } + + const cSerializers = { + shared: function () { return 'child' }, + onlyChild: function () { return 'child' } + } + + const parent = pino({ serializers: pSerializers }, stream) + + const child = parent.child({}, { serializers: cSerializers }) + + const o = once(stream, 'data') + child.fatal({ shared: 'test' }) + assert.equal((await o).shared, 'child') + const o2 = once(stream, 'data') + child.fatal({ onlyParent: 'test' }) + assert.equal((await o2).onlyParent, 'parent') + const o3 = once(stream, 'data') + child.fatal({ onlyChild: 'test' }) + assert.equal((await o3).onlyChild, 'child') + const o4 = once(stream, 'data') + parent.fatal({ onlyChild: 'test' }) + assert.equal((await o4).onlyChild, 'test') +}) + +test('custom serializer for messageKey', async () => { + const stream = sink() + const instance = pino({ serializers: { msg: () => '422' } }, stream) + + const o = { num: NaN } + instance.info(o, 42) + + const { msg } = await once(stream, 'data') + assert.equal(msg, '422') +}) diff --git a/node_modules/pino/test/stdout-protection.test.js b/node_modules/pino/test/stdout-protection.test.js new file mode 100644 index 0000000..d557e5d --- /dev/null +++ b/node_modules/pino/test/stdout-protection.test.js @@ -0,0 +1,41 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { join } = require('node:path') +const { fork } = require('node:child_process') +const writer = 
require('flush-write-stream')
+
+const { once } = require('./helper')
+const pino = require('..')
+
+test('do not use SonicBoom if someone tampered with process.stdout.write', async () => {
+  let actual = ''
+  const child = fork(join(__dirname, 'fixtures', 'stdout-hack-protection.js'), { silent: true })
+
+  child.stdout.pipe(writer((s, enc, cb) => {
+    actual += s
+    cb()
+  }))
+  await once(child, 'close')
+  assert.equal(actual.match(/^hack/) != null, true)
+})
+
+test('do not use SonicBoom if someone has passed process.stdout to pino', async () => {
+  const logger = pino(process.stdout)
+  assert.equal(logger[pino.symbols.streamSym], process.stdout)
+})
+
+test('do not crash if process.stdout has no fd', async (t) => {
+  const fd = process.stdout.fd
+  delete process.stdout.fd
+  t.after(function () { process.stdout.fd = fd })
+  pino()
+})
+
+test('use fd=1 if process.stdout has no fd in pino.destination() (worker case)', async (t) => {
+  const fd = process.stdout.fd
+  delete process.stdout.fd
+  t.after(function () { process.stdout.fd = fd })
+  pino.destination()
+})
diff --git a/node_modules/pino/test/syncfalse.test.js b/node_modules/pino/test/syncfalse.test.js
new file mode 100644
index 0000000..d39c1dc
--- /dev/null
+++ b/node_modules/pino/test/syncfalse.test.js
@@ -0,0 +1,186 @@
+'use strict'
+
+const test = require('node:test')
+const assert = require('node:assert')
+const os = require('node:os')
+const { promises: { readFile }, createWriteStream } = require('node:fs')
+const { join } = require('node:path')
+const { fork } = require('node:child_process')
+const writer = require('flush-write-stream')
+const {
+  once,
+  getPathToNull,
+  file,
+  watchFileCreated
+} = require('./helper')
+const { promisify } = require('node:util')
+const tspl = require('@matteo.collina/tspl')
+
+const sleep = promisify(setTimeout)
+
+test('asynchronous logging', async (t) => {
+  const now = Date.now
+  const hostname = os.hostname
+  const proc = process
+  global.process = {
+    __proto__: process,
+    pid: 123456
+  }
+  Date.now = () => 1459875739796
+  os.hostname = () => 'abcdefghijklmnopqr'
+  delete require.cache[require.resolve('../')]
+  const pino = require('../')
+  let expected = ''
+  let actual = ''
+  const normal = pino(writer((s, enc, cb) => {
+    expected += s
+    cb()
+  }))
+
+  const dest = createWriteStream(getPathToNull())
+  dest.write = (s) => {
+    actual += s
+  }
+  const asyncLogger = pino(dest)
+
+  let i = 44
+  while (i--) {
+    normal.info('h')
+    asyncLogger.info('h')
+  }
+
+  const expected2 = expected.split('\n')[0]
+  let actual2 = ''
+
+  const child = fork(join(__dirname, '/fixtures/syncfalse.js'), { silent: true })
+  child.stdout.pipe(writer((s, enc, cb) => {
+    actual2 += s
+    cb()
+  }))
+  await once(child, 'close')
+  // Wait for the last write to be flushed
+  await sleep(100)
+  assert.equal(actual, expected)
+  assert.equal(actual2.trim(), expected2)
+
+  t.after(() => {
+    os.hostname = hostname
+    Date.now = now
+    global.process = proc
+  })
+})
+
+test('sync false with child', async (t) => {
+  const now = Date.now
+  const hostname = os.hostname
+  const proc = process
+  global.process = {
+    __proto__: process,
+    pid: 123456
+  }
+  Date.now = function () {
+    return 1459875739796
+  }
+  os.hostname = function () {
+    return 'abcdefghijklmnopqr'
+  }
+  delete require.cache[require.resolve('../')]
+  const pino = require('../')
+  let expected = ''
+  let actual = ''
+  const normal = pino(writer((s, enc, cb) => {
+    expected += s
+    cb()
+  })).child({ hello: 'world' })
+
+  const dest = createWriteStream(getPathToNull())
+  dest.write = function (s) {
+    actual += s
+  }
+  const asyncLogger = pino(dest).child({ hello: 'world' })
+
+  let i = 500
+  while (i--) {
+    normal.info('h')
+    asyncLogger.info('h')
+  }
+
+  asyncLogger.flush()
+
+  const expected2 = expected.split('\n')[0]
+  let actual2 = ''
+
+  const child = fork(join(__dirname, '/fixtures/syncfalse-child.js'), { silent: true })
+  child.stdout.pipe(writer((s, enc, cb) => {
+    actual2 += s
+    cb()
+  }))
+  await once(child, 'close')
+  assert.equal(actual, expected)
+  assert.equal(actual2.trim(), expected2)
+
+  t.after(() => {
+    os.hostname = hostname
+    Date.now = now
+    global.process = proc
+  })
+})
+
+test('flush does nothing with sync true (default)', async () => {
+  const instance = require('..')()
+  assert.equal(instance.flush(), undefined)
+})
+
+test('should still call the flush callback even when it does nothing with sync true (default)', async (t) => {
+  const plan = tspl(t, { plan: 3 })
+  const instance = require('..')()
+  instance.flush((...args) => {
+    plan.ok('flush called')
+    plan.deepEqual(args, [])
+
+    // next tick to ensure flush is not called more than once
+    process.nextTick(() => {
+      plan.ok('flush next tick called')
+    })
+  })
+
+  await plan
+})
+
+test('should call the flush callback when the data has been flushed for an async logger', async () => {
+  const outputPath = file()
+  async function getOutputLogLines () {
+    return (await readFile(outputPath)).toString().trim().split('\n').map(JSON.parse)
+  }
+
+  const pino = require('../')
+
+  const instance = pino({}, pino.destination({
+    dest: outputPath,
+
+    // to make sure it does not flush on its own
+    minLength: 4096
+  }))
+  const flushPromise = promisify(instance.flush).bind(instance)
+
+  instance.info('hello')
+  await flushPromise()
+  await watchFileCreated(outputPath)
+
+  const [firstFlushData] = await getOutputLogLines()
+
+  assert.equal(firstFlushData.msg, 'hello')
+
+  // should not flush this, as the accumulated data is smaller than minLength
+  instance.info('world')
+
+  // Making sure data is not flushed yet
+  const afterLogData = await getOutputLogLines()
+  assert.equal(afterLogData.length, 1)
+
+  await flushPromise()
+
+  // Making sure the second line has now been flushed
+  const afterSecondFlush = (await getOutputLogLines())[1]
+  assert.equal(afterSecondFlush.msg, 'world')
+})
diff --git a/node_modules/pino/test/timestamp-nano.test.js b/node_modules/pino/test/timestamp-nano.test.js
new file mode 100644
index 0000000..ce6ecec
--- /dev/null
+++ b/node_modules/pino/test/timestamp-nano.test.js
@@ -0,0 +1,37 @@
+'use strict'
+
+/* eslint no-prototype-builtins: 0 */
+
+const test = require('node:test')
+const assert = require('node:assert')
+
+const { sink, once } = require('./helper')
+
+test('pino.stdTimeFunctions.isoTimeNano returns RFC 3339 timestamps', async () => {
+  // Mock Date.now at module initialization time
+  const now = Date.now
+  Date.now = () => new Date('2025-08-01T15:03:45.000000000Z').getTime()
+
+  // Mock process.hrtime.bigint at module initialization time
+  const hrTimeBigint = process.hrtime.bigint
+  process.hrtime.bigint = () => 100000000000000n
+
+  const pino = require('../')
+
+  const opts = {
+    timestamp: pino.stdTimeFunctions.isoTimeNano
+  }
+  const stream = sink()
+
+  // Mock process.hrtime.bigint at invocation time, add 1 day to the timestamp
+  process.hrtime.bigint = () => 100000000000000n + 86400012345678n
+
+  const instance = pino(opts, stream)
+  instance.info('foobar')
+  const result = await once(stream, 'data')
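+  // The mocked hrtime advanced by 86400012345678n between initialization and
+  // the write, i.e. one day plus 12345678ns, so the expected timestamp below
+  // is one day (plus the nanosecond fraction) past the mocked wall-clock time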
+  assert.equal(result.hasOwnProperty('time'), true)
+  assert.equal(result.time, '2025-08-02T15:03:45.012345678Z')
+
+  Date.now = now
+  process.hrtime.bigint = hrTimeBigint
+})
diff --git a/node_modules/pino/test/timestamp.test.js b/node_modules/pino/test/timestamp.test.js
new file mode 100644
index 0000000..f7d4a01
--- /dev/null
+++ b/node_modules/pino/test/timestamp.test.js
@@ -0,0 +1,124 @@
+'use strict'
+
+/* eslint no-prototype-builtins: 0 */
+
+const test = require('node:test')
+const assert = require('node:assert')
+
+const { sink, once } = require('./helper')
+const pino = require('../')
+
+test('pino exposes standard time functions', async () => {
+  assert.ok(pino.stdTimeFunctions)
+  assert.ok(pino.stdTimeFunctions.epochTime)
+  assert.ok(pino.stdTimeFunctions.unixTime)
+  assert.ok(pino.stdTimeFunctions.nullTime)
+  assert.ok(pino.stdTimeFunctions.isoTime)
+  assert.ok(pino.stdTimeFunctions.isoTimeNano)
+})
+
+test('pino accepts external time functions', async () => {
+  const opts = {
+    timestamp: () => ',"time":"none"'
+  }
+  const stream = sink()
+  const instance = pino(opts, stream)
+  instance.info('foobar')
+  const result = await once(stream, 'data')
+  assert.equal(result.hasOwnProperty('time'), true)
+  assert.equal(result.time, 'none')
+})
+
+test('pino accepts external time functions with custom label', async () => {
+  const opts = {
+    timestamp: () => ',"custom-time-label":"none"'
+  }
+  const stream = sink()
+  const instance = pino(opts, stream)
+  instance.info('foobar')
+  const result = await once(stream, 'data')
+  assert.equal(result.hasOwnProperty('custom-time-label'), true)
+  assert.equal(result['custom-time-label'], 'none')
+})
+
+test('inserts timestamp by default', async () => {
+  const stream = sink()
+  const instance = pino(stream)
+  instance.info('foobar')
+  const result = await once(stream, 'data')
+  assert.equal(result.hasOwnProperty('time'), true)
+  assert.ok(new Date(result.time) <= new Date(), 'time is no later than now')
+  assert.equal(result.msg, 'foobar')
+})
+
+test('omits timestamp when timestamp option is false', async () => {
+  const stream = sink()
+  const instance = pino({ timestamp: false }, stream)
+  instance.info('foobar')
+  const result = await once(stream, 'data')
+  assert.equal(result.hasOwnProperty('time'), false)
+  assert.equal(result.msg, 'foobar')
+})
+
+test('inserts timestamp when timestamp option is true', async () => {
+  const stream = sink()
+  const instance = pino({ timestamp: true }, stream)
+  instance.info('foobar')
+  const result = await once(stream, 'data')
+  assert.equal(result.hasOwnProperty('time'), true)
+  assert.ok(new Date(result.time) <= new Date(), 'time is no later than now')
+  assert.equal(result.msg, 'foobar')
+})
+
+test('child inserts timestamp by default', async () => {
+  const stream = sink()
+  const logger = pino(stream)
+  const instance = logger.child({ component: 'child' })
+  instance.info('foobar')
+  const result = await once(stream, 'data')
+  assert.equal(result.hasOwnProperty('time'), true)
+  assert.ok(new Date(result.time) <= new Date(), 'time is no later than now')
+  assert.equal(result.msg, 'foobar')
+})
+
+test('child omits timestamp with option', async () => {
+  const stream = sink()
+  const logger = pino({ timestamp: false }, stream)
+  const instance = logger.child({ component: 'child' })
+  instance.info('foobar')
+  const result = await once(stream, 'data')
+  assert.equal(result.hasOwnProperty('time'), false)
+  assert.equal(result.msg, 'foobar')
+})
+
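+// A timestamp function returns a raw JSON fragment that pino splices into the
+// log line, e.g. pino.stdTimeFunctions.epochTime yields ',"time":1531069919686'.
+// Custom functions (see the tests above) just follow the same ',"key":value'
+// shape; the tests below pin down the built-in variants.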
+test('pino.stdTimeFunctions.unixTime returns seconds based timestamps', async () => { + const opts = { + timestamp: pino.stdTimeFunctions.unixTime + } + const stream = sink() + const instance = pino(opts, stream) + const now = Date.now + Date.now = () => 1531069919686 + instance.info('foobar') + const result = await once(stream, 'data') + assert.equal(result.hasOwnProperty('time'), true) + assert.equal(result.time, 1531069920) + Date.now = now +}) + +test('pino.stdTimeFunctions.isoTime returns ISO 8601 timestamps', async () => { + const opts = { + timestamp: pino.stdTimeFunctions.isoTime + } + const stream = sink() + const instance = pino(opts, stream) + const ms = 1531069919686 + const now = Date.now + Date.now = () => ms + const iso = new Date(ms).toISOString() + instance.info('foobar') + const result = await once(stream, 'data') + assert.equal(result.hasOwnProperty('time'), true) + assert.equal(result.time, iso) + Date.now = now +}) diff --git a/node_modules/pino/test/transport-stream.test.js b/node_modules/pino/test/transport-stream.test.js new file mode 100644 index 0000000..865d9b8 --- /dev/null +++ b/node_modules/pino/test/transport-stream.test.js @@ -0,0 +1,40 @@ +'use strict' + +const test = require('node:test') +const proxyquire = require('proxyquire') +const tspl = require('@matteo.collina/tspl') + +test('should import', async (t) => { + const plan = tspl(t, { plan: 2 }) + const mockRealRequire = (target) => { + return { + default: { + default: () => { + plan.equal(target, 'pino-pretty') + return Promise.resolve() + } + } + } + } + const mockRealImport = async () => { + await Promise.resolve() + throw Object.assign(new Error(), { code: 'ERR_MODULE_NOT_FOUND' }) + } + + const loadTransportStreamBuilder = proxyquire( + '../lib/transport-stream.js', + { + 'real-require': { + realRequire: mockRealRequire, + realImport: mockRealImport + } + } + ) + + const fn = await loadTransportStreamBuilder('pino-pretty') + + await fn() + plan.ok('returned promise resolved') + + await plan +}) diff --git a/node_modules/pino/test/transport/big.test.js b/node_modules/pino/test/transport/big.test.js new file mode 100644 index 0000000..fef70b7 --- /dev/null +++ b/node_modules/pino/test/transport/big.test.js @@ -0,0 +1,42 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { join } = require('node:path') +const { createReadStream } = require('node:fs') +const { promisify } = require('node:util') +const stream = require('node:stream') +const execa = require('execa') +const split = require('split2') + +const { file } = require('../helper') + +const pipeline = promisify(stream.pipeline) +const { Writable } = stream +const sleep = promisify(setTimeout) + +const skip = process.env.CI || process.env.CITGM + +test('eight million lines', { skip }, async () => { + const destination = file() + await execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-many-lines.js'), destination]) + + if (process.platform !== 'win32') { + try { + await execa('sync') // Wait for the file to be written to disk + } catch { + // Just a fallback, this should be unreachable + } + } + await sleep(1_000) // It seems that sync is not enough (even in POSIX systems) + + const toWrite = 8 * 1_000_000 + let count = 0 + await pipeline(createReadStream(destination), split(), new Writable({ + write (chunk, enc, cb) { + count++ + cb() + } + })) + assert.equal(count, toWrite) +}) diff --git a/node_modules/pino/test/transport/bundlers-support.test.js 
b/node_modules/pino/test/transport/bundlers-support.test.js new file mode 100644 index 0000000..45ddfd5 --- /dev/null +++ b/node_modules/pino/test/transport/bundlers-support.test.js @@ -0,0 +1,99 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const { join } = require('node:path') +const { readFile } = require('node:fs').promises + +const { watchFileCreated, file } = require('../helper') +const pino = require('../../pino') + +const { pid } = process +const hostname = os.hostname() + +test('pino.transport with destination overridden by bundler', async (t) => { + globalThis.__bundlerPathsOverrides = { + foobar: join(__dirname, '..', 'fixtures', 'to-file-transport.js') + } + + const destination = file() + const transport = pino.transport({ + target: 'foobar', + options: { destination } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + + globalThis.__bundlerPathsOverrides = undefined +}) + +test('pino.transport with worker destination overridden by bundler', async (t) => { + globalThis.__bundlerPathsOverrides = { + 'pino-worker': join(__dirname, '..', '..', 'lib/worker.js') + } + + const destination = file() + const transport = pino.transport({ + targets: [ + { + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination } + } + ] + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + + globalThis.__bundlerPathsOverrides = undefined +}) + +test('pino.transport with worker destination overridden by bundler and mjs transport', async (t) => { + globalThis.__bundlerPathsOverrides = { + 'pino-worker': join(__dirname, '..', '..', 'lib/worker.js') + } + + const destination = file() + const transport = pino.transport({ + targets: [ + { + target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.es2017.cjs'), + options: { destination } + } + ] + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + + globalThis.__bundlerPathsOverrides = undefined +}) diff --git a/node_modules/pino/test/transport/caller.test.js b/node_modules/pino/test/transport/caller.test.js new file mode 100644 index 0000000..b5f22c0 --- /dev/null +++ b/node_modules/pino/test/transport/caller.test.js @@ -0,0 +1,24 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { join } = require('node:path') +const execa = require('execa') + +test('when using a custom transport outside node_modules, the first file outside node_modules should be used', async function () { + const evalApp = join(__dirname, '../', '/fixtures/eval/index.js') + const { stdout } = await execa(process.argv[0], [evalApp]) + assert.match(stdout, /done!/) +}) + +test('when using a custom transport where some files in stacktrace are in the node_modules, the first file 
outside node_modules should be used', async function () { + const evalApp = join(__dirname, '../', '/fixtures/eval/node_modules/2-files.js') + const { stdout } = await execa(process.argv[0], [evalApp]) + assert.match(stdout, /done!/) +}) + +test('when using a custom transport where all files in stacktrace are in the node_modules, the first file inside node_modules should be used', async function () { + const evalApp = join(__dirname, '../', '/fixtures/eval/node_modules/14-files.js') + const { stdout } = await execa(process.argv[0], [evalApp]) + assert.match(stdout, /done!/) +}) diff --git a/node_modules/pino/test/transport/core.test.js b/node_modules/pino/test/transport/core.test.js new file mode 100644 index 0000000..c3bf470 --- /dev/null +++ b/node_modules/pino/test/transport/core.test.js @@ -0,0 +1,654 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const { join } = require('node:path') +const { once } = require('node:events') +const { setImmediate: immediate } = require('node:timers/promises') +const { readFile, writeFile } = require('node:fs').promises +const url = require('url') +const strip = require('strip-ansi') +const execa = require('execa') +const writer = require('flush-write-stream') +const rimraf = require('rimraf') +const tspl = require('@matteo.collina/tspl') + +const { match, watchFileCreated, watchForWrite, file } = require('../helper') +const pino = require('../../') + +const { tmpdir } = os +const pid = process.pid +const hostname = os.hostname() + +test('pino.transport with file', async (t) => { + const destination = file() + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport with file (no options + error handling)', async () => { + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js') + }) + const [err] = await once(transport, 'error') + assert.equal(err.message, 'kaboom') +}) + +test('pino.transport with file URL', async (t) => { + const destination = file() + const transport = pino.transport({ + target: url.pathToFileURL(join(__dirname, '..', 'fixtures', 'to-file-transport.js')).href, + options: { destination } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport errors if file does not exist', (t, end) => { + const instance = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'non-existent-file'), + worker: { + stdin: true, + stdout: true, + stderr: true + } + }) + instance.on('error', function () { + assert.ok('error received') + end() + }) +}) + +test('pino.transport errors if transport worker module does not export a function', async (t) => { + // TODO: add case for non-pipelined single target (needs changes in thread-stream) + const plan = tspl(t, { plan: 2 }) + const manyTargetsInstance = pino.transport({ + targets: [{ + level: 'info', + target: 
join(__dirname, '..', 'fixtures', 'transport-wrong-export-type.js') + }, { + level: 'info', + target: join(__dirname, '..', 'fixtures', 'transport-wrong-export-type.js') + }] + }) + manyTargetsInstance.on('error', function (e) { + plan.equal(e.message, 'exported worker is not a function') + }) + + const pipelinedInstance = pino.transport({ + pipeline: [{ + target: join(__dirname, '..', 'fixtures', 'transport-wrong-export-type.js') + }] + }) + pipelinedInstance.on('error', function (e) { + plan.equal(e.message, 'exported worker is not a function') + }) + + await plan +}) + +test('pino.transport with esm', async (t) => { + const destination = file() + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.mjs'), + options: { destination } + }) + const instance = pino(transport) + t.after(transport.end.bind(transport)) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport with two files', async (t) => { + const dest1 = file() + const dest2 = file() + const transport = pino.transport({ + targets: [{ + level: 'info', + target: 'file://' + join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: dest1 } + }, { + level: 'info', + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: dest2 } + }] + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)]) + const result1 = JSON.parse(await readFile(dest1)) + delete result1.time + assert.deepEqual(result1, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + const result2 = JSON.parse(await readFile(dest2)) + delete result2.time + assert.deepEqual(result2, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport with two files and custom levels', async (t) => { + const dest1 = file() + const dest2 = file() + const transport = pino.transport({ + targets: [{ + level: 'info', + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: dest1 } + }, { + level: 'foo', + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: dest2 } + }], + levels: { trace: 10, debug: 20, info: 30, warn: 40, error: 50, fatal: 60, foo: 25 } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)]) + const result1 = JSON.parse(await readFile(dest1)) + delete result1.time + assert.deepEqual(result1, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + const result2 = JSON.parse(await readFile(dest2)) + delete result2.time + assert.deepEqual(result2, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport without specifying default levels', async (t) => { + const dest = file() + const transport = pino.transport({ + targets: [{ + level: 'foo', + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: dest } + }], + levels: { foo: 25 } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await Promise.all([watchFileCreated(dest)]) + const result1 = JSON.parse(await readFile(dest)) + delete result1.time + 
assert.deepEqual(result1, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport with two files and dedupe', async (t) => { + const dest1 = file() + const dest2 = file() + const transport = pino.transport({ + dedupe: true, + targets: [{ + level: 'info', + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: dest1 } + }, { + level: 'error', + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: dest2 } + }] + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + instance.error('world') + await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)]) + const result1 = JSON.parse(await readFile(dest1)) + delete result1.time + assert.deepEqual(result1, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + const result2 = JSON.parse(await readFile(dest2)) + delete result2.time + assert.deepEqual(result2, { + pid, + hostname, + level: 50, + msg: 'world' + }) +}) + +test('pino.transport with an array including a pino-pretty destination', async (t) => { + const dest1 = file() + const dest2 = file() + const transport = pino.transport({ + targets: [{ + level: 'info', + target: 'pino/file', + options: { + destination: dest1 + } + }, { + level: 'info', + target: 'pino-pretty', + options: { + destination: dest2 + } + }] + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)]) + const result1 = JSON.parse(await readFile(dest1)) + delete result1.time + assert.deepEqual(result1, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + const actual = (await readFile(dest2)).toString() + assert.match(strip(actual), /\[.*\] INFO.*hello/) +}) + +test('no transport.end()', async (t) => { + const destination = file() + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination } + }) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('autoEnd = false', async (t) => { + const destination = file() + const count = process.listenerCount('exit') + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination }, + worker: { autoEnd: false } + }) + t.after(transport.end.bind(transport)) + await once(transport, 'ready') + + const instance = pino(transport) + instance.info('hello') + + await watchFileCreated(destination) + + assert.equal(count, process.listenerCount('exit')) + + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport with target and targets', async () => { + assert.throws( + () => { + pino.transport({ + target: '/a/file', + targets: [{ + target: '/a/file' + }] + }) + }, + /only one of target or targets can be specified/ + ) +}) + +test('pino.transport with target pino/file', async (t) => { + const destination = file() + const transport = pino.transport({ + target: 'pino/file', + options: { destination } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + 
const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport with target pino/file and mkdir option', async (t) => { + const folder = join(tmpdir(), `pino-${process.pid}-mkdir-transport-file`) + const destination = join(folder, 'log.txt') + t.after(() => { + try { + rimraf.sync(folder) + } catch (err) { + // ignore + } + }) + const transport = pino.transport({ + target: 'pino/file', + options: { destination, mkdir: true } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('pino.transport with target pino/file and append option', async (t) => { + const destination = file() + await writeFile(destination, JSON.stringify({ pid, hostname, time: Date.now(), level: 30, msg: 'hello' })) + const transport = pino.transport({ + target: 'pino/file', + options: { destination, append: false } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('goodbye') + await watchForWrite(destination, '"goodbye"') + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'goodbye' + }) +}) + +test('pino.transport should error with unknown target', async () => { + assert.throws( + () => { + pino.transport({ + target: 'origin', + caller: 'unknown-file.js' + }) + }, + /unable to determine transport target for "origin"/ + ) +}) + +test('pino.transport with target pino-pretty', async (t) => { + const destination = file() + const transport = pino.transport({ + target: 'pino-pretty', + options: { destination } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const actual = await readFile(destination, 'utf8') + assert.match(strip(actual), /\[.*\] INFO.*hello/) +}) + +test('sets worker data informing the transport that pino will send its config', async (t) => { + const plan = tspl(t, { plan: 1 }) + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'transport-worker-data.js') + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + transport.once('workerData', (workerData) => { + match(workerData.workerData, { pinoWillSendConfig: true }) + plan.ok('passed') + }) + instance.info('hello') + + await plan +}) + +test('sets worker data informing the transport that pino will send its config (frozen file)', async (t) => { + const plan = tspl(t, { plan: 1 }) + const config = { + transport: { + target: join(__dirname, '..', 'fixtures', 'transport-worker-data.js'), + options: {} + } + } + Object.freeze(config) + Object.freeze(config.transport) + Object.freeze(config.transport.options) + const instance = pino(config) + const transport = instance[pino.symbols.streamSym] + t.after(transport.end.bind(transport)) + transport.once('workerData', (workerData) => { + match(workerData.workerData, { pinoWillSendConfig: true }) + plan.ok('passed') + }) + instance.info('hello') + + await plan +}) + +test('stdout in worker', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-main.js')]) + + for await (const chunk of 
child.stdout) { + actual += chunk + } + assert.equal(strip(actual).match(/Hello/) != null, true) +}) + +test('log and exit on ready', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-exit-on-ready.js')]) + + child.stdout.pipe(writer((s, enc, cb) => { + actual += s + cb() + })) + await once(child, 'close') + await immediate() + assert.equal(strip(actual).match(/Hello/) != null, true) +}) + +test('log and exit before ready', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-exit-immediately.js')]) + + child.stdout.pipe(writer((s, enc, cb) => { + actual += s + cb() + })) + await once(child, 'close') + await immediate() + assert.equal(strip(actual).match(/Hello/) != null, true) +}) + +test('log and exit before ready with async dest', async () => { + const destination = file() + const child = execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-exit-immediately-with-async-dest.js'), destination]) + + await once(child, 'exit') + + const actual = await readFile(destination, 'utf8') + assert.equal(strip(actual).match(/HELLO/) != null, true) + assert.equal(strip(actual).match(/WORLD/) != null, true) +}) + +test('string integer destination', async () => { + let actual = '' + const child = execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-string-stdout.js')]) + + child.stdout.pipe(writer((s, enc, cb) => { + actual += s + cb() + })) + await once(child, 'close') + await immediate() + assert.equal(strip(actual).match(/Hello/) != null, true) +}) + +test('pino transport options with target', async (t) => { + const destination = file() + const instance = pino({ + transport: { + target: 'pino/file', + options: { destination } + } + }) + const transportStream = instance[pino.symbols.streamSym] + t.after(transportStream.end.bind(transportStream)) + instance.info('transport option test') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'transport option test' + }) +}) + +test('pino transport options with targets', async (t) => { + const dest1 = file() + const dest2 = file() + const instance = pino({ + transport: { + targets: [ + { target: 'pino/file', options: { destination: dest1 } }, + { target: 'pino/file', options: { destination: dest2 } } + ] + } + }) + const transportStream = instance[pino.symbols.streamSym] + t.after(transportStream.end.bind(transportStream)) + instance.info('transport option test') + + await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)]) + const result1 = JSON.parse(await readFile(dest1)) + delete result1.time + assert.deepEqual(result1, { + pid, + hostname, + level: 30, + msg: 'transport option test' + }) + const result2 = JSON.parse(await readFile(dest2)) + delete result2.time + assert.deepEqual(result2, { + pid, + hostname, + level: 30, + msg: 'transport option test' + }) +}) + +test('transport options with target and targets', async () => { + assert.throws( + () => { + pino({ + transport: { + target: {}, + targets: {} + } + }) + }, + /only one of target or targets can be specified/ + ) +}) + +test('transport options with target and stream', async () => { + assert.throws( + () => { + pino({ + transport: { + target: {} + } + }, '/log/null') + }, + /only one of option.transport or stream can be specified/ + ) +}) + +test('transport options with stream', async (t) => { 
+ const dest1 = file() + const transportStream = pino.transport({ target: 'pino/file', options: { destination: dest1 } }) + t.after(transportStream.end.bind(transportStream)) + assert.throws( + () => { + pino({ + transport: transportStream + }) + }, + Error('option.transport do not allow stream, please pass to option directly. e.g. pino(transport)') + ) +}) diff --git a/node_modules/pino/test/transport/core.transpiled.test.ts b/node_modules/pino/test/transport/core.transpiled.test.ts new file mode 100644 index 0000000..d5ea07f --- /dev/null +++ b/node_modules/pino/test/transport/core.transpiled.test.ts @@ -0,0 +1,114 @@ +import test from 'node:test' +import assert from 'node:assert' +import * as os from 'node:os' +import { join } from 'node:path' +import fs from 'node:fs' +import * as url from 'node:url' + +import { watchFileCreated } from '../helper' +import pino from '../../' + +const readFile = fs.promises.readFile + +const { pid } = process +const hostname = os.hostname() + +// A subset of the test from core.test.js, we don't need all of them to check for compatibility +function runTests(esVersion: string): void { + test(`(ts -> ${esVersion}) pino.transport with file`, async (t) => { + const destination = join( + os.tmpdir(), + '_' + Math.random().toString(36).substr(2, 9) + ) + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`), + options: { destination } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination, { encoding: 'utf8' })) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + }) + + test(`(ts -> ${esVersion}) pino.transport with file URL`, async (t) => { + const destination = join( + os.tmpdir(), + '_' + Math.random().toString(36).substr(2, 9) + ) + const transport = pino.transport({ + target: url.pathToFileURL(join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`)).href, + options: { destination } + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination, { encoding: 'utf8' })) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + }) + + test(`(ts -> ${esVersion}) pino.transport with two files`, async (t) => { + const dest1 = join( + os.tmpdir(), + '_' + Math.random().toString(36).substr(2, 9) + ) + const dest2 = join( + os.tmpdir(), + '_' + Math.random().toString(36).substr(2, 9) + ) + const transport = pino.transport({ + targets: [{ + level: 'info', + target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`), + options: { destination: dest1 } + }, { + level: 'info', + target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`), + options: { destination: dest2 } + }] + }) + + t.after(transport.end.bind(transport)) + + const instance = pino(transport) + instance.info('hello') + + await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)]) + + const result1 = JSON.parse(await readFile(dest1, { encoding: 'utf8' })) + delete result1.time + assert.deepEqual(result1, { + pid, + hostname, + level: 30, + msg: 'hello' + }) + const result2 = JSON.parse(await readFile(dest2, { encoding: 'utf8' })) + delete result2.time + assert.deepEqual(result2, { + 
pid, + hostname, + level: 30, + msg: 'hello' + }) + }) +} + +runTests('es5') +runTests('es6') +runTests('es2017') +runTests('esnext') diff --git a/node_modules/pino/test/transport/crash.test.js b/node_modules/pino/test/transport/crash.test.js new file mode 100644 index 0000000..10cac01 --- /dev/null +++ b/node_modules/pino/test/transport/crash.test.js @@ -0,0 +1,36 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { join } = require('node:path') +const { once } = require('node:events') +const { setImmediate: immediate } = require('node:timers/promises') + +const pino = require('../../') + +test('pino.transport emits error if the worker exits with 0 unexpectedly', async (t) => { + // This test will take 10s, because flushSync waits for 10s + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'crashing-transport.js'), + sync: true + }) + t.after(transport.end.bind(transport)) + + await once(transport, 'ready') + + let maybeError + transport.on('error', (err) => { + maybeError = err + }) + + const logger = pino(transport) + for (let i = 0; i < 100000; i++) { + logger.info('hello') + } + + await once(transport.worker, 'exit') + + await immediate() + + assert.equal(maybeError.message, 'the worker has exited') +}) diff --git a/node_modules/pino/test/transport/module-link.test.js b/node_modules/pino/test/transport/module-link.test.js new file mode 100644 index 0000000..32023f2 --- /dev/null +++ b/node_modules/pino/test/transport/module-link.test.js @@ -0,0 +1,241 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const { join } = require('node:path') +const { readFile, symlink, unlink, mkdir, writeFile } = require('node:fs').promises +const { once } = require('node:events') +const execa = require('execa') +const rimraf = require('rimraf') + +const { isWin, isYarnPnp, watchFileCreated, file } = require('../helper') +const pino = require('../../') + +const { pid } = process +const hostname = os.hostname() + +async function installTransportModule (target) { + if (isYarnPnp) { + return + } + try { + await uninstallTransportModule() + } catch {} + + if (!target) { + target = join(__dirname, '..', '..') + } + + await symlink( + join(__dirname, '..', 'fixtures', 'transport'), + join(target, 'node_modules', 'transport') + ) +} + +async function uninstallTransportModule () { + if (isYarnPnp) { + return + } + await unlink(join(__dirname, '..', '..', 'node_modules', 'transport')) +} + +// TODO make this test pass on Windows +test('pino.transport with package', { skip: isWin }, async (t) => { + const destination = file() + + await installTransportModule() + + const transport = pino.transport({ + target: 'transport', + options: { destination } + }) + + t.after(async () => { + await uninstallTransportModule() + transport.end() + }) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +// TODO make this test pass on Windows +test('pino.transport with package as a target', { skip: isWin }, async (t) => { + const destination = file() + + await installTransportModule() + + const transport = pino.transport({ + targets: [{ + target: 'transport', + options: { destination } + }] + }) + t.after(async () => { + await uninstallTransportModule() + transport.end() + 
}) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +// TODO make this test pass on Windows +test('pino({ transport })', { skip: isWin || isYarnPnp }, async (t) => { + const folder = join( + os.tmpdir(), + '_' + Math.random().toString(36).substr(2, 9) + ) + + t.after(() => { + rimraf.sync(folder) + }) + + const destination = join(folder, 'output') + + await mkdir(join(folder, 'node_modules'), { recursive: true }) + + // Link pino + await symlink( + join(__dirname, '..', '..'), + join(folder, 'node_modules', 'pino') + ) + + await installTransportModule(folder) + + const toRun = join(folder, 'index.js') + + const toRunContent = ` + const pino = require('pino') + const logger = pino({ + transport: { + target: 'transport', + options: { destination: '${destination}' } + } + }) + logger.info('hello') + ` + + await writeFile(toRun, toRunContent) + + const child = execa(process.argv[0], [toRun]) + + await once(child, 'close') + + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid: child.pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +// TODO make this test pass on Windows +test('pino({ transport }) from a wrapped dependency', { skip: isWin || isYarnPnp }, async (t) => { + const folder = join( + os.tmpdir(), + '_' + Math.random().toString(36).substr(2, 9) + ) + + const wrappedFolder = join( + os.tmpdir(), + '_' + Math.random().toString(36).substr(2, 9) + ) + + const destination = join(folder, 'output') + + await mkdir(join(folder, 'node_modules'), { recursive: true }) + await mkdir(join(wrappedFolder, 'node_modules'), { recursive: true }) + + t.after(() => { + rimraf.sync(wrappedFolder) + rimraf.sync(folder) + }) + + // Link pino + await symlink( + join(__dirname, '..', '..'), + join(wrappedFolder, 'node_modules', 'pino') + ) + + // Link get-caller-file + await symlink( + join(__dirname, '..', '..', 'node_modules', 'get-caller-file'), + join(wrappedFolder, 'node_modules', 'get-caller-file') + ) + + // Link wrapped + await symlink( + wrappedFolder, + join(folder, 'node_modules', 'wrapped') + ) + + await installTransportModule(folder) + + const pkgjsonContent = { + name: 'pino' + } + + await writeFile(join(wrappedFolder, 'package.json'), JSON.stringify(pkgjsonContent)) + + const wrapped = join(wrappedFolder, 'index.js') + + const wrappedContent = ` + const pino = require('pino') + const getCaller = require('get-caller-file') + + module.exports = function build () { + const logger = pino({ + transport: { + caller: getCaller(), + target: 'transport', + options: { destination: '${destination}' } + } + }) + return logger + } + ` + + await writeFile(wrapped, wrappedContent) + + const toRun = join(folder, 'index.js') + + const toRunContent = ` + const logger = require('wrapped')() + logger.info('hello') + ` + + await writeFile(toRun, toRunContent) + + const child = execa(process.argv[0], [toRun]) + + await once(child, 'close') + + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid: child.pid, + hostname, + level: 30, + msg: 'hello' + }) +}) diff --git a/node_modules/pino/test/transport/pipeline.test.js b/node_modules/pino/test/transport/pipeline.test.js new file mode 100644 index 0000000..8e86a89 --- /dev/null +++ b/node_modules/pino/test/transport/pipeline.test.js @@ 
-0,0 +1,137 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const { join } = require('node:path') +const { readFile } = require('node:fs').promises + +const { watchFileCreated, file } = require('../helper') +const pino = require('../../') +const { DEFAULT_LEVELS } = require('../../lib/constants') + +const { pid } = process +const hostname = os.hostname() + +test('pino.transport with a pipeline', async (t) => { + const destination = file() + const transport = pino.transport({ + pipeline: [{ + target: join(__dirname, '..', 'fixtures', 'transport-transform.js') + }, { + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination } + }] + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: DEFAULT_LEVELS.info, + msg: 'hello', + service: 'pino' // this property was added by the transform + }) +}) + +test('pino.transport with targets containing pipelines', async (t) => { + const destinationA = file() + const destinationB = file() + const transport = pino.transport({ + targets: [ + { + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: destinationA } + }, + { + pipeline: [ + { + target: join(__dirname, '..', 'fixtures', 'transport-transform.js') + }, + { + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: destinationB } + } + ] + } + ] + }) + + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello') + await watchFileCreated(destinationA) + await watchFileCreated(destinationB) + const resultA = JSON.parse(await readFile(destinationA)) + const resultB = JSON.parse(await readFile(destinationB)) + delete resultA.time + delete resultB.time + assert.deepEqual(resultA, { + pid, + hostname, + level: DEFAULT_LEVELS.info, + msg: 'hello' + }) + assert.deepEqual(resultB, { + pid, + hostname, + level: DEFAULT_LEVELS.info, + msg: 'hello', + service: 'pino' // this property was added by the transform + }) +}) + +test('pino.transport with targets containing pipelines with levels defined and dedupe', async (t) => { + const destinationA = file() + const destinationB = file() + const transport = pino.transport({ + targets: [ + { + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: destinationA }, + level: DEFAULT_LEVELS.info + }, + { + pipeline: [ + { + target: join(__dirname, '..', 'fixtures', 'transport-transform.js') + }, + { + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: destinationB } + } + ], + level: DEFAULT_LEVELS.error + } + ], + dedupe: true + }) + + t.after(transport.end.bind(transport)) + const instance = pino(transport) + instance.info('hello info') + instance.error('hello error') + await watchFileCreated(destinationA) + await watchFileCreated(destinationB) + const resultA = JSON.parse(await readFile(destinationA)) + const resultB = JSON.parse(await readFile(destinationB)) + delete resultA.time + delete resultB.time + assert.deepEqual(resultA, { + pid, + hostname, + level: DEFAULT_LEVELS.info, + msg: 'hello info' + }) + assert.deepEqual(resultB, { + pid, + hostname, + level: DEFAULT_LEVELS.error, + msg: 'hello error', + service: 'pino' // this property was 
added by the transform + }) +}) diff --git a/node_modules/pino/test/transport/repl.test.js b/node_modules/pino/test/transport/repl.test.js new file mode 100644 index 0000000..cdb7ae0 --- /dev/null +++ b/node_modules/pino/test/transport/repl.test.js @@ -0,0 +1,15 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const proxyquire = require('proxyquire') + +test('pino.transport resolves targets in REPL', async () => { + // Arrange + const transport = proxyquire('../../lib/transport', { + './caller': () => ['node:repl'] + }) + + // Act / Assert + assert.doesNotThrow(() => transport({ target: 'pino-pretty' })) +}) diff --git a/node_modules/pino/test/transport/sync-false.test.js b/node_modules/pino/test/transport/sync-false.test.js new file mode 100644 index 0000000..633d991 --- /dev/null +++ b/node_modules/pino/test/transport/sync-false.test.js @@ -0,0 +1,70 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const { join } = require('node:path') +const { readFile } = require('node:fs').promises +const { promisify } = require('node:util') + +const pino = require('../..') +const { watchFileCreated, file } = require('../helper') + +const { pid } = process +const hostname = os.hostname() + +test('thread-stream async flush', async () => { + const destination = file() + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination } + }) + const instance = pino(transport) + instance.info('hello') + + assert.equal(instance.flush(), undefined) + + await watchFileCreated(destination) + const result = JSON.parse(await readFile(destination)) + delete result.time + assert.deepEqual(result, { + pid, + hostname, + level: 30, + msg: 'hello' + }) +}) + +test('thread-stream async flush should call the passed callback', async () => { + const outputPath = file() + async function getOutputLogLines () { + return (await readFile(outputPath)).toString().trim().split('\n').map(JSON.parse) + } + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: outputPath } + }) + const instance = pino(transport) + const flushPromise = promisify(instance.flush).bind(instance) + + instance.info('hello') + await flushPromise() + await watchFileCreated(outputPath) + + const [firstFlushData] = await getOutputLogLines() + + assert.equal(firstFlushData.msg, 'hello') + + // should not flush this as no data accumulated that's bigger than min length + instance.info('world') + + // Making sure data is not flushed yet + const afterLogData = await getOutputLogLines() + assert.equal(afterLogData.length, 1) + + await flushPromise() + + // Making sure the data is flushed after the second flush + const afterSecondFlush = (await getOutputLogLines())[1] + assert.equal(afterSecondFlush.msg, 'world') +}) diff --git a/node_modules/pino/test/transport/sync-true.test.js b/node_modules/pino/test/transport/sync-true.test.js new file mode 100644 index 0000000..3624549 --- /dev/null +++ b/node_modules/pino/test/transport/sync-true.test.js @@ -0,0 +1,57 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { join } = require('node:path') +const { readFileSync } = require('node:fs') + +const { file } = require('../helper') +const pino = require('../..') + +test('thread-stream sync true should log synchronously', async () => { + const outputPath = file() + + function 
getOutputLogLines () { + return (readFileSync(outputPath)).toString().trim().split('\n').map(JSON.parse) + } + + const transport = pino.transport({ + target: join(__dirname, '..', 'fixtures', 'to-file-transport.js'), + options: { destination: outputPath, flush: true }, + sync: true + }) + const instance = pino(transport) + + const value = { message: 'sync' } + instance.info(value) + instance.info(value) + instance.info(value) + instance.info(value) + instance.info(value) + instance.info(value) + let interrupt = false + let flushData + let loopCounter = 0 + + // Start a synchronous loop + while (!interrupt && loopCounter < (process.env.MAX_TEST_LOOP_ITERATION || 20000)) { + try { + loopCounter++ + const data = getOutputLogLines() + flushData = data + if (data) { + interrupt = true + break + } + } catch (error) { + // File may not exist yet + // Wait till MAX_TEST_LOOP_ITERATION iterations + } + } + + if (!interrupt) { + throw new Error('Sync loop was not interrupted') + } + + assert.equal(flushData.length, 6) +}) diff --git a/node_modules/pino/test/transport/targets.test.js b/node_modules/pino/test/transport/targets.test.js new file mode 100644 index 0000000..c7a80fa --- /dev/null +++ b/node_modules/pino/test/transport/targets.test.js @@ -0,0 +1,48 @@ +'use strict' + +const test = require('node:test') +const { join } = require('node:path') +const Writable = require('node:stream').Writable +const proxyquire = require('proxyquire') +const tspl = require('@matteo.collina/tspl') +const pino = require('../../pino') + +test('file-target mocked', async function (t) { + const plan = tspl(t, { plan: 1 }) + let ret + const fileTarget = proxyquire('../../file', { + './pino': { + destination (opts) { + plan.deepEqual(opts, { dest: 1, sync: false }) + + ret = new Writable() + ret.fd = opts.dest + + process.nextTick(() => { + ret.emit('ready') + }) + + return ret + } + } + }) + + await fileTarget() + await plan +}) + +test('pino.transport with syntax error', async (t) => { + const plan = tspl(t, { plan: 1 }) + const transport = pino.transport({ + targets: [{ + target: join(__dirname, '..', 'fixtures', 'syntax-error-esm.mjs') + }] + }) + t.after(transport.end.bind(transport)) + + transport.on('error', (err) => { + plan.deepEqual(err, new SyntaxError('Unexpected end of input')) + }) + + await plan +}) diff --git a/node_modules/pino/test/transport/uses-pino-config.test.js b/node_modules/pino/test/transport/uses-pino-config.test.js new file mode 100644 index 0000000..7546812 --- /dev/null +++ b/node_modules/pino/test/transport/uses-pino-config.test.js @@ -0,0 +1,166 @@ +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const os = require('node:os') +const { join } = require('node:path') +const { readFile } = require('node:fs').promises +const writeStream = require('flush-write-stream') + +const { watchFileCreated, file } = require('../helper') +const pino = require('../../') + +const { pid } = process +const hostname = os.hostname() + +function serializeError (error) { + return { + type: error.name, + message: error.message, + stack: error.stack + } +} + +function parseLogs (buffer) { + return JSON.parse(`[${buffer.toString().replace(/}{/g, '},{')}]`) +} + +test('transport uses pino config', async (t) => { + const destination = file() + const transport = pino.transport({ + pipeline: [{ + target: join(__dirname, '..', 'fixtures', 'transport-uses-pino-config.js') + }, { + target: 'pino/file', + options: { destination } + }] + }) + t.after(transport.end.bind(transport)) + 
const instance = pino({ + messageKey: 'customMessageKey', + errorKey: 'customErrorKey', + customLevels: { custom: 35 } + }, transport) + + const error = new Error('bar') + instance.custom('foo') + instance.error(error) + await watchFileCreated(destination) + const result = parseLogs(await readFile(destination)) + + assert.deepEqual(result, [{ + severityText: 'custom', + body: 'foo', + attributes: { + pid, + hostname + } + }, { + severityText: 'error', + body: 'bar', + attributes: { + pid, + hostname + }, + error: serializeError(error) + }]) +}) + +test('transport uses pino config without customizations', async (t) => { + const destination = file() + const transport = pino.transport({ + pipeline: [{ + target: join(__dirname, '..', 'fixtures', 'transport-uses-pino-config.js') + }, { + target: 'pino/file', + options: { destination } + }] + }) + t.after(transport.end.bind(transport)) + const instance = pino(transport) + + const error = new Error('qux') + instance.info('baz') + instance.error(error) + await watchFileCreated(destination) + const result = parseLogs(await readFile(destination)) + + assert.deepEqual(result, [{ + severityText: 'info', + body: 'baz', + attributes: { + pid, + hostname + } + }, { + severityText: 'error', + body: 'qux', + attributes: { + pid, + hostname + }, + error: serializeError(error) + }]) +}) + +test('transport uses pino config with multistream', async (t) => { + const destination = file() + const messages = [] + const stream = writeStream(function (data, enc, cb) { + const message = JSON.parse(data) + delete message.time + messages.push(message) + cb() + }) + const transport = pino.transport({ + pipeline: [{ + target: join(__dirname, '..', 'fixtures', 'transport-uses-pino-config.js') + }, { + target: 'pino/file', + options: { destination } + }] + }) + t.after(transport.end.bind(transport)) + const instance = pino({ + messageKey: 'customMessageKey', + errorKey: 'customErrorKey', + customLevels: { custom: 35 } + }, pino.multistream([transport, { stream }])) + + const error = new Error('buzz') + const serializedError = serializeError(error) + instance.custom('fizz') + instance.error(error) + await watchFileCreated(destination) + const result = parseLogs(await readFile(destination)) + + assert.deepEqual(result, [{ + severityText: 'custom', + body: 'fizz', + attributes: { + pid, + hostname + } + }, { + severityText: 'error', + body: 'buzz', + attributes: { + pid, + hostname + }, + error: serializedError + }]) + + assert.deepEqual(messages, [{ + level: 35, + pid, + hostname, + customMessageKey: 'fizz' + }, { + level: 50, + pid, + hostname, + customErrorKey: serializedError, + customMessageKey: 'buzz' + }]) +}) diff --git a/node_modules/pino/test/types/pino-import.test-d.cts b/node_modules/pino/test/types/pino-import.test-d.cts new file mode 100644 index 0000000..e0f941a --- /dev/null +++ b/node_modules/pino/test/types/pino-import.test-d.cts @@ -0,0 +1,30 @@ +import { expectType } from "tsd"; + +import * as pinoStar from "../../pino"; +import { default as P, default as pino, pino as pinoNamed } from '../../pino'; +import pinoCjsImport = require ("../../pino"); +const pinoCjs = require("../../pino"); +const { P: pinoCjsNamed } = require('pino') + +const log = pino(); +expectType(log.info); +expectType(log.error); + +expectType(pinoNamed()); +expectType(pinoNamed()); +expectType(pinoStar.default()); +expectType(pinoStar.pino()); +// expectType(pinoCjsImport.default()); +expectType(pinoCjsImport.pino()); +expectType(pinoCjsNamed()); +expectType(pinoCjs()); 
+expectType(pinoNamed.stdTimeFunctions.isoTimeNano) +expectType(pinoNamed.stdTimeFunctions.isoTimeNano()) + +const levelChangeEventListener: P.LevelChangeEventListener = ( + lvl: P.LevelWithSilent | string, + val: number, + prevLvl: P.LevelWithSilent | string, + prevVal: number, +) => {} +expectType(levelChangeEventListener) diff --git a/node_modules/pino/test/types/pino-multistream.test-d.ts b/node_modules/pino/test/types/pino-multistream.test-d.ts new file mode 100644 index 0000000..6206eca --- /dev/null +++ b/node_modules/pino/test/types/pino-multistream.test-d.ts @@ -0,0 +1,28 @@ +import { expectType } from 'tsd' + +import { createWriteStream } from 'node:fs' + +import pino, { multistream } from '../../pino' + +const streams = [ + { stream: process.stdout }, + { stream: createWriteStream('') }, + { level: 'error' as const, stream: process.stderr }, + { level: 'fatal' as const, stream: process.stderr }, +] + +expectType(pino.multistream(process.stdout)) +expectType(pino.multistream([createWriteStream('')])) +expectType>(pino.multistream({ level: 'error' as const, stream: process.stderr })) +expectType>(pino.multistream([{ level: 'fatal' as const, stream: createWriteStream('') }])) + +expectType>(pino.multistream(streams)) +expectType>(pino.multistream(streams, {})) +expectType>(pino.multistream(streams, { levels: { 'info': 30 } })) +expectType>(pino.multistream(streams, { dedupe: true })) +expectType>(pino.multistream(streams[0]).add(streams[1])) +expectType>(multistream(streams)) +expectType>(multistream(streams).clone('error')) + + +expectType(multistream(process.stdout)); diff --git a/node_modules/pino/test/types/pino-top-export.test-d.ts b/node_modules/pino/test/types/pino-top-export.test-d.ts new file mode 100644 index 0000000..6e2b263 --- /dev/null +++ b/node_modules/pino/test/types/pino-top-export.test-d.ts @@ -0,0 +1,36 @@ +import { expectType, expectAssignable } from 'tsd' +import type { SonicBoom } from "sonic-boom"; + +import { + destination, + LevelMapping, + levels, + Logger, + multistream, + MultiStreamRes, + SerializedError, + stdSerializers, + stdTimeFunctions, + symbols, + transport, + version, +} from "../../pino"; +import pino from "../../pino"; + +expectType(destination("")); +expectType(levels); +expectType(multistream(process.stdout)); +expectType(stdSerializers.err({} as any)); +expectType(stdTimeFunctions.isoTime()); +expectType(stdTimeFunctions.isoTimeNano()); +expectType(version); + +// Can't test against `unique symbol`, see https://github.com/SamVerschueren/tsd/issues/49 +expectAssignable(symbols.endSym); + +// TODO: currently returns (aliased) `any`, waiting for strong typed `thread-stream` +transport({ + target: '#pino/pretty', + options: { some: 'options for', the: 'transport' } +}); + diff --git a/node_modules/pino/test/types/pino-transport.test-d.ts b/node_modules/pino/test/types/pino-transport.test-d.ts new file mode 100644 index 0000000..0156dc1 --- /dev/null +++ b/node_modules/pino/test/types/pino-transport.test-d.ts @@ -0,0 +1,145 @@ +import pino from '../../pino' +import { expectType } from "tsd"; + +// Single +const transport = pino.transport({ + target: '#pino/pretty', + options: { some: 'options for', the: 'transport' } +}) +pino(transport) + +expectType(pino({ + transport: { + target: 'pino-pretty' + }, +})) + +// Multiple +const transports = pino.transport({targets: [ + { + level: 'info', + target: '#pino/pretty', + options: { some: 'options for', the: 'transport' } + }, + { + level: 'trace', + target: '#pino/file', + options: { destination: 
'./test.log' } + } +]}) +pino(transports) + +expectType(pino({ + transport: {targets: [ + { + level: 'info', + target: '#pino/pretty', + options: { some: 'options for', the: 'transport' } + }, + { + level: 'trace', + target: '#pino/file', + options: { destination: './test.log' } + } + ]}, +})) + +const transportsWithCustomLevels = pino.transport({targets: [ + { + level: 'info', + target: '#pino/pretty', + options: { some: 'options for', the: 'transport' } + }, + { + level: 'foo', + target: '#pino/file', + options: { destination: './test.log' } + } +], levels: { foo: 35 }}) +pino(transports) + +expectType(pino({ + transport: {targets: [ + { + level: 'info', + target: '#pino/pretty', + options: { some: 'options for', the: 'transport' } + }, + { + level: 'trace', + target: '#pino/file', + options: { destination: './test.log' } + } + ], levels: { foo: 35 } + }, +})) + +const transportsWithoutOptions = pino.transport({ + targets: [ + { target: '#pino/pretty' }, + { target: '#pino/file' } + ], levels: { foo: 35 } +}) +pino(transports) + +expectType(pino({ + transport: { + targets: [ + { target: '#pino/pretty' }, + { target: '#pino/file' } + ], levels: { foo: 35 } + }, +})) + +const pipelineTransport = pino.transport({ + pipeline: [{ + target: './my-transform.js' + }, { + // Use target: 'pino/file' to write to stdout + // without any change. + target: 'pino-pretty' + }] +}) +pino(pipelineTransport) + +expectType(pino({ + transport: { + pipeline: [{ + target: './my-transform.js' + }, { + // Use target: 'pino/file' to write to stdout + // without any change. + target: 'pino-pretty' + }] + } +})) + +type TransportConfig = { + id: string +} + +// Custom transport params +const customTransport = pino.transport({ + target: 'custom', + options: { id: 'abc' } +}) +pino(customTransport) + +// Worker +pino.transport({ + target: 'custom', + worker: { + argv: ['a', 'b'], + stdin: false, + stderr: true, + stdout: false, + autoEnd: true, + }, + options: { id: 'abc' } +}) + +// Dedupe +pino.transport({ + targets: [], + dedupe: true, +}) diff --git a/node_modules/pino/test/types/pino-type-only.test-d.ts b/node_modules/pino/test/types/pino-type-only.test-d.ts new file mode 100644 index 0000000..dc64947 --- /dev/null +++ b/node_modules/pino/test/types/pino-type-only.test-d.ts @@ -0,0 +1,66 @@ +import { expectAssignable, expectType, expectNotAssignable } from "tsd"; + +import pino from "../../"; +import type {LevelWithSilent, Logger, LogFn, DestinationStreamWithMetadata, Level, LevelOrString, LevelWithSilentOrString, LoggerExtras, LoggerOptions } from "../../pino"; + +// NB: can also use `import * as pino`, but that form is callable as `pino()` +// under `esModuleInterop: false` or `pino.default()` under `esModuleInterop: true`. 
+const log = pino(); +expectAssignable(log); +expectType(log); +expectType(log.info); + +expectType>([log.level]); + +const level: Level = 'debug'; +expectAssignable(level); + +const levelWithSilent: LevelWithSilent = 'silent'; +expectAssignable(levelWithSilent); + +const levelOrString: LevelOrString = "myCustomLevel"; +expectAssignable(levelOrString); +expectNotAssignable(levelOrString); +expectNotAssignable(levelOrString); +expectAssignable(levelOrString); + +const levelWithSilentOrString: LevelWithSilentOrString = "myCustomLevel"; +expectAssignable(levelWithSilentOrString); +expectNotAssignable(levelWithSilentOrString); +expectNotAssignable(levelWithSilentOrString); +expectAssignable(levelWithSilentOrString); + +function createStream(): DestinationStreamWithMetadata { + return { write() {} }; +} + +const stream = createStream(); +// Argh. TypeScript doesn't seem to narrow unless we assign the symbol like so, and tsd seems to +// break without annotating the type explicitly +const needsMetadata: typeof pino.symbols.needsMetadataGsym = pino.symbols.needsMetadataGsym; +if (stream[needsMetadata]) { + expectType(stream.lastLevel); +} + +const loggerOptions:LoggerOptions = { + browser: { + formatters: { + log(obj) { + return obj + }, + level(label, number) { + return { label, number} + } + + } + } +} + +expectType(loggerOptions) + +// Reference: https://github.com/pinojs/pino/issues/2285 +const someConst = "test" as const; +pino().error({}, someConst); +const someFunc = (someConst: T) => { + pino().error({}, someConst); +}; diff --git a/node_modules/pino/test/types/pino.test-d.ts b/node_modules/pino/test/types/pino.test-d.ts new file mode 100644 index 0000000..67539db --- /dev/null +++ b/node_modules/pino/test/types/pino.test-d.ts @@ -0,0 +1,596 @@ +import { IncomingMessage, ServerResponse } from "http"; +import { mock } from 'node:test' +import { Socket } from "net"; +import { expectError, expectType } from 'tsd'; +import pino, { LogFn, LoggerOptions } from "../../"; +import Logger = pino.Logger; + +const log = pino(); +const info = log.info; +const error = log.error; + +info("hello world"); +error("this is at error level"); + +// primitive types +info('simple string'); +info(true) +info(42); +info(3.14); +info(null); +info(undefined); + +// object types +info({ a: 1, b: '2' }); +info(new Error()); +info(new Date()); +info([]) +info(new Map()); +info(new Set()); + +// placeholder messages +info('Hello %s', 'world'); +info('The answer is %d', 42); +info('The object is %o', { a: 1, b: '2' }); +info('The json is %j', { a: 1, b: '2' }); +info('The object is %O', { a: 1, b: '2' }); +info('The answer is %d and the question is %s with %o', 42, 'unknown', { correct: 'order' }); +info('Missing placeholder is fine %s'); + +// %s placeholder supports all primitive types +info('Boolean %s', true); +info('Boolean %s', false); +info('Number %s', 123); +info('Number %s', 3.14); +info('BigInt %s', BigInt(123)); +info('Null %s', null); +info('Undefined %s', undefined); +info('Symbol %s', Symbol('test')); +info('String %s', 'hello'); + +// %s placeholder with multiple primitives +info('Multiple primitives %s %s %s', true, 42, 'world'); +info('All primitive types %s %s %s %s %s %s %s', 'string', 123, true, BigInt(123), null, undefined, Symbol('test')); +declare const errorOrString: string | Error; +info(errorOrString) + +// placeholder messages type errors +expectError(info('The answer is %d', 'not a number')); +expectError(info('The object is %o', 'not an object')); +expectError(info('The object is %j', 'not 
a JSON')); +expectError(info('The object is %O', 'not an object')); +expectError(info('The answer is %d and the question is %s with %o', 42, { incorrect: 'order' }, 'unknown')); +expectError(info('Extra message %s', 'after placeholder', 'not allowed')); + +// object types with messages +info({ obj: 42 }, "hello world"); +info({ obj: 42, b: 2 }, "hello world"); +info({ obj: { aa: "bbb" } }, "another"); +info({ a: 1, b: '2' }, 'hello world with %s', 'extra data'); + +// Extra message after placeholder +expectError(info({ a: 1, b: '2' }, 'hello world with %d', 2, 'extra' )); + +// metadata with messages type passes, because of custom toString method +// We can't detect if the object has a custom toString method that returns a string +info({ a: 1, b: '2' }, 'hello world with %s', {}); + +// metadata after message +expectError(info('message', { a: 1, b: '2' })); + +// multiple strings without placeholder +expectError(info('string1', 'string2')); +expectError(info('string1', 'string2', 'string3')); + +setImmediate(info, "after setImmediate"); +error(new Error("an error")); + +const writeSym = pino.symbols.writeSym; + +const testUniqSymbol = { + [pino.symbols.needsMetadataGsym]: true, +}[pino.symbols.needsMetadataGsym]; + +const log2: pino.Logger = pino({ + name: "myapp", + safe: true, + serializers: { + req: pino.stdSerializers.req, + res: pino.stdSerializers.res, + err: pino.stdSerializers.err, + }, +}); + +pino({ + write(o) {}, +}); + +pino({ + mixin() { + return { customName: "unknown", customId: 111 }; + }, +}); + +pino({ + mixin: () => ({ customName: "unknown", customId: 111 }), +}); + +pino({ + mixin: (context: object) => ({ customName: "unknown", customId: 111 }), +}); + +pino({ + mixin: (context: object, level: number) => ({ customName: "unknown", customId: 111 }), +}); + +pino({ + redact: { paths: [], censor: "SECRET" }, +}); + +pino({ + redact: { paths: [], censor: () => "SECRET" }, +}); + +pino({ + redact: { paths: [], censor: (value) => value }, +}); + +pino({ + redact: { paths: [], censor: (value, path) => path.join() }, +}); + +pino({ + redact: { + paths: [], + censor: (value): string => 'SECRET', + }, +}); + +expectError(pino({ + redact: { paths: [], censor: (value: string) => value }, +})); + +pino({ + depthLimit: 1 +}); + +pino({ + edgeLimit: 1 +}); + +pino({ + browser: { + write(o) {}, + }, +}); + +pino({ + browser: { + write: { + info(o) {}, + error(o) {}, + }, + serialize: true, + asObject: true, + transmit: { + level: "fatal", + send: (level, logEvent) => { + level; + logEvent.bindings; + logEvent.level; + logEvent.ts; + logEvent.messages; + }, + }, + disabled: false + }, +}); + +pino({ + browser: { + asObjectBindingsOnly: true, + } +}); + +pino({}, undefined); + +pino({ base: null }); +if ("pino" in log) console.log(`pino version: ${log.pino}`); + +expectType(log.flush()); +log.flush((err?: Error) => undefined); +log.child({ a: "property" }).info("hello child!"); +log.level = "error"; +log.info("nope"); +const child = log.child({ foo: "bar" }); +child.info("nope again"); +child.level = "info"; +child.info("hooray"); +log.info("nope nope nope"); +log.child({ foo: "bar" }, { level: "debug" }).debug("debug!"); +child.bindings(); +const customSerializers = { + test() { + return "this is my serializer"; + }, +}; +pino().child({}, { serializers: customSerializers }).info({ test: "should not show up" }); +const child2 = log.child({ father: true }); +const childChild = child2.child({ baby: true }); +const childRedacted = pino().child({}, { redact: ["path"] }) +childRedacted.info({ 
+ msg: "logged with redacted properties", + path: "Not shown", +}); +const childAnotherRedacted = pino().child({}, { + redact: { + paths: ["anotherPath"], + censor: "Not the log you\re looking for", + } +}) +childAnotherRedacted.info({ + msg: "another logged with redacted properties", + anotherPath: "Not shown", +}); + +log.level = "info"; +if (log.levelVal === 30) { + console.log("logger level is `info`"); +} + +const listener = (lvl: any, val: any, prevLvl: any, prevVal: any) => { + console.log(lvl, val, prevLvl, prevVal); +}; +log.on("level-change", (lvl, val, prevLvl, prevVal, logger) => { + console.log(lvl, val, prevLvl, prevVal); +}); +log.level = "trace"; +log.removeListener("level-change", listener); +log.level = "info"; + +pino.levels.values.error === 50; +pino.levels.labels[50] === "error"; + +const logstderr: pino.Logger = pino(process.stderr); +logstderr.error("on stderr instead of stdout"); + +log.useLevelLabels = true; +log.info("lol"); +log.level === "info"; +const isEnabled: boolean = log.isLevelEnabled("info"); + +const redacted = pino({ + redact: ["path"], +}); + +redacted.info({ + msg: "logged with redacted properties", + path: "Not shown", +}); + +const anotherRedacted = pino({ + redact: { + paths: ["anotherPath"], + censor: "Not the log you\re looking for", + }, +}); + +anotherRedacted.info({ + msg: "another logged with redacted properties", + anotherPath: "Not shown", +}); + +const withTimeFn = pino({ + timestamp: pino.stdTimeFunctions.isoTime, +}); + +const withRFC3339TimeFn = pino({ + timestamp: pino.stdTimeFunctions.isoTimeNano, +}); + +const withNestedKey = pino({ + nestedKey: "payload", +}); + +const withHooks = pino({ + hooks: { + logMethod(args, method, level) { + expectType(this); + return method.apply(this, args); + }, + streamWrite(s) { + expectType(s); + return s.replaceAll('secret-key', 'xxx'); + }, + }, +}); + +// Properties/types imported from pino-std-serializers +const wrappedErrSerializer = pino.stdSerializers.wrapErrorSerializer((err: pino.SerializedError) => { + return { ...err, newProp: "foo" }; +}); +const wrappedReqSerializer = pino.stdSerializers.wrapRequestSerializer((req: pino.SerializedRequest) => { + return { ...req, newProp: "foo" }; +}); +const wrappedResSerializer = pino.stdSerializers.wrapResponseSerializer((res: pino.SerializedResponse) => { + return { ...res, newProp: "foo" }; +}); + +const socket = new Socket(); +const incomingMessage = new IncomingMessage(socket); +const serverResponse = new ServerResponse(incomingMessage); + +const mappedHttpRequest: { req: pino.SerializedRequest } = pino.stdSerializers.mapHttpRequest(incomingMessage); +const mappedHttpResponse: { res: pino.SerializedResponse } = pino.stdSerializers.mapHttpResponse(serverResponse); + +const serializedErr: pino.SerializedError = pino.stdSerializers.err(new Error()); +const serializedReq: pino.SerializedRequest = pino.stdSerializers.req(incomingMessage); +const serializedRes: pino.SerializedResponse = pino.stdSerializers.res(serverResponse); + +/** + * Destination static method + */ +const destinationViaDefaultArgs = pino.destination(); +const destinationViaStrFileDescriptor = pino.destination("/log/path"); +const destinationViaNumFileDescriptor = pino.destination(2); +const destinationViaStream = pino.destination(process.stdout); +const destinationViaOptionsObject = pino.destination({ dest: "/log/path", sync: false }); + +pino(destinationViaDefaultArgs); +pino({ name: "my-logger" }, destinationViaDefaultArgs); +pino(destinationViaStrFileDescriptor); +pino({ name: 
"my-logger" }, destinationViaStrFileDescriptor); +pino(destinationViaNumFileDescriptor); +pino({ name: "my-logger" }, destinationViaNumFileDescriptor); +pino(destinationViaStream); +pino({ name: "my-logger" }, destinationViaStream); +pino(destinationViaOptionsObject); +pino({ name: "my-logger" }, destinationViaOptionsObject); + +try { + throw new Error('Some error') +} catch (err) { + log.error(err) +} + +interface StrictShape { + activity: string; + err?: unknown; +} + +info({ + activity: "Required property", +}); + +const logLine: pino.LogDescriptor = { + level: 20, + msg: "A log message", + time: new Date().getTime(), + aCustomProperty: true, +}; + +interface CustomLogger extends pino.Logger { + customMethod(msg: string, ...args: unknown[]): void; +} + +const serializerFunc: pino.SerializerFn = () => {} +const writeFunc: pino.WriteFn = () => {} + +interface CustomBaseLogger extends pino.BaseLogger { + child(): CustomBaseLogger +} + +const customBaseLogger: CustomBaseLogger = { + level: 'info', + fatal() {}, + error() {}, + warn() {}, + info() {}, + debug() {}, + trace() {}, + silent() {}, + child() { return this }, + msgPrefix: 'prefix', +} + +// custom levels +const log3 = pino({ customLevels: { myLevel: 100 } }) +expectError(log3.log()) +log3.level = 'myLevel' +log3.myLevel('') +log3.child({}).myLevel('') + +log3.on('level-change', (lvl, val, prevLvl, prevVal, instance) => { + instance.myLevel('foo'); +}); + +const clog3 = log3.child({}, { customLevels: { childLevel: 120 } }) +// child inherit parent +clog3.myLevel('') +// child itself +clog3.childLevel('') +const cclog3 = clog3.child({}, { customLevels: { childLevel2: 130 } }) +// child inherit root +cclog3.myLevel('') +// child inherit parent +cclog3.childLevel('') +// child itself +cclog3.childLevel2('') + +const ccclog3 = clog3.child({}) +expectError(ccclog3.nonLevel('')) + +const withChildCallback = pino({ + onChild: (child: Logger) => {} +}) +withChildCallback.onChild = (child: Logger) => {} + +pino({ + crlf: true, +}); + +const customLevels = { foo: 99, bar: 42 } + +const customLevelLogger = pino({ customLevels }); + +type CustomLevelLogger = typeof customLevelLogger +type CustomLevelLoggerLevels = pino.Level | keyof typeof customLevels + +const fn = (logger: Pick) => {} + +const customLevelChildLogger = customLevelLogger.child({ name: "child" }) + +fn(customLevelChildLogger); // missing foo typing + +// unknown option +expectError( + pino({ + hello: 'world' + }) +); + +// unknown option +expectError( + pino({ + hello: 'world', + customLevels: { + 'log': 30 + } + }) +); + +function dangerous () { + throw Error('foo') +} + +try { + dangerous() +} catch (err) { + log.error(err) +} + +try { + dangerous() +} catch (err) { + log.error({ err }) +} + +const bLogger = pino({ + customLevels: { + log: 5, + }, + level: 'log', + transport: { + target: 'pino-pretty', + options: { + colorize: true, + }, + }, +}); + +// Test that we can properly extract parameters from the log fn type +type LogParam = Parameters +const [param1, param2, param3, param4]: LogParam = [{ multiple: 'params' }, 'should', 'be', 'accepted'] + +expectType(param1) +expectType(param2) +expectType(param3) +expectType(param4) + +const logger = mock.fn() +logger.mock.calls[0].arguments[1]?.includes('I should be able to get params') + +const hooks: LoggerOptions['hooks'] = { + logMethod(this, parameters, method) { + if (parameters.length >= 2) { + const [parameter1, parameter2, ...remainingParameters] = parameters; + if (typeof parameter1 === 'string') { + return 
+      }
+      return method.apply(this, [parameter2]);
+    }
+
+    return method.apply(this, parameters);
+  }
+}
+
+expectType<Logger<'log'>>(pino({
+  customLevels: {
+    log: 5,
+  },
+  level: 'log',
+  transport: {
+    target: 'pino-pretty',
+    options: {
+      colorize: true,
+    },
+  },
+}))
+
+const parentLogger1 = pino({
+  customLevels: { myLevel: 90 },
+  onChild: (child) => { const a = child.myLevel; }
+}, process.stdout)
+parentLogger1.onChild = (child) => { child.myLevel(''); }
+
+const childLogger1 = parentLogger1.child({});
+childLogger1.myLevel('');
+expectError(childLogger1.doesntExist(''));
+
+const parentLogger2 = pino({}, process.stdin);
+expectError(parentLogger2.onChild = (child) => { const b = child.doesntExist; });
+
+const childLogger2 = parentLogger2.child({});
+expectError(childLogger2.doesntExist);
+
+expectError(pino({
+  onChild: (child) => { const a = child.doesntExist; }
+}, process.stdout));
+
+const pinoWithoutLevelsSorting = pino({});
+const pinoWithDescSortingLevels = pino({ levelComparison: 'DESC' });
+const pinoWithAscSortingLevels = pino({ levelComparison: 'ASC' });
+const pinoWithCustomSortingLevels = pino({ levelComparison: () => false });
+// with wrong level comparison direction
+expectError(pino({ levelComparison: 'SOME' }, process.stdout));
+// with wrong level comparison type
+expectError(pino({ levelComparison: 123 }, process.stdout));
+// with wrong custom level comparison return type
+expectError(pino({ levelComparison: () => null }, process.stdout));
+expectError(pino({ levelComparison: () => 1 }, process.stdout));
+expectError(pino({ levelComparison: () => 'string' }, process.stdout));
+
+const customLevelsOnlyOpts = {
+  useOnlyCustomLevels: true,
+  customLevels: {
+    customDebug: 10,
+    info: 20, // to make sure the default names are also available for override
+    customNetwork: 30,
+    customError: 40,
+  },
+  level: 'customDebug',
+} satisfies LoggerOptions<'customDebug' | 'info' | 'customNetwork' | 'customError'>;
+
+const loggerWithCustomLevelOnly = pino(customLevelsOnlyOpts);
+loggerWithCustomLevelOnly.customDebug('test3')
+loggerWithCustomLevelOnly.info('test4')
+loggerWithCustomLevelOnly.customError('test5')
+loggerWithCustomLevelOnly.customNetwork('test6')
+
+expectError(loggerWithCustomLevelOnly.fatal('test'));
+expectError(loggerWithCustomLevelOnly.error('test'));
+expectError(loggerWithCustomLevelOnly.warn('test'));
+expectError(loggerWithCustomLevelOnly.debug('test'));
+expectError(loggerWithCustomLevelOnly.trace('test'));
+
+// Module extension
+declare module "../../" {
+  interface LogFnFields {
+    bannedField?: never;
+    typeCheckedField?: string
+  }
+}
+
+info({typeCheckedField: 'bar'})
+expectError(info({bannedField: 'bar'}))
+expectError(info({typeCheckedField: 123}))
diff --git a/node_modules/pino/test/types/pino.ts b/node_modules/pino/test/types/pino.ts
new file mode 100644
index 0000000..744389a
--- /dev/null
+++ b/node_modules/pino/test/types/pino.ts
@@ -0,0 +1,90 @@
+import { join } from 'node:path'
+import { tmpdir } from 'node:os'
+import pinoPretty from 'pino-pretty'
+// Test both default ("Pino") and named ("pino") imports.
+import Pino, { LoggerOptions, StreamEntry, pino, multistream, transport } from '../../pino'
+
+const destination = join(
+  tmpdir(),
+  '_' + Math.random().toString(36).substr(2, 9)
+)
+
+// Single
+const transport1 = transport({
+  target: 'pino-pretty',
+  options: { some: 'options for', the: 'transport' }
+})
+const logger = pino(transport1)
+logger.setBindings({ some: 'bindings' })
+logger.info('test2')
+logger.flush()
+const loggerDefault = Pino(transport1)
+loggerDefault.setBindings({ some: 'bindings' })
+loggerDefault.info('test2')
+loggerDefault.flush()
+
+const transport2 = transport({
+  target: 'pino-pretty',
+})
+const logger2 = pino(transport2)
+logger2.info('test2')
+const logger2Default = Pino(transport2)
+logger2Default.info('test2')
+
+
+// Multiple
+
+const transports = transport({targets: [
+  {
+    level: 'info',
+    target: 'pino-pretty',
+    options: { some: 'options for', the: 'transport' }
+  },
+  {
+    level: 'trace',
+    target: 'pino/file',
+    options: { destination }
+  }
+]})
+const loggerMulti = pino(transports)
+loggerMulti.info('test2')
+
+// custom levels
+
+const customLevels = {
+  customDebug : 1,
+  info : 2,
+  customNetwork : 3,
+  customError : 4,
+};
+
+type CustomLevels = keyof typeof customLevels;
+
+const pinoOpts = {
+  useOnlyCustomLevels: true,
+  customLevels: customLevels,
+  level: 'customDebug',
+} satisfies LoggerOptions<CustomLevels>;
+
+const multistreamOpts = {
+  dedupe: true,
+  levels: customLevels
+};
+
+const streams: StreamEntry<CustomLevels>[] = [
+  { level : 'customDebug', stream : pinoPretty() },
+  { level : 'info', stream : pinoPretty() },
+  { level : 'customNetwork', stream : pinoPretty() },
+  { level : 'customError', stream : pinoPretty() },
+];
+
+const loggerCustomLevel = pino(pinoOpts, multistream(streams, multistreamOpts));
+loggerCustomLevel.customDebug('test3')
+loggerCustomLevel.info('test4')
+loggerCustomLevel.customError('test5')
+loggerCustomLevel.customNetwork('test6')
+const loggerCustomLevelDefault = Pino(pinoOpts, multistream(streams, multistreamOpts));
+loggerCustomLevelDefault.customDebug('test3')
+loggerCustomLevelDefault.info('test4')
+loggerCustomLevelDefault.customError('test5')
+loggerCustomLevelDefault.customNetwork('test6')
diff --git a/node_modules/pino/tsconfig.json b/node_modules/pino/tsconfig.json
new file mode 100644
index 0000000..9c80d8f
--- /dev/null
+++ b/node_modules/pino/tsconfig.json
@@ -0,0 +1,14 @@
+{
+  "compilerOptions": {
+    "target": "es6",
+    "lib": [ "es2015", "dom" ],
+    "module": "commonjs",
+    "noEmit": true,
+    "strict": true,
+    "esModuleInterop": true
+  },
+  "exclude": [
+    "./test/types/*.test-d.ts",
+    "./*.d.ts"
+  ]
+}
diff --git a/node_modules/process-warning/.gitattributes b/node_modules/process-warning/.gitattributes
new file mode 100644
index 0000000..a0e7df9
--- /dev/null
+++ b/node_modules/process-warning/.gitattributes
@@ -0,0 +1,2 @@
+# Set default behavior to automatically convert line endings
+* text=auto eol=lf
diff --git a/node_modules/process-warning/.github/dependabot.yml b/node_modules/process-warning/.github/dependabot.yml
new file mode 100644
index 0000000..35d66ca
--- /dev/null
+++ b/node_modules/process-warning/.github/dependabot.yml
@@ -0,0 +1,13 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
+
+  - package-ecosystem: "npm"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
diff --git a/node_modules/process-warning/.github/workflows/ci.yml
b/node_modules/process-warning/.github/workflows/ci.yml new file mode 100644 index 0000000..496e8b9 --- /dev/null +++ b/node_modules/process-warning/.github/workflows/ci.yml @@ -0,0 +1,22 @@ +name: CI + +on: + push: + branches: + - main + - next + - 'v*' + paths-ignore: + - 'docs/**' + - '*.md' + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + +jobs: + test: + uses: fastify/workflows/.github/workflows/plugins-ci.yml@v5 + with: + license-check: true + lint: true diff --git a/node_modules/process-warning/LICENSE b/node_modules/process-warning/LICENSE new file mode 100644 index 0000000..24f78a7 --- /dev/null +++ b/node_modules/process-warning/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Fastify + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/process-warning/README.md b/node_modules/process-warning/README.md new file mode 100644 index 0000000..a3818d5 --- /dev/null +++ b/node_modules/process-warning/README.md @@ -0,0 +1,118 @@ +# process-warning + +[![CI](https://github.com/fastify/process-warning/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/fastify/process-warning/actions/workflows/ci.yml) +[![NPM version](https://img.shields.io/npm/v/process-warning.svg?style=flat)](https://www.npmjs.com/package/process-warning) +[![neostandard javascript style](https://img.shields.io/badge/code_style-neostandard-brightgreen?style=flat)](https://github.com/neostandard/neostandard) + +A small utility for generating consistent warning objects across your codebase. +It also exposes a utility for emitting those warnings, guaranteeing that they are issued only once (unless configured otherwise). + +_This module is used by the [Fastify](https://fastify.dev) framework and it was called `fastify-warning` prior to version 1.0.0._ + +### Install + +``` +npm i process-warning +``` + +### Usage + +The module exports two builder functions for creating warnings. + +```js +const { + createWarning, + createDeprecation +} = require('process-warning') + +const warning = createWarning({ + name: 'ExampleWarning', + code: 'EXP_WRN_001', + message: 'Hello %s', + unlimited: true +}) +warning('world') +``` + +#### Methods + +##### `createWarning({ name, code, message[, unlimited] })` + +- `name` (`string`, required) - The error name, you can access it later with +`error.name`. For consistency, we recommend prefixing module error names +with `{YourModule}Warning` +- `code` (`string`, required) - The warning code, you can access it later with +`error.code`. 
For consistency, we recommend prefixing plugin error codes with
+`{ThreeLetterModuleName}_`, e.g. `FST_`. NOTE: codes should be all uppercase.
+- `message` (`string`, required) - The warning message. You can also use
+interpolated strings for formatting the message.
+- `unlimited` (`boolean`, optional) - Should the warning be emitted more than
+once? Defaults to `false`.
+
+
+##### `createDeprecation({ code, message[, unlimited] })`
+
+This is a wrapper for `createWarning`. It is equivalent to invoking
+`createWarning` with the `name` parameter set to "DeprecationWarning".
+
+Deprecation warnings have extended support for the Node.js CLI options:
+`--throw-deprecation`, `--no-deprecation`, and `--trace-deprecation`.
+
+##### `warning([a [, b [, c]]])`
+
+The returned `warning` function can be used for emitting warnings.
+A warning is emitted only once, unless it was created with `unlimited: true`.
+
+- `[a [, b [, c]]]` (`any`, optional) - Parameters for string interpolation.
+
+```js
+const { createWarning } = require('process-warning')
+const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'message' })
+FST_ERROR_CODE()
+```
+
+How to use an interpolated string:
+```js
+const { createWarning } = require('process-warning')
+const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'Hello %s' })
+FST_ERROR_CODE('world')
+```
+
+The `warning` object has methods and properties for managing the warning's state. Useful for testing.
+```js
+const { createWarning } = require('process-warning')
+const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'Hello %s' })
+console.log(FST_ERROR_CODE.emitted) // false
+FST_ERROR_CODE('world')
+console.log(FST_ERROR_CODE.emitted) // true
+
+const FST_ERROR_CODE_2 = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE_2', message: 'Hello %s' })
+FST_ERROR_CODE_2.emitted = true
+FST_ERROR_CODE_2('world') // will not be emitted because `emitted` was pre-set and the warning is not unlimited
+```
+
+How to use an unlimited warning:
+```js
+const { createWarning } = require('process-warning')
+const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'Hello %s', unlimited: true })
+FST_ERROR_CODE('world') // will be emitted
+FST_ERROR_CODE('world') // will be emitted again
+```
+
+#### Suppressing warnings
+
+It is possible to suppress warnings by utilizing one of node's built-in warning suppression mechanisms.
+
+Warnings can be suppressed:
+
+- by setting the `NODE_NO_WARNINGS` environment variable to `1`
+- by passing the `--no-warnings` flag to the node process
+- by adding `--no-warnings` to the `NODE_OPTIONS` environment variable
+
+For more information see [node's documentation](https://nodejs.org/api/cli.html).
+
+## License
+
+Licensed under [MIT](./LICENSE).
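+
+## Worked example
+
+A minimal end-to-end sketch tying the pieces above together; the warning name
+and code here are made-up placeholders, not identifiers shipped by this package:
+
+```js
+'use strict'
+
+const { createWarning } = require('process-warning')
+
+const APP_WRN_001 = createWarning({
+  name: 'MyAppWarning',
+  code: 'APP_WRN_001',
+  message: 'feature %s is experimental'
+})
+
+// The 'warning' event receives an Error-like object whose name, code and
+// formatted message were set via process.emitWarning().
+process.on('warning', (warning) => {
+  console.log(warning.name, warning.code, warning.message)
+})
+
+APP_WRN_001('turbo-mode') // emitted once
+APP_WRN_001('turbo-mode') // no-op: `emitted` is already true and `unlimited` is false
+```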
diff --git a/node_modules/process-warning/benchmarks/warn.js b/node_modules/process-warning/benchmarks/warn.js new file mode 100644 index 0000000..1f49bf6 --- /dev/null +++ b/node_modules/process-warning/benchmarks/warn.js @@ -0,0 +1,25 @@ +'use strict' + +const { Suite } = require('benchmark') +const { createWarning } = require('..') + +const err1 = createWarning({ + name: 'TestWarning', + code: 'TST_ERROR_CODE_1', + message: 'message' +}) +const err2 = createWarning({ + name: 'TestWarning', + code: 'TST_ERROR_CODE_2', + message: 'message' +}) + +new Suite() + .add('warn', function () { + err1() + err2() + }) + .on('cycle', function (event) { + console.log(String(event.target)) + }) + .run() diff --git a/node_modules/process-warning/eslint.config.js b/node_modules/process-warning/eslint.config.js new file mode 100644 index 0000000..89fd678 --- /dev/null +++ b/node_modules/process-warning/eslint.config.js @@ -0,0 +1,6 @@ +'use strict' + +module.exports = require('neostandard')({ + ignores: require('neostandard').resolveIgnoresFromGitignore(), + ts: true +}) diff --git a/node_modules/process-warning/examples/example.js b/node_modules/process-warning/examples/example.js new file mode 100644 index 0000000..db9d862 --- /dev/null +++ b/node_modules/process-warning/examples/example.js @@ -0,0 +1,11 @@ +'use strict' + +const { createWarning } = require('..') + +const CUSTDEP001 = createWarning({ + name: 'DeprecationWarning', + code: 'CUSTDEP001', + message: 'This is a deprecation warning' +}) + +CUSTDEP001() diff --git a/node_modules/process-warning/index.js b/node_modules/process-warning/index.js new file mode 100644 index 0000000..e0d4ab8 --- /dev/null +++ b/node_modules/process-warning/index.js @@ -0,0 +1,124 @@ +'use strict' + +const { format } = require('node:util') + +/** + * @namespace processWarning + */ + +/** + * Represents a warning item with details. + * @typedef {Function} WarningItem + * @param {*} [a] Possible message interpolation value. + * @param {*} [b] Possible message interpolation value. + * @param {*} [c] Possible message interpolation value. + * @property {string} name - The name of the warning. + * @property {string} code - The code associated with the warning. + * @property {string} message - The warning message. + * @property {boolean} emitted - Indicates if the warning has been emitted. + * @property {function} format - Formats the warning message. + */ + +/** + * Options for creating a process warning. + * @typedef {Object} ProcessWarningOptions + * @property {string} name - The name of the warning. + * @property {string} code - The code associated with the warning. + * @property {string} message - The warning message. + * @property {boolean} [unlimited=false] - If true, allows unlimited emissions of the warning. + */ + +/** + * Represents the process warning functionality. + * @typedef {Object} ProcessWarning + * @property {function} createWarning - Creates a warning item. + * @property {function} createDeprecation - Creates a deprecation warning item. + */ + +/** + * Creates a deprecation warning item. + * @function + * @memberof processWarning + * @param {ProcessWarningOptions} params - Options for creating the warning. + * @returns {WarningItem} The created deprecation warning item. + */ +function createDeprecation (params) { + return createWarning({ ...params, name: 'DeprecationWarning' }) +} + +/** + * Creates a warning item. + * @function + * @memberof processWarning + * @param {ProcessWarningOptions} params - Options for creating the warning. 
+ * @returns {WarningItem} The created warning item. + * @throws {Error} Throws an error if name, code, or message is empty, or if opts.unlimited is not a boolean. + */ +function createWarning ({ name, code, message, unlimited = false } = {}) { + if (!name) throw new Error('Warning name must not be empty') + if (!code) throw new Error('Warning code must not be empty') + if (!message) throw new Error('Warning message must not be empty') + if (typeof unlimited !== 'boolean') throw new Error('Warning opts.unlimited must be a boolean') + + code = code.toUpperCase() + + let warningContainer = { + [name]: function (a, b, c) { + if (warning.emitted === true && warning.unlimited !== true) { + return + } + warning.emitted = true + process.emitWarning(warning.format(a, b, c), warning.name, warning.code) + } + } + if (unlimited) { + warningContainer = { + [name]: function (a, b, c) { + warning.emitted = true + process.emitWarning(warning.format(a, b, c), warning.name, warning.code) + } + } + } + + const warning = warningContainer[name] + + warning.emitted = false + warning.message = message + warning.unlimited = unlimited + warning.code = code + + /** + * Formats the warning message. + * @param {*} [a] Possible message interpolation value. + * @param {*} [b] Possible message interpolation value. + * @param {*} [c] Possible message interpolation value. + * @returns {string} The formatted warning message. + */ + warning.format = function (a, b, c) { + let formatted + if (a && b && c) { + formatted = format(message, a, b, c) + } else if (a && b) { + formatted = format(message, a, b) + } else if (a) { + formatted = format(message, a) + } else { + formatted = message + } + return formatted + } + + return warning +} + +/** + * Module exports containing the process warning functionality. + * @namespace + * @property {function} createWarning - Creates a warning item. + * @property {function} createDeprecation - Creates a deprecation warning item. + * @property {ProcessWarning} processWarning - Represents the process warning functionality. 
+ */ +const out = { createWarning, createDeprecation } +module.exports = out +module.exports.default = out +module.exports.processWarning = out diff --git a/node_modules/process-warning/package.json b/node_modules/process-warning/package.json new file mode 100644 index 0000000..ef74bf5 --- /dev/null +++ b/node_modules/process-warning/package.json @@ -0,0 +1,73 @@ +{ + "name": "process-warning", + "version": "5.0.0", + "description": "A small utility for creating warnings and emitting them.", + "main": "index.js", + "type": "commonjs", + "types": "types/index.d.ts", + "scripts": { + "lint": "eslint", + "lint:fix": "eslint --fix", + "test": "npm run test:unit && npm run test:jest && npm run test:typescript", + "test:jest": "jest jest.test.js", + "test:unit": "c8 --100 node --test", + "test:typescript": "tsd" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/fastify/process-warning.git" + }, + "keywords": [ + "fastify", + "error", + "warning", + "utility", + "plugin", + "emit", + "once" + ], + "author": "Tomas Della Vedova", + "contributors": [ + { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + { + "name": "Manuel Spigolon", + "email": "behemoth89@gmail.com" + }, + { + "name": "James Sumners", + "url": "https://james.sumners.info" + }, + { + "name": "Frazer Smith", + "email": "frazer.dev@icloud.com", + "url": "https://github.com/fdawgs" + } + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/fastify/fastify-warning/issues" + }, + "homepage": "https://github.com/fastify/fastify-warning#readme", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "devDependencies": { + "@fastify/pre-commit": "^2.1.0", + "benchmark": "^2.1.4", + "c8": "^10.1.3", + "eslint": "^9.17.0", + "jest": "^29.7.0", + "neostandard": "^0.12.0", + "tsd": "^0.31.0" + } +} diff --git a/node_modules/process-warning/test/emit-interpolated-string.test.js b/node_modules/process-warning/test/emit-interpolated-string.test.js new file mode 100644 index 0000000..4a90c1c --- /dev/null +++ b/node_modules/process-warning/test/emit-interpolated-string.test.js @@ -0,0 +1,34 @@ +'use strict' + +const { test } = require('node:test') +const { createWarning } = require('..') +const { withResolvers } = require('./promise') + +test('emit with interpolated string', t => { + t.plan(4) + + const { promise, resolve } = withResolvers() + + process.on('warning', onWarning) + function onWarning (warning) { + t.assert.deepStrictEqual(warning.name, 'TestDeprecation') + t.assert.deepStrictEqual(warning.code, 'CODE') + t.assert.deepStrictEqual(warning.message, 'Hello world') + t.assert.ok(codeWarning.emitted) + } + + const codeWarning = createWarning({ + name: 'TestDeprecation', + code: 'CODE', + message: 'Hello %s' + }) + codeWarning('world') + codeWarning('world') + + setImmediate(() => { + process.removeListener('warning', onWarning) + resolve() + }) + + return promise +}) diff --git a/node_modules/process-warning/test/emit-once-only.test.js b/node_modules/process-warning/test/emit-once-only.test.js new file mode 100644 index 0000000..4d5bc1f --- /dev/null +++ b/node_modules/process-warning/test/emit-once-only.test.js @@ -0,0 +1,33 @@ +'use strict' + +const { test } = require('node:test') +const { createWarning } = require('..') +const { withResolvers } = require('./promise') + +test('emit should emit a given code only once', t => { + t.plan(4) + + const { promise, 
resolve } = withResolvers() + + process.on('warning', onWarning) + function onWarning (warning) { + t.assert.deepStrictEqual(warning.name, 'TestDeprecation') + t.assert.deepStrictEqual(warning.code, 'CODE') + t.assert.deepStrictEqual(warning.message, 'Hello world') + t.assert.ok(warn.emitted) + } + + const warn = createWarning({ + name: 'TestDeprecation', + code: 'CODE', + message: 'Hello world' + }) + warn() + warn() + setImmediate(() => { + process.removeListener('warning', onWarning) + resolve() + }) + + return promise +}) diff --git a/node_modules/process-warning/test/emit-reset.test.js b/node_modules/process-warning/test/emit-reset.test.js new file mode 100644 index 0000000..1a31a4c --- /dev/null +++ b/node_modules/process-warning/test/emit-reset.test.js @@ -0,0 +1,40 @@ +'use strict' + +const { test } = require('node:test') +const { createWarning } = require('../') +const { withResolvers } = require('./promise') + +test('a limited warning can be re-set', t => { + t.plan(4) + + const { promise, resolve } = withResolvers() + let count = 0 + process.on('warning', onWarning) + function onWarning () { + count++ + } + + const warn = createWarning({ + name: 'TestDeprecation', + code: 'CODE', + message: 'Hello world' + }) + + warn() + t.assert.ok(warn.emitted) + + warn() + t.assert.ok(warn.emitted) + + warn.emitted = false + warn() + t.assert.ok(warn.emitted) + + setImmediate(() => { + t.assert.deepStrictEqual(count, 2) + process.removeListener('warning', onWarning) + resolve() + }) + + return promise +}) diff --git a/node_modules/process-warning/test/emit-set.test.js b/node_modules/process-warning/test/emit-set.test.js new file mode 100644 index 0000000..6880fd2 --- /dev/null +++ b/node_modules/process-warning/test/emit-set.test.js @@ -0,0 +1,35 @@ +'use strict' + +const { test } = require('node:test') +const { createWarning } = require('../') +const { withResolvers } = require('./promise') + +test('emit should set the emitted state', t => { + t.plan(3) + + const { promise, resolve } = withResolvers() + + process.on('warning', onWarning) + function onWarning () { + t.fail('should not be called') + } + + const warn = createWarning({ + name: 'TestDeprecation', + code: 'CODE', + message: 'Hello world' + }) + t.assert.ok(!warn.emitted) + warn.emitted = true + t.assert.ok(warn.emitted) + + warn() + t.assert.ok(warn.emitted) + + setImmediate(() => { + process.removeListener('warning', onWarning) + resolve() + }) + + return promise +}) diff --git a/node_modules/process-warning/test/emit-unlimited.test.js b/node_modules/process-warning/test/emit-unlimited.test.js new file mode 100644 index 0000000..3bf4780 --- /dev/null +++ b/node_modules/process-warning/test/emit-unlimited.test.js @@ -0,0 +1,42 @@ +'use strict' + +const { test } = require('node:test') +const { createWarning } = require('..') +const { withResolvers } = require('./promise') + +test('emit should emit a given code unlimited times', t => { + t.plan(50) + + let runs = 0 + const expectedRun = [] + const times = 10 + + const { promise, resolve } = withResolvers() + + process.on('warning', onWarning) + function onWarning (warning) { + t.assert.deepStrictEqual(warning.name, 'TestDeprecation') + t.assert.deepStrictEqual(warning.code, 'CODE') + t.assert.deepStrictEqual(warning.message, 'Hello world') + t.assert.ok(warn.emitted) + t.assert.deepStrictEqual(runs++, expectedRun.shift()) + } + + const warn = createWarning({ + name: 'TestDeprecation', + code: 'CODE', + message: 'Hello world', + unlimited: true + }) + + for (let i = 0; i < times; 
i++) { + expectedRun.push(i) + warn() + } + setImmediate(() => { + process.removeListener('warning', onWarning) + resolve() + }) + + return promise +}) diff --git a/node_modules/process-warning/test/index.test.js b/node_modules/process-warning/test/index.test.js new file mode 100644 index 0000000..93f8cc4 --- /dev/null +++ b/node_modules/process-warning/test/index.test.js @@ -0,0 +1,99 @@ +'use strict' + +const { test } = require('node:test') +const { createWarning, createDeprecation } = require('..') + +process.removeAllListeners('warning') + +test('Create warning with zero parameter', t => { + t.plan(3) + + const warnItem = createWarning({ + name: 'TestWarning', + code: 'CODE', + message: 'Not available' + }) + t.assert.deepStrictEqual(warnItem.name, 'TestWarning') + t.assert.deepStrictEqual(warnItem.message, 'Not available') + t.assert.deepStrictEqual(warnItem.code, 'CODE') +}) + +test('Create error with 1 parameter', t => { + t.plan(3) + + const warnItem = createWarning({ + name: 'TestWarning', + code: 'CODE', + message: 'hey %s' + }) + t.assert.deepStrictEqual(warnItem.name, 'TestWarning') + t.assert.deepStrictEqual(warnItem.format('alice'), 'hey alice') + t.assert.deepStrictEqual(warnItem.code, 'CODE') +}) + +test('Create error with 2 parameters', t => { + t.plan(3) + + const warnItem = createWarning({ + name: 'TestWarning', + code: 'CODE', + message: 'hey %s, I like your %s' + }) + t.assert.deepStrictEqual(warnItem.name, 'TestWarning') + t.assert.deepStrictEqual(warnItem.format('alice', 'attitude'), 'hey alice, I like your attitude') + t.assert.deepStrictEqual(warnItem.code, 'CODE') +}) + +test('Create error with 3 parameters', t => { + t.plan(3) + + const warnItem = createWarning({ + name: 'TestWarning', + code: 'CODE', + message: 'hey %s, I like your %s %s' + }) + t.assert.deepStrictEqual(warnItem.name, 'TestWarning') + t.assert.deepStrictEqual(warnItem.format('alice', 'attitude', 'see you'), 'hey alice, I like your attitude see you') + t.assert.deepStrictEqual(warnItem.code, 'CODE') +}) + +test('Creates a deprecation warning', t => { + t.plan(3) + + const deprecationItem = createDeprecation({ + name: 'DeprecationWarning', + code: 'CODE', + message: 'hello %s' + }) + t.assert.deepStrictEqual(deprecationItem.name, 'DeprecationWarning') + t.assert.deepStrictEqual(deprecationItem.format('world'), 'hello world') + t.assert.deepStrictEqual(deprecationItem.code, 'CODE') +}) + +test('Should throw when error code has no name', t => { + t.plan(1) + t.assert.throws(() => createWarning(), new Error('Warning name must not be empty')) +}) + +test('Should throw when error has no code', t => { + t.plan(1) + t.assert.throws(() => createWarning({ name: 'name' }), new Error('Warning code must not be empty')) +}) + +test('Should throw when error has no message', t => { + t.plan(1) + t.assert.throws(() => createWarning({ + name: 'name', + code: 'code' + }), new Error('Warning message must not be empty')) +}) + +test('Cannot set unlimited other than boolean', t => { + t.plan(1) + t.assert.throws(() => createWarning({ + name: 'name', + code: 'code', + message: 'message', + unlimited: 'unlimited' + }), new Error('Warning opts.unlimited must be a boolean')) +}) diff --git a/node_modules/process-warning/test/issue-88.test.js b/node_modules/process-warning/test/issue-88.test.js new file mode 100644 index 0000000..2194266 --- /dev/null +++ b/node_modules/process-warning/test/issue-88.test.js @@ -0,0 +1,38 @@ +'use strict' + +const { test } = require('node:test') +const { createWarning } = require('..') 
+const { withResolvers } = require('./promise') + +test('Must not overwrite config', t => { + t.plan(1) + + function onWarning (warning) { + t.assert.deepStrictEqual(warning.code, 'CODE_1') + } + + const a = createWarning({ + name: 'TestWarning', + code: 'CODE_1', + message: 'Msg' + }) + createWarning({ + name: 'TestWarning', + code: 'CODE_2', + message: 'Msg', + unlimited: true + }) + + const { promise, resolve } = withResolvers() + + process.on('warning', onWarning) + a('CODE_1') + a('CODE_1') + + setImmediate(() => { + process.removeListener('warning', onWarning) + resolve() + }) + + return promise +}) diff --git a/node_modules/process-warning/test/jest.test.js b/node_modules/process-warning/test/jest.test.js new file mode 100644 index 0000000..5935b6a --- /dev/null +++ b/node_modules/process-warning/test/jest.test.js @@ -0,0 +1,24 @@ +/* global test, expect */ +'use strict' + +const { createWarning } = require('..') + +if (globalThis.test) { + test('works with jest', done => { + const code = createWarning({ + name: 'TestDeprecation', + code: 'CODE', + message: 'Hello world' + }) + code('world') + + // we cannot actually listen to process warning event + // because jest messes with it (that's the point of this test) + // we can only test it was emitted indirectly + // and test no exception is raised + setImmediate(() => { + expect(code.emitted).toBeTruthy() + done() + }) + }) +} diff --git a/node_modules/process-warning/test/no-warnings.test.js b/node_modules/process-warning/test/no-warnings.test.js new file mode 100644 index 0000000..be0e9bf --- /dev/null +++ b/node_modules/process-warning/test/no-warnings.test.js @@ -0,0 +1,80 @@ +'use strict' + +const { test } = require('node:test') +const { spawnSync } = require('node:child_process') +const { resolve } = require('node:path') + +const entry = resolve(__dirname, '../examples', 'example.js') + +test('--no-warnings is set in cli', t => { + t.plan(1) + const child = spawnSync(process.execPath, [ + '--no-warnings', + entry + ]) + + const stderr = child.stderr.toString() + t.assert.deepStrictEqual(stderr, '') +}) + +test('--no-warnings is not set in cli', t => { + t.plan(1) + const child = spawnSync(process.execPath, [ + entry + ]) + + const stderr = child.stderr.toString() + t.assert.match(stderr, /\[CUSTDEP001\] DeprecationWarning: This is a deprecation warning/) +}) + +test('NODE_NO_WARNINGS is set to 1', t => { + t.plan(1) + const child = spawnSync(process.execPath, [ + entry + ], { + env: { + NODE_NO_WARNINGS: '1' + } + }) + + const stderr = child.stderr.toString() + t.assert.deepStrictEqual(stderr, '') +}) + +test('NODE_NO_WARNINGS is set to 0', t => { + t.plan(1) + const child = spawnSync(process.execPath, [ + entry + ], { + env: { + NODE_NO_WARNINGS: '0' + } + }) + + const stderr = child.stderr.toString() + t.assert.match(stderr, /\[CUSTDEP001\] DeprecationWarning: This is a deprecation warning/) +}) + +test('NODE_NO_WARNINGS is not set', t => { + t.plan(1) + const child = spawnSync(process.execPath, [ + entry + ]) + + const stderr = child.stderr.toString() + t.assert.match(stderr, /\[CUSTDEP001\] DeprecationWarning: This is a deprecation warning/) +}) + +test('NODE_Options contains --no-warnings', t => { + t.plan(1) + const child = spawnSync(process.execPath, [ + entry + ], { + env: { + NODE_OPTIONS: '--no-warnings' + } + }) + + const stderr = child.stderr.toString() + t.assert.deepStrictEqual(stderr, '') +}) diff --git a/node_modules/process-warning/test/promise.js b/node_modules/process-warning/test/promise.js new file mode 
100644
index 0000000..c5a1ebc
--- /dev/null
+++ b/node_modules/process-warning/test/promise.js
@@ -0,0 +1,10 @@
+module.exports = {
+  withResolvers: function () {
+    let promiseResolve, promiseReject
+    const promise = new Promise((resolve, reject) => {
+      promiseResolve = resolve
+      promiseReject = reject
+    })
+    return { promise, resolve: promiseResolve, reject: promiseReject }
+  }
+}
diff --git a/node_modules/process-warning/types/index.d.ts b/node_modules/process-warning/types/index.d.ts
new file mode 100644
index 0000000..0728405
--- /dev/null
+++ b/node_modules/process-warning/types/index.d.ts
@@ -0,0 +1,37 @@
+declare namespace processWarning {
+  export interface WarningItem {
+    (a?: any, b?: any, c?: any): void;
+    name: string;
+    code: string;
+    message: string;
+    emitted: boolean;
+    unlimited: boolean;
+    format(a?: any, b?: any, c?: any): string;
+  }
+
+  export type WarningOptions = {
+    name: string;
+    code: string;
+    message: string;
+    unlimited?: boolean;
+  }
+
+  export type DeprecationOptions = Omit<WarningOptions, 'name'>
+
+  export type ProcessWarningOptions = {
+    unlimited?: boolean;
+  }
+
+  export type ProcessWarning = {
+    createWarning(params: WarningOptions): WarningItem;
+    createDeprecation(params: DeprecationOptions): WarningItem;
+  }
+
+  export function createWarning (params: WarningOptions): WarningItem
+  export function createDeprecation (params: DeprecationOptions): WarningItem
+
+  const processWarning: ProcessWarning
+  export { processWarning as default }
+}
+
+export = processWarning
diff --git a/node_modules/process-warning/types/index.test-d.ts b/node_modules/process-warning/types/index.test-d.ts
new file mode 100644
index 0000000..fe338e1
--- /dev/null
+++ b/node_modules/process-warning/types/index.test-d.ts
@@ -0,0 +1,36 @@
+import { expectType } from 'tsd'
+import { createWarning, createDeprecation } from '..'
+
+const WarnInstance = createWarning({
+  name: 'TypeScriptWarning',
+  code: 'CODE',
+  message: 'message'
+})
+
+expectType<string>(WarnInstance.code)
+expectType<string>(WarnInstance.message)
+expectType<string>(WarnInstance.name)
+expectType<boolean>(WarnInstance.emitted)
+expectType<boolean>(WarnInstance.unlimited)
+
+expectType<void>(WarnInstance())
+expectType<void>(WarnInstance('foo'))
+expectType<void>(WarnInstance('foo', 'bar'))
+
+const buildWarnUnlimited = createWarning({
+  name: 'TypeScriptWarning',
+  code: 'CODE',
+  message: 'message',
+  unlimited: true
+})
+expectType<boolean>(buildWarnUnlimited.unlimited)
+
+const DeprecationInstance = createDeprecation({
+  code: 'CODE',
+  message: 'message'
+})
+expectType<string>(DeprecationInstance.code)
+
+DeprecationInstance()
+DeprecationInstance('foo')
+DeprecationInstance('foo', 'bar')
diff --git a/node_modules/pump/.github/FUNDING.yml b/node_modules/pump/.github/FUNDING.yml
new file mode 100644
index 0000000..f6c9139
--- /dev/null
+++ b/node_modules/pump/.github/FUNDING.yml
@@ -0,0 +1,2 @@
+github: mafintosh
+tidelift: "npm/pump"
diff --git a/node_modules/pump/.travis.yml b/node_modules/pump/.travis.yml
new file mode 100644
index 0000000..17f9433
--- /dev/null
+++ b/node_modules/pump/.travis.yml
@@ -0,0 +1,5 @@
+language: node_js
+node_js:
+  - "0.10"
+
+script: "npm test"
diff --git a/node_modules/pump/LICENSE b/node_modules/pump/LICENSE
new file mode 100644
index 0000000..757562e
--- /dev/null
+++ b/node_modules/pump/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Mathias Buus
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/node_modules/pump/README.md b/node_modules/pump/README.md
new file mode 100644
index 0000000..5dcd8a5
--- /dev/null
+++ b/node_modules/pump/README.md
@@ -0,0 +1,74 @@
+# pump
+
+pump is a small node module that pipes streams together and destroys all of them if one of them closes.
+
+```
+npm install pump
+```
+
+[![build status](http://img.shields.io/travis/mafintosh/pump.svg?style=flat)](http://travis-ci.org/mafintosh/pump)
+
+## What problem does it solve?
+
+When using standard `source.pipe(dest)`, source will _not_ be destroyed if dest emits close or an error.
+You are also not able to provide a callback to tell when the pipe has finished.
+
+pump does these two things for you.
+
+## Usage
+
+Simply pass the streams you want to pipe together to pump, and add an optional callback:
+
+``` js
+var pump = require('pump')
+var fs = require('fs')
+
+var source = fs.createReadStream('/dev/random')
+var dest = fs.createWriteStream('/dev/null')
+
+pump(source, dest, function(err) {
+  console.log('pipe finished', err)
+})
+
+setTimeout(function() {
+  dest.destroy() // when dest is closed pump will destroy source
+}, 1000)
+```
+
+You can use pump to pipe more than two streams together as well:
+
+``` js
+var transform = someTransformStream()
+
+pump(source, transform, anotherTransform, dest, function(err) {
+  console.log('pipe finished', err)
+})
+```
+
+If `source`, `transform`, `anotherTransform` or `dest` closes, all of them will be destroyed.
+
+Similarly to `stream.pipe()`, `pump()` returns the last stream passed in, so you can do:
+
+```
+return pump(s1, s2) // returns s2
+```
+
+Note that `pump` attaches error handlers to the streams to do internal error handling, so if `s2` emits an
+error in the above scenario, it will not trigger a `process.on('uncaughtException')` if you do not listen for it.
+
+If you want to return a stream that combines *both* s1 and s2 into a single stream, use
+[pumpify](https://github.com/mafintosh/pumpify) instead.
+
+## License
+
+MIT
+
+## Related
+
+`pump` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
+
+## For enterprise
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of pump and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-pump?utm_source=npm-pump&utm_medium=referral&utm_campaign=enterprise)
diff --git a/node_modules/pump/SECURITY.md b/node_modules/pump/SECURITY.md
new file mode 100644
index 0000000..da9c516
--- /dev/null
+++ b/node_modules/pump/SECURITY.md
@@ -0,0 +1,5 @@
+## Security contact information
+
+To report a security vulnerability, please use the
+[Tidelift security contact](https://tidelift.com/security).
+Tidelift will coordinate the fix and disclosure.
diff --git a/node_modules/pump/index.js b/node_modules/pump/index.js
new file mode 100644
index 0000000..712c076
--- /dev/null
+++ b/node_modules/pump/index.js
@@ -0,0 +1,86 @@
+var once = require('once')
+var eos = require('end-of-stream')
+var fs
+
+try {
+  fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes
+} catch (e) {}
+
+var noop = function () {}
+var ancient = typeof process === 'undefined' ?
false : /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump diff --git a/node_modules/pump/package.json b/node_modules/pump/package.json new file mode 100644 index 0000000..976555c --- /dev/null +++ b/node_modules/pump/package.json @@ -0,0 +1,24 @@ +{ + "name": "pump", + "version": "3.0.3", + "repository": "git://github.com/mafintosh/pump.git", + "license": "MIT", + "description": "pipe streams together and close all of them if one of them closes", + "browser": { + "fs": false + }, + "keywords": [ + "streams", + "pipe", + "destroy", + "callback" + ], + "author": "Mathias Buus Madsen ", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + }, + "scripts": { + "test": "node test-browser.js && node test-node.js" + } +} diff --git a/node_modules/pump/test-browser.js b/node_modules/pump/test-browser.js new file mode 100644 index 0000000..9a06c8a --- /dev/null +++ b/node_modules/pump/test-browser.js @@ -0,0 +1,66 @@ +var stream = require('stream') +var pump = require('./index') + +var rs = new stream.Readable() +var ws = new stream.Writable() + +rs._read = function (size) { + this.push(Buffer(size).fill('abc')) +} + +ws._write = function (chunk, encoding, cb) { + setTimeout(function () { + cb() + }, 100) +} + +var toHex = function () { + var reverse = new (require('stream').Transform)() + + reverse._transform = function (chunk, enc, callback) { + reverse.push(chunk.toString('hex')) + callback() + } + + return reverse +} + +var wsClosed = false +var rsClosed = false +var callbackCalled = false + +var check = function () { + if (wsClosed && rsClosed && 
callbackCalled) { + console.log('test-browser.js passes') + clearTimeout(timeout) + } +} + +ws.on('finish', function () { + wsClosed = true + check() +}) + +rs.on('end', function () { + rsClosed = true + check() +}) + +var res = pump(rs, toHex(), toHex(), toHex(), ws, function () { + callbackCalled = true + check() +}) + +if (res !== ws) { + throw new Error('should return last stream') +} + +setTimeout(function () { + rs.push(null) + rs.emit('close') +}, 1000) + +var timeout = setTimeout(function () { + check() + throw new Error('timeout') +}, 5000) diff --git a/node_modules/pump/test-node.js b/node_modules/pump/test-node.js new file mode 100644 index 0000000..561251a --- /dev/null +++ b/node_modules/pump/test-node.js @@ -0,0 +1,53 @@ +var pump = require('./index') + +var rs = require('fs').createReadStream('/dev/random') +var ws = require('fs').createWriteStream('/dev/null') + +var toHex = function () { + var reverse = new (require('stream').Transform)() + + reverse._transform = function (chunk, enc, callback) { + reverse.push(chunk.toString('hex')) + callback() + } + + return reverse +} + +var wsClosed = false +var rsClosed = false +var callbackCalled = false + +var check = function () { + if (wsClosed && rsClosed && callbackCalled) { + console.log('test-node.js passes') + clearTimeout(timeout) + } +} + +ws.on('close', function () { + wsClosed = true + check() +}) + +rs.on('close', function () { + rsClosed = true + check() +}) + +var res = pump(rs, toHex(), toHex(), toHex(), ws, function () { + callbackCalled = true + check() +}) + +if (res !== ws) { + throw new Error('should return last stream') +} + +setTimeout(function () { + rs.destroy() +}, 1000) + +var timeout = setTimeout(function () { + throw new Error('timeout') +}, 5000) diff --git a/node_modules/quick-format-unescaped/.github/workflows/ci.yml b/node_modules/quick-format-unescaped/.github/workflows/ci.yml new file mode 100644 index 0000000..7814959 --- /dev/null +++ b/node_modules/quick-format-unescaped/.github/workflows/ci.yml @@ -0,0 +1,21 @@ +name: CI Tests + +on: + - pull_request + +jobs: + build: + + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [10.x, 12.x, 13.x] + + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - run: npm i && npm test diff --git a/node_modules/quick-format-unescaped/LICENSE b/node_modules/quick-format-unescaped/LICENSE new file mode 100644 index 0000000..fc6d313 --- /dev/null +++ b/node_modules/quick-format-unescaped/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2019 David Mark Clements + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/quick-format-unescaped/benchmark.js b/node_modules/quick-format-unescaped/benchmark.js new file mode 100644 index 0000000..73e3b4b --- /dev/null +++ b/node_modules/quick-format-unescaped/benchmark.js @@ -0,0 +1,24 @@ +var bench = require('fastbench') +var utilFormat = require('util').format +var quickFormat = require('./') + +var run = bench([ + function util(cb) { + utilFormat('%s %j %d', 'a', {a: {x: 1}}, 1) + setImmediate(cb) + }, + function quick(cb) { + quickFormat('%s %j %d', 'a', [{a: {x: 1}}, 1], null) + setImmediate(cb) + }, + function utilWithTailObj(cb) { + utilFormat('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'}) + setImmediate(cb) + }, + function quickWithTailObj(cb) { + quickFormat('hello %s %j %d', 'world', [{obj: true}, 4, {another: 'obj'}], null) + setImmediate(cb) + } +], 100000) + +run(run) diff --git a/node_modules/quick-format-unescaped/index.js b/node_modules/quick-format-unescaped/index.js new file mode 100644 index 0000000..61768e7 --- /dev/null +++ b/node_modules/quick-format-unescaped/index.js @@ -0,0 +1,109 @@ +'use strict' +function tryStringify (o) { + try { return JSON.stringify(o) } catch(e) { return '"[Circular]"' } +} + +module.exports = format + +function format(f, args, opts) { + var ss = (opts && opts.stringify) || tryStringify + var offset = 1 + if (typeof f === 'object' && f !== null) { + var len = args.length + offset + if (len === 1) return f + var objects = new Array(len) + objects[0] = ss(f) + for (var index = 1; index < len; index++) { + objects[index] = ss(args[index]) + } + return objects.join(' ') + } + if (typeof f !== 'string') { + return f + } + var argLen = args.length + if (argLen === 0) return f + var str = '' + var a = 1 - offset + var lastPos = -1 + var flen = (f && f.length) || 0 + for (var i = 0; i < flen;) { + if (f.charCodeAt(i) === 37 && i + 1 < flen) { + lastPos = lastPos > -1 ? 
lastPos : 0
+      switch (f.charCodeAt(i + 1)) {
+        case 100: // 'd'
+        case 102: // 'f'
+          if (a >= argLen)
+            break
+          if (args[a] == null) break
+          if (lastPos < i)
+            str += f.slice(lastPos, i)
+          str += Number(args[a])
+          lastPos = i + 2
+          i++
+          break
+        case 105: // 'i'
+          if (a >= argLen)
+            break
+          if (args[a] == null) break
+          if (lastPos < i)
+            str += f.slice(lastPos, i)
+          str += Math.floor(Number(args[a]))
+          lastPos = i + 2
+          i++
+          break
+        case 79: // 'O'
+        case 111: // 'o'
+        case 106: // 'j'
+          if (a >= argLen)
+            break
+          if (args[a] === undefined) break
+          if (lastPos < i)
+            str += f.slice(lastPos, i)
+          var type = typeof args[a]
+          if (type === 'string') {
+            str += '\'' + args[a] + '\''
+            lastPos = i + 2
+            i++
+            break
+          }
+          if (type === 'function') {
+            str += args[a].name || '<anonymous>'
+            lastPos = i + 2
+            i++
+            break
+          }
+          str += ss(args[a])
+          lastPos = i + 2
+          i++
+          break
+        case 115: // 's'
+          if (a >= argLen)
+            break
+          if (lastPos < i)
+            str += f.slice(lastPos, i)
+          str += String(args[a])
+          lastPos = i + 2
+          i++
+          break
+        case 37: // '%'
+          if (lastPos < i)
+            str += f.slice(lastPos, i)
+          str += '%'
+          lastPos = i + 2
+          i++
+          a--
+          break
+      }
+      ++a
+    }
+    ++i
+  }
+  if (lastPos === -1)
+    return f
+  else if (lastPos < flen) {
+    str += f.slice(lastPos)
+  }
+
+  return str
+}
diff --git a/node_modules/quick-format-unescaped/package.json b/node_modules/quick-format-unescaped/package.json
new file mode 100644
index 0000000..ee60024
--- /dev/null
+++ b/node_modules/quick-format-unescaped/package.json
@@ -0,0 +1,29 @@
+{
+  "name": "quick-format-unescaped",
+  "version": "4.0.4",
+  "description": "Solves a problem with util.format",
+  "main": "index.js",
+  "directories": {
+    "test": "test"
+  },
+  "scripts": {
+    "test": "nyc -- node test",
+    "test:html": "nyc --reporter=html -- node test"
+  },
+  "author": "David Mark Clements",
+  "devDependencies": {
+    "fastbench": "^1.0.1",
+    "nyc": "^15.0.0"
+  },
+  "dependencies": {},
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/davidmarkclements/quick-format.git"
+  },
+  "keywords": [],
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/davidmarkclements/quick-format/issues"
+  },
+  "homepage": "https://github.com/davidmarkclements/quick-format#readme"
+}
diff --git a/node_modules/quick-format-unescaped/readme.md b/node_modules/quick-format-unescaped/readme.md
new file mode 100644
index 0000000..653ddd1
--- /dev/null
+++ b/node_modules/quick-format-unescaped/readme.md
@@ -0,0 +1,66 @@
+# quick-format-unescaped
+
+## unescaped ?
+
+Sometimes you want to embed the results of quick-format into another string,
+and then escape the whole string.
+
+## usage
+
+```js
+var format = require('quick-format-unescaped')
+format('hello %s %j %d', ['world', [{obj: true}, 4, {another: 'obj'}]])
+```
+
+## format(fmt, parameters, [options])
+
+### fmt
+
+A `printf`-like format string. Example: `'hello %s %j %d'`
+
+### parameters
+
+Array of values to be inserted into the `format` string. Example: `['world', {obj:true}]`
+
+### options.stringify
+
+Passing an options object as the third parameter with a `stringify` function will mean
+any objects will be passed to the supplied function instead of the
+internal `tryStringify` function. This can be useful when using augmented
+capability serializers such as [`fast-safe-stringify`](http://github.com/davidmarkclements/fast-safe-stringify) or [`fast-redact`](http://github.com/davidmarkclements/fast-redact).
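+
+For example, a sketch of plugging in a custom `stringify` (the `seen` array is
+only here to illustrate which values reach the hook):
+
+```js
+const format = require('quick-format-unescaped')
+
+const seen = []
+const out = format('user=%j flag=%s', [{ id: 1 }, true], {
+  stringify (obj) {
+    seen.push(obj) // called for values hitting %j/%o/%O (and for object-first calls)
+    return JSON.stringify(obj)
+  }
+})
+
+console.log(out)  // user={"id":1} flag=true
+console.log(seen) // [ { id: 1 } ]
+```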
+ +## caveats + +By default `quick-format-unescaped` uses `JSON.stringify` instead of `util.inspect`; this means functions *will not be serialized*. + +## Benchmarks + +### Node 8.11.2 + +``` +util*100000: 350.325ms +quick*100000: 268.141ms +utilWithTailObj*100000: 586.387ms +quickWithTailObj*100000: 280.200ms +util*100000: 325.735ms +quick*100000: 270.251ms +utilWithTailObj*100000: 492.270ms +quickWithTailObj*100000: 261.797ms +``` + +### Node 10.4.0 + +``` +util*100000: 301.035ms +quick*100000: 217.005ms +utilWithTailObj*100000: 404.778ms +quickWithTailObj*100000: 236.176ms +util*100000: 286.349ms +quick*100000: 214.646ms +utilWithTailObj*100000: 388.574ms +quickWithTailObj*100000: 226.036ms +``` + +## Acknowledgements + +Sponsored by [nearForm](http://www.nearform.com) diff --git a/node_modules/quick-format-unescaped/test/index.js b/node_modules/quick-format-unescaped/test/index.js new file mode 100644 index 0000000..c661fd0 --- /dev/null +++ b/node_modules/quick-format-unescaped/test/index.js @@ -0,0 +1,136 @@ +'use strict'; +const assert = require('assert'); +const format = require('../'); + +// assert.equal(format([]), ''); +// assert.equal(format(['']), ''); +// assert.equal(format([[]]), '[]'); +// assert.equal(format([{}]), '{}'); +// assert.equal(format([null]), 'null'); +// assert.equal(format([true]), 'true'); +// assert.equal(format([false]), 'false'); +// assert.equal(format(['test']), 'test'); + +// // // CHECKME this is for console.log() compatibility - but is it *right*? +// assert.equal(format(['foo', 'bar', 'baz']), 'foo bar baz'); + +const emptyObj = {} +assert.equal(format(emptyObj, []), emptyObj) +assert.equal(format(emptyObj, ['a', 'b', 'c']), '{} "b" "c" ') +assert.equal(format('', ['a']), '') + +// ES6 Symbol handling +const symbol = Symbol('foo') +assert.equal(format(null, [symbol]), null); +assert.equal(format('foo', [symbol]), 'foo'); +assert.equal(format('%s', [symbol]), 'Symbol(foo)'); +assert.equal(format('%j', [symbol]), 'undefined'); +assert.throws(function() { + format('%d', [symbol]); +}, TypeError); + +assert.equal(format('%d', [42.0]), '42'); +assert.equal(format('%d', [42]), '42'); +assert.equal(format('%f', [42.99]), '42.99'); +assert.equal(format('%i', [42.99]), '42'); +assert.equal(format('%s', [42]), '42'); +assert.equal(format('%j', [42]), '42'); + +assert.equal(format('%d', [undefined]), '%d'); +assert.equal(format('%s', [undefined]), 'undefined'); +assert.equal(format('%j', [undefined]), '%j'); + + +assert.equal(format('%d', [null]), '%d'); +assert.equal(format('%i', [null]), '%i'); +assert.equal(format('%s', [null]), 'null'); +assert.equal(format('%j', [null]), 'null'); + + +assert.equal(format('%d', ['42.0']), '42'); +assert.equal(format('%d', ['42']), '42'); +assert.equal(format('%i', ['42']), '42'); +assert.equal(format('%i', ['42.99']), '42'); +assert.equal(format('%s %i', ['foo', 42.99]), 'foo 42'); +assert.equal(format('%d %d', ['42']), '42 %d'); +assert.equal(format('%i %i', ['42']), '42 %i'); +assert.equal(format('%i %i', ['42.99']), '42 %i'); +assert.equal(format('foo %d', ['42']), 'foo 42'); +assert.equal(format('%s', ['42']), '42'); +// assert.equal(format('%j', ['42']), '"42"'); + +// assert.equal(format('%%s%s', ['foo']), '%sfoo'); + +assert.equal(format('%s', []), '%s'); +assert.equal(format('%s', [undefined]), 'undefined'); +assert.equal(format('%s', ['foo']), 'foo'); +assert.equal(format('%s', ['\"quoted\"']), '\"quoted\"'); +assert.equal(format('%j', [{ s: '\"quoted\"' }]), '{\"s\":\"\\"quoted\\"\"}');
+assert.equal(format('%s:%s', []), '%s:%s'); +assert.equal(format('%s:%s', [undefined]), 'undefined:%s'); +assert.equal(format('%s:%s', ['foo']), 'foo:%s'); +assert.equal(format('%s:%s', ['foo', 'bar']), 'foo:bar'); +assert.equal(format('%s:%s', ['foo', 'bar', 'baz']), 'foo:bar'); +assert.equal(format('%s%s', []), '%s%s'); +assert.equal(format('%s%s', [undefined]), 'undefined%s'); +assert.equal(format('%s%s', ['foo']), 'foo%s'); +assert.equal(format('%s%s', ['foo', 'bar']), 'foobar'); +assert.equal(format('%s%s', ['foo', 'bar', 'baz']), 'foobar'); + +assert.equal(format('foo %s', ['foo']), 'foo foo') + +assert.equal(format('foo %o', [{foo: 'foo'}]), 'foo {"foo":"foo"}') +assert.equal(format('foo %O', [{foo: 'foo'}]), 'foo {"foo":"foo"}') +assert.equal(format('foo %j', [{foo: 'foo'}]), 'foo {"foo":"foo"}') +assert.equal(format('foo %j %j', [{foo: 'foo'}]), 'foo {"foo":"foo"} %j') +assert.equal(format('foo %j', ['foo']), 'foo \'foo\'') // TODO: isn't this wrong? +assert.equal(format('foo %j', [function foo () {}]), 'foo foo') +assert.equal(format('foo %j', [function () {}]), 'foo ') +assert.equal(format('foo %j', [{foo: 'foo'}, 'not-printed']), 'foo {"foo":"foo"}') +assert.equal( + format('foo %j', [{ foo: 'foo' }], { stringify () { return 'REPLACED' } }), + 'foo REPLACED' +) +const circularObject = {} +circularObject.foo = circularObject +assert.equal(format('foo %j', [circularObject]), 'foo "[Circular]"') + +// // assert.equal(format(['%%%s%%', 'hi']), '%hi%'); +// // assert.equal(format(['%%%s%%%%', 'hi']), '%hi%%'); + +// (function() { +// var o = {}; +// o.o = o; +// assert.equal(format(['%j', o]), '[Circular]'); +// })(); + +assert.equal(format('%%', ['foo']), '%') +assert.equal(format('foo %%', ['foo']), 'foo %') +assert.equal(format('foo %% %s', ['bar']), 'foo % bar') + +assert.equal(format('%s - %d', ['foo', undefined]), 'foo - %d') +assert.equal(format('%s - %f', ['foo', undefined]), 'foo - %f') +assert.equal(format('%s - %i', ['foo', undefined]), 'foo - %i') +assert.equal(format('%s - %O', ['foo', undefined]), 'foo - %O') +assert.equal(format('%s - %o', ['foo', undefined]), 'foo - %o') +assert.equal(format('%s - %j', ['foo', undefined]), 'foo - %j') +assert.equal(format('%s - %s', ['foo', undefined]), 'foo - undefined') +assert.equal(format('%s - %%', ['foo', undefined]), 'foo - %') + +assert.equal(format('%s - %d', ['foo', null]), 'foo - %d') +assert.equal(format('%s - %f', ['foo', null]), 'foo - %f') +assert.equal(format('%s - %i', ['foo', null]), 'foo - %i') +assert.equal(format('%s - %O', ['foo', null]), 'foo - null') +assert.equal(format('%s - %o', ['foo', null]), 'foo - null') +assert.equal(format('%s - %j', ['foo', null]), 'foo - null') +assert.equal(format('%s - %s', ['foo', null]), 'foo - null') +assert.equal(format('%s - %%', ['foo', null]), 'foo - %') + +assert.equal(format('%d%d', [11, 22]), '1122') +assert.equal(format('%d%s', [11, 22]), '1122') +assert.equal(format('%d%o', [11, { aa: 22 }]), '11{"aa":22}') +assert.equal(format('%d%d%d', [11, 22, 33]), '112233') +assert.equal(format('%d%d%s', [11, 22, 33]), '112233') +assert.equal(format('%d%o%d%s', [11, { aa: 22 }, 33, 'sss']), '11{"aa":22}33sss') +assert.equal(format('%d%%%d', [11, 22]), '11%22') +assert.equal(format('%d%%%s', [11, 22]), '11%22') diff --git a/node_modules/real-require/LICENSE.md b/node_modules/real-require/LICENSE.md new file mode 100644 index 0000000..34592a3 --- /dev/null +++ b/node_modules/real-require/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2021 Paolo Insogna and the 
real-require contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/real-require/README.md b/node_modules/real-require/README.md new file mode 100644 index 0000000..c34cb32 --- /dev/null +++ b/node_modules/real-require/README.md @@ -0,0 +1,51 @@ +# real-require + +[![Package Version](https://img.shields.io/npm/v/real-require.svg)](https://npm.im/real-require) +[![Dependency Status](https://img.shields.io/librariesio/release/npm/real-require)](https://libraries.io/npm/real-require) +[![Build](https://github.com/pinojs/real-require/workflows/CI/badge.svg)](https://github.com/pinojs/real-require/actions?query=workflow%3ACI) + +Keep require and import consistent after bundling or transpiling. + +## Installation + +Just run: + +```bash +npm install real-require +``` + +## Usage + +The package provides two drop-in functions, `realRequire` and `realImport`, which can be used in scenarios where tools like transpilers or bundlers change the native `require` or `await import` calls. + +The `realRequire` function currently only handles webpack, wrapping the `__non_webpack__require__` implementation that webpack provides for the final bundle. + +### Example + +```js +// After bundling, real-require will be embedded in the bundle +const { realImport, realRequire } = require('real-require') + +/* + By using realRequire, at build time the module will not be embedded, and at runtime it will try to load the path from the local filesystem. + This is useful in situations where the build tool does not support skipping modules to embed. +*/ +const { join } = realRequire('path') + +async function main() { + // Similarly, this makes sure the import call is not modified by the build tools + const localFunction = await realImport('./source.js') + + localFunction() +} + +main().catch(console.error) +``` + +## Contributing + +See [CONTRIBUTING.md](./CONTRIBUTING.md) + +## License + +Copyright Paolo Insogna and real-require contributors 2021. Licensed under the [MIT License](http://www.apache.org/licenses/MIT).
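+
+As an addendum to the usage example above, a small hedged sketch of the runtime effect (the `./config.json` path is an assumed example, not part of the package):
+
+```js
+const { realRequire } = require('real-require')
+
+// Webpack rewrites plain require() calls and embeds their targets in the
+// bundle. realRequire defers to webpack's __non_webpack__require__ escape
+// hatch when it exists, so this file is resolved from disk when the bundle
+// actually runs instead of being baked in at build time.
+const runtimeConfig = realRequire('./config.json')
+
+console.log(runtimeConfig)
+```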
diff --git a/node_modules/real-require/package.json b/node_modules/real-require/package.json new file mode 100644 index 0000000..c738ec4 --- /dev/null +++ b/node_modules/real-require/package.json @@ -0,0 +1,49 @@ +{ + "name": "real-require", + "version": "0.2.0", + "description": "Keep require and import consistent after bundling or transpiling", + "author": "Paolo Insogna ", + "homepage": "https://github.com/pinojs/real-require", + "contributors": [ + { + "name": "Paolo Insogna", + "url": "https://github.com/ShogunPanda" + } + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/pinojs/real-require.git" + }, + "bugs": { + "url": "https://github.com/pinojs/real-require/issues" + }, + "main": "src/index.js", + "files": [ + "src" + ], + "scripts": { + "format": "prettier -w src test", + "lint": "eslint src test", + "test": "c8 --reporter=text --reporter=html tap --reporter=spec --no-coverage test/*.test.js", + "test:watch": "tap --watch --reporter=spec --no-browser --coverage-report=text --coverage-report=html test/*.test.js", + "test:ci": "c8 --reporter=text --reporter=json --check-coverage --branches 90 --functions 90 --lines 90 --statements 90 tap --no-color --no-coverage test/*.test.js", + "ci": "npm run lint && npm run test:ci", + "prepublishOnly": "npm run ci", + "postpublish": "git push origin && git push origin -f --tags" + }, + "devDependencies": { + "eslint": "^7.12.0", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.25.2", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.1", + "eslint-plugin-standard": "^5.0.0", + "c8": "^7.10.0", + "prettier": "^2.4.1", + "tap": "^16.0.0" + }, + "engines": { + "node": ">= 12.13.0" + } +} diff --git a/node_modules/real-require/src/index.js b/node_modules/real-require/src/index.js new file mode 100644 index 0000000..f182e1d --- /dev/null +++ b/node_modules/real-require/src/index.js @@ -0,0 +1,14 @@ +/* eslint-disable no-new-func, camelcase */ +/* globals __non_webpack__require__ */ + +const realImport = new Function('modulePath', 'return import(modulePath)') + +function realRequire(modulePath) { + if (typeof __non_webpack__require__ === 'function') { + return __non_webpack__require__(modulePath) + } + + return require(modulePath) +} + +module.exports = { realImport, realRequire } diff --git a/node_modules/safe-stable-stringify/LICENSE b/node_modules/safe-stable-stringify/LICENSE new file mode 100644 index 0000000..99c65e2 --- /dev/null +++ b/node_modules/safe-stable-stringify/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ruben Bridgewater + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/safe-stable-stringify/esm/package.json b/node_modules/safe-stable-stringify/esm/package.json new file mode 100644 index 0000000..4393479 --- /dev/null +++ b/node_modules/safe-stable-stringify/esm/package.json @@ -0,0 +1,4 @@ +{ + "type": "module", + "main": "wrapper.js" +} diff --git a/node_modules/safe-stable-stringify/esm/wrapper.d.ts b/node_modules/safe-stable-stringify/esm/wrapper.d.ts new file mode 100644 index 0000000..2351eb1 --- /dev/null +++ b/node_modules/safe-stable-stringify/esm/wrapper.d.ts @@ -0,0 +1,4 @@ +import { stringify } from '../index.js' + +export * from '../index.js' +export default stringify diff --git a/node_modules/safe-stable-stringify/esm/wrapper.js b/node_modules/safe-stable-stringify/esm/wrapper.js new file mode 100644 index 0000000..0cf01f0 --- /dev/null +++ b/node_modules/safe-stable-stringify/esm/wrapper.js @@ -0,0 +1,6 @@ +import cjsModule from '../index.js' + +export const configure = cjsModule.configure + +export const stringify = cjsModule +export default cjsModule diff --git a/node_modules/safe-stable-stringify/index.d.ts b/node_modules/safe-stable-stringify/index.d.ts new file mode 100644 index 0000000..400fca0 --- /dev/null +++ b/node_modules/safe-stable-stringify/index.d.ts @@ -0,0 +1,22 @@ +export type Replacer = (number | string)[] | null | undefined | ((key: string, value: unknown) => string | number | boolean | null | object) + +export function stringify(value: undefined | symbol | ((...args: unknown[]) => unknown), replacer?: Replacer, space?: string | number): undefined +export function stringify(value: string | number | unknown[] | null | boolean | object, replacer?: Replacer, space?: string | number): string +export function stringify(value: unknown, replacer?: ((key: string, value: unknown) => unknown) | (number | string)[] | null | undefined, space?: string | number): string | undefined + +export interface StringifyOptions { + bigint?: boolean, + circularValue?: string | null | TypeErrorConstructor | ErrorConstructor, + deterministic?: boolean | ((a: string, b: string) => number), + maximumBreadth?: number, + maximumDepth?: number, + strict?: boolean, +} + +export namespace stringify { + export function configure(options: StringifyOptions): typeof stringify +} + +export function configure(options: StringifyOptions): typeof stringify + +export default stringify diff --git a/node_modules/safe-stable-stringify/index.js b/node_modules/safe-stable-stringify/index.js new file mode 100644 index 0000000..9769e75 --- /dev/null +++ b/node_modules/safe-stable-stringify/index.js @@ -0,0 +1,625 @@ +'use strict' + +const { hasOwnProperty } = Object.prototype + +const stringify = configure() + +// @ts-expect-error +stringify.configure = configure +// @ts-expect-error +stringify.stringify = stringify + +// @ts-expect-error +stringify.default = stringify + +// @ts-expect-error used for named export +exports.stringify = stringify +// @ts-expect-error used for named export +exports.configure = configure + +module.exports = stringify + +// eslint-disable-next-line no-control-regex +const strEscapeSequencesRegExp = /[\u0000-\u001f\u0022\u005c\ud800-\udfff]/ + +// Escape C0 control characters, double quotes, the backslash and every code +// unit with a numeric value in the 
inclusive range 0xD800 to 0xDFFF. +function strEscape (str) { + // Some magic numbers that worked out fine while benchmarking with v8 8.0 + if (str.length < 5000 && !strEscapeSequencesRegExp.test(str)) { + return `"${str}"` + } + return JSON.stringify(str) +} + +function sort (array, comparator) { + // Insertion sort is very efficient for small input sizes, but it has a bad + // worst case complexity. Thus, use native array sort for bigger values. + if (array.length > 2e2 || comparator) { + return array.sort(comparator) + } + for (let i = 1; i < array.length; i++) { + const currentValue = array[i] + let position = i + while (position !== 0 && array[position - 1] > currentValue) { + array[position] = array[position - 1] + position-- + } + array[position] = currentValue + } + return array +} + +const typedArrayPrototypeGetSymbolToStringTag = + Object.getOwnPropertyDescriptor( + Object.getPrototypeOf( + Object.getPrototypeOf( + new Int8Array() + ) + ), + Symbol.toStringTag + ).get + +function isTypedArrayWithEntries (value) { + return typedArrayPrototypeGetSymbolToStringTag.call(value) !== undefined && value.length !== 0 +} + +function stringifyTypedArray (array, separator, maximumBreadth) { + if (array.length < maximumBreadth) { + maximumBreadth = array.length + } + const whitespace = separator === ',' ? '' : ' ' + let res = `"0":${whitespace}${array[0]}` + for (let i = 1; i < maximumBreadth; i++) { + res += `${separator}"${i}":${whitespace}${array[i]}` + } + return res +} + +function getCircularValueOption (options) { + if (hasOwnProperty.call(options, 'circularValue')) { + const circularValue = options.circularValue + if (typeof circularValue === 'string') { + return `"${circularValue}"` + } + if (circularValue == null) { + return circularValue + } + if (circularValue === Error || circularValue === TypeError) { + return { + toString () { + throw new TypeError('Converting circular structure to JSON') + } + } + } + throw new TypeError('The "circularValue" argument must be of type string or the value null or undefined') + } + return '"[Circular]"' +} + +function getDeterministicOption (options) { + let value + if (hasOwnProperty.call(options, 'deterministic')) { + value = options.deterministic + if (typeof value !== 'boolean' && typeof value !== 'function') { + throw new TypeError('The "deterministic" argument must be of type boolean or comparator function') + } + } + return value === undefined ? true : value +} + +function getBooleanOption (options, key) { + let value + if (hasOwnProperty.call(options, key)) { + value = options[key] + if (typeof value !== 'boolean') { + throw new TypeError(`The "${key}" argument must be of type boolean`) + } + } + return value === undefined ? true : value +} + +function getPositiveIntegerOption (options, key) { + let value + if (hasOwnProperty.call(options, key)) { + value = options[key] + if (typeof value !== 'number') { + throw new TypeError(`The "${key}" argument must be of type number`) + } + if (!Number.isInteger(value)) { + throw new TypeError(`The "${key}" argument must be an integer`) + } + if (value < 1) { + throw new RangeError(`The "${key}" argument must be >= 1`) + } + } + return value === undefined ? 
Infinity : value +} + +function getItemCount (number) { + if (number === 1) { + return '1 item' + } + return `${number} items` +} + +function getUniqueReplacerSet (replacerArray) { + const replacerSet = new Set() + for (const value of replacerArray) { + if (typeof value === 'string' || typeof value === 'number') { + replacerSet.add(String(value)) + } + } + return replacerSet +} + +function getStrictOption (options) { + if (hasOwnProperty.call(options, 'strict')) { + const value = options.strict + if (typeof value !== 'boolean') { + throw new TypeError('The "strict" argument must be of type boolean') + } + if (value) { + return (value) => { + let message = `Object can not safely be stringified. Received type ${typeof value}` + if (typeof value !== 'function') message += ` (${value.toString()})` + throw new Error(message) + } + } + } +} + +function configure (options) { + options = { ...options } + const fail = getStrictOption(options) + if (fail) { + if (options.bigint === undefined) { + options.bigint = false + } + if (!('circularValue' in options)) { + options.circularValue = Error + } + } + const circularValue = getCircularValueOption(options) + const bigint = getBooleanOption(options, 'bigint') + const deterministic = getDeterministicOption(options) + const comparator = typeof deterministic === 'function' ? deterministic : undefined + const maximumDepth = getPositiveIntegerOption(options, 'maximumDepth') + const maximumBreadth = getPositiveIntegerOption(options, 'maximumBreadth') + + function stringifyFnReplacer (key, parent, stack, replacer, spacer, indentation) { + let value = parent[key] + + if (typeof value === 'object' && value !== null && typeof value.toJSON === 'function') { + value = value.toJSON(key) + } + value = replacer.call(parent, key, value) + + switch (typeof value) { + case 'string': + return strEscape(value) + case 'object': { + if (value === null) { + return 'null' + } + if (stack.indexOf(value) !== -1) { + return circularValue + } + + let res = '' + let join = ',' + const originalIndentation = indentation + + if (Array.isArray(value)) { + if (value.length === 0) { + return '[]' + } + if (maximumDepth < stack.length + 1) { + return '"[Array]"' + } + stack.push(value) + if (spacer !== '') { + indentation += spacer + res += `\n${indentation}` + join = `,\n${indentation}` + } + const maximumValuesToStringify = Math.min(value.length, maximumBreadth) + let i = 0 + for (; i < maximumValuesToStringify - 1; i++) { + const tmp = stringifyFnReplacer(String(i), value, stack, replacer, spacer, indentation) + res += tmp !== undefined ? tmp : 'null' + res += join + } + const tmp = stringifyFnReplacer(String(i), value, stack, replacer, spacer, indentation) + res += tmp !== undefined ? tmp : 'null' + if (value.length - 1 > maximumBreadth) { + const removedKeys = value.length - maximumBreadth - 1 + res += `${join}"... 
${getItemCount(removedKeys)} not stringified"` + } + if (spacer !== '') { + res += `\n${originalIndentation}` + } + stack.pop() + return `[${res}]` + } + + let keys = Object.keys(value) + const keyLength = keys.length + if (keyLength === 0) { + return '{}' + } + if (maximumDepth < stack.length + 1) { + return '"[Object]"' + } + let whitespace = '' + let separator = '' + if (spacer !== '') { + indentation += spacer + join = `,\n${indentation}` + whitespace = ' ' + } + const maximumPropertiesToStringify = Math.min(keyLength, maximumBreadth) + if (deterministic && !isTypedArrayWithEntries(value)) { + keys = sort(keys, comparator) + } + stack.push(value) + for (let i = 0; i < maximumPropertiesToStringify; i++) { + const key = keys[i] + const tmp = stringifyFnReplacer(key, value, stack, replacer, spacer, indentation) + if (tmp !== undefined) { + res += `${separator}${strEscape(key)}:${whitespace}${tmp}` + separator = join + } + } + if (keyLength > maximumBreadth) { + const removedKeys = keyLength - maximumBreadth + res += `${separator}"...":${whitespace}"${getItemCount(removedKeys)} not stringified"` + separator = join + } + if (spacer !== '' && separator.length > 1) { + res = `\n${indentation}${res}\n${originalIndentation}` + } + stack.pop() + return `{${res}}` + } + case 'number': + return isFinite(value) ? String(value) : fail ? fail(value) : 'null' + case 'boolean': + return value === true ? 'true' : 'false' + case 'undefined': + return undefined + case 'bigint': + if (bigint) { + return String(value) + } + // fallthrough + default: + return fail ? fail(value) : undefined + } + } + + function stringifyArrayReplacer (key, value, stack, replacer, spacer, indentation) { + if (typeof value === 'object' && value !== null && typeof value.toJSON === 'function') { + value = value.toJSON(key) + } + + switch (typeof value) { + case 'string': + return strEscape(value) + case 'object': { + if (value === null) { + return 'null' + } + if (stack.indexOf(value) !== -1) { + return circularValue + } + + const originalIndentation = indentation + let res = '' + let join = ',' + + if (Array.isArray(value)) { + if (value.length === 0) { + return '[]' + } + if (maximumDepth < stack.length + 1) { + return '"[Array]"' + } + stack.push(value) + if (spacer !== '') { + indentation += spacer + res += `\n${indentation}` + join = `,\n${indentation}` + } + const maximumValuesToStringify = Math.min(value.length, maximumBreadth) + let i = 0 + for (; i < maximumValuesToStringify - 1; i++) { + const tmp = stringifyArrayReplacer(String(i), value[i], stack, replacer, spacer, indentation) + res += tmp !== undefined ? tmp : 'null' + res += join + } + const tmp = stringifyArrayReplacer(String(i), value[i], stack, replacer, spacer, indentation) + res += tmp !== undefined ? tmp : 'null' + if (value.length - 1 > maximumBreadth) { + const removedKeys = value.length - maximumBreadth - 1 + res += `${join}"... 
${getItemCount(removedKeys)} not stringified"` + } + if (spacer !== '') { + res += `\n${originalIndentation}` + } + stack.pop() + return `[${res}]` + } + stack.push(value) + let whitespace = '' + if (spacer !== '') { + indentation += spacer + join = `,\n${indentation}` + whitespace = ' ' + } + let separator = '' + for (const key of replacer) { + const tmp = stringifyArrayReplacer(key, value[key], stack, replacer, spacer, indentation) + if (tmp !== undefined) { + res += `${separator}${strEscape(key)}:${whitespace}${tmp}` + separator = join + } + } + if (spacer !== '' && separator.length > 1) { + res = `\n${indentation}${res}\n${originalIndentation}` + } + stack.pop() + return `{${res}}` + } + case 'number': + return isFinite(value) ? String(value) : fail ? fail(value) : 'null' + case 'boolean': + return value === true ? 'true' : 'false' + case 'undefined': + return undefined + case 'bigint': + if (bigint) { + return String(value) + } + // fallthrough + default: + return fail ? fail(value) : undefined + } + } + + function stringifyIndent (key, value, stack, spacer, indentation) { + switch (typeof value) { + case 'string': + return strEscape(value) + case 'object': { + if (value === null) { + return 'null' + } + if (typeof value.toJSON === 'function') { + value = value.toJSON(key) + // Prevent calling `toJSON` again. + if (typeof value !== 'object') { + return stringifyIndent(key, value, stack, spacer, indentation) + } + if (value === null) { + return 'null' + } + } + if (stack.indexOf(value) !== -1) { + return circularValue + } + const originalIndentation = indentation + + if (Array.isArray(value)) { + if (value.length === 0) { + return '[]' + } + if (maximumDepth < stack.length + 1) { + return '"[Array]"' + } + stack.push(value) + indentation += spacer + let res = `\n${indentation}` + const join = `,\n${indentation}` + const maximumValuesToStringify = Math.min(value.length, maximumBreadth) + let i = 0 + for (; i < maximumValuesToStringify - 1; i++) { + const tmp = stringifyIndent(String(i), value[i], stack, spacer, indentation) + res += tmp !== undefined ? tmp : 'null' + res += join + } + const tmp = stringifyIndent(String(i), value[i], stack, spacer, indentation) + res += tmp !== undefined ? tmp : 'null' + if (value.length - 1 > maximumBreadth) { + const removedKeys = value.length - maximumBreadth - 1 + res += `${join}"... 
${getItemCount(removedKeys)} not stringified"` + } + res += `\n${originalIndentation}` + stack.pop() + return `[${res}]` + } + + let keys = Object.keys(value) + const keyLength = keys.length + if (keyLength === 0) { + return '{}' + } + if (maximumDepth < stack.length + 1) { + return '"[Object]"' + } + indentation += spacer + const join = `,\n${indentation}` + let res = '' + let separator = '' + let maximumPropertiesToStringify = Math.min(keyLength, maximumBreadth) + if (isTypedArrayWithEntries(value)) { + res += stringifyTypedArray(value, join, maximumBreadth) + keys = keys.slice(value.length) + maximumPropertiesToStringify -= value.length + separator = join + } + if (deterministic) { + keys = sort(keys, comparator) + } + stack.push(value) + for (let i = 0; i < maximumPropertiesToStringify; i++) { + const key = keys[i] + const tmp = stringifyIndent(key, value[key], stack, spacer, indentation) + if (tmp !== undefined) { + res += `${separator}${strEscape(key)}: ${tmp}` + separator = join + } + } + if (keyLength > maximumBreadth) { + const removedKeys = keyLength - maximumBreadth + res += `${separator}"...": "${getItemCount(removedKeys)} not stringified"` + separator = join + } + if (separator !== '') { + res = `\n${indentation}${res}\n${originalIndentation}` + } + stack.pop() + return `{${res}}` + } + case 'number': + return isFinite(value) ? String(value) : fail ? fail(value) : 'null' + case 'boolean': + return value === true ? 'true' : 'false' + case 'undefined': + return undefined + case 'bigint': + if (bigint) { + return String(value) + } + // fallthrough + default: + return fail ? fail(value) : undefined + } + } + + function stringifySimple (key, value, stack) { + switch (typeof value) { + case 'string': + return strEscape(value) + case 'object': { + if (value === null) { + return 'null' + } + if (typeof value.toJSON === 'function') { + value = value.toJSON(key) + // Prevent calling `toJSON` again + if (typeof value !== 'object') { + return stringifySimple(key, value, stack) + } + if (value === null) { + return 'null' + } + } + if (stack.indexOf(value) !== -1) { + return circularValue + } + + let res = '' + + const hasLength = value.length !== undefined + if (hasLength && Array.isArray(value)) { + if (value.length === 0) { + return '[]' + } + if (maximumDepth < stack.length + 1) { + return '"[Array]"' + } + stack.push(value) + const maximumValuesToStringify = Math.min(value.length, maximumBreadth) + let i = 0 + for (; i < maximumValuesToStringify - 1; i++) { + const tmp = stringifySimple(String(i), value[i], stack) + res += tmp !== undefined ? tmp : 'null' + res += ',' + } + const tmp = stringifySimple(String(i), value[i], stack) + res += tmp !== undefined ? tmp : 'null' + if (value.length - 1 > maximumBreadth) { + const removedKeys = value.length - maximumBreadth - 1 + res += `,"... 
${getItemCount(removedKeys)} not stringified"` + } + stack.pop() + return `[${res}]` + } + + let keys = Object.keys(value) + const keyLength = keys.length + if (keyLength === 0) { + return '{}' + } + if (maximumDepth < stack.length + 1) { + return '"[Object]"' + } + let separator = '' + let maximumPropertiesToStringify = Math.min(keyLength, maximumBreadth) + if (hasLength && isTypedArrayWithEntries(value)) { + res += stringifyTypedArray(value, ',', maximumBreadth) + keys = keys.slice(value.length) + maximumPropertiesToStringify -= value.length + separator = ',' + } + if (deterministic) { + keys = sort(keys, comparator) + } + stack.push(value) + for (let i = 0; i < maximumPropertiesToStringify; i++) { + const key = keys[i] + const tmp = stringifySimple(key, value[key], stack) + if (tmp !== undefined) { + res += `${separator}${strEscape(key)}:${tmp}` + separator = ',' + } + } + if (keyLength > maximumBreadth) { + const removedKeys = keyLength - maximumBreadth + res += `${separator}"...":"${getItemCount(removedKeys)} not stringified"` + } + stack.pop() + return `{${res}}` + } + case 'number': + return isFinite(value) ? String(value) : fail ? fail(value) : 'null' + case 'boolean': + return value === true ? 'true' : 'false' + case 'undefined': + return undefined + case 'bigint': + if (bigint) { + return String(value) + } + // fallthrough + default: + return fail ? fail(value) : undefined + } + } + + function stringify (value, replacer, space) { + if (arguments.length > 1) { + let spacer = '' + if (typeof space === 'number') { + spacer = ' '.repeat(Math.min(space, 10)) + } else if (typeof space === 'string') { + spacer = space.slice(0, 10) + } + if (replacer != null) { + if (typeof replacer === 'function') { + return stringifyFnReplacer('', { '': value }, [], replacer, spacer, '') + } + if (Array.isArray(replacer)) { + return stringifyArrayReplacer('', value, [], getUniqueReplacerSet(replacer), spacer, '') + } + } + if (spacer.length !== 0) { + return stringifyIndent('', value, [], spacer, '') + } + } + return stringifySimple('', value, []) + } + + return stringify +} diff --git a/node_modules/safe-stable-stringify/package.json b/node_modules/safe-stable-stringify/package.json new file mode 100644 index 0000000..59cf825 --- /dev/null +++ b/node_modules/safe-stable-stringify/package.json @@ -0,0 +1,65 @@ +{ + "name": "safe-stable-stringify", + "version": "2.5.0", + "description": "Deterministic and safely JSON.stringify to quickly serialize JavaScript objects", + "exports": { + "require": "./index.js", + "import": "./esm/wrapper.js" + }, + "keywords": [ + "stable", + "stringify", + "JSON", + "JSON.stringify", + "safe", + "serialize", + "deterministic", + "circular", + "object", + "predicable", + "repeatable", + "fast", + "bigint" + ], + "main": "index.js", + "scripts": { + "test": "standard && tap test.js", + "tap": "tap test.js", + "tap:only": "tap test.js --watch --only", + "benchmark": "node benchmark.js", + "compare": "node compare.js", + "lint": "standard --fix", + "tsc": "tsc --project tsconfig.json" + }, + "engines": { + "node": ">=10" + }, + "author": "Ruben Bridgewater", + "license": "MIT", + "typings": "index.d.ts", + "devDependencies": { + "@types/json-stable-stringify": "^1.0.34", + "@types/node": "^18.11.18", + "benchmark": "^2.1.4", + "clone": "^2.1.2", + "fast-json-stable-stringify": "^2.1.0", + "fast-safe-stringify": "^2.1.1", + "fast-stable-stringify": "^1.0.0", + "faster-stable-stringify": "^1.0.0", + "fastest-stable-stringify": "^2.0.2", + "json-stable-stringify": "^1.0.1", + 
"json-stringify-deterministic": "^1.0.7", + "json-stringify-safe": "^5.0.1", + "standard": "^16.0.4", + "tap": "^15.0.9", + "typescript": "^4.8.3" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/BridgeAR/safe-stable-stringify.git" + }, + "bugs": { + "url": "https://github.com/BridgeAR/safe-stable-stringify/issues" + }, + "homepage": "https://github.com/BridgeAR/safe-stable-stringify#readme" +} diff --git a/node_modules/safe-stable-stringify/readme.md b/node_modules/safe-stable-stringify/readme.md new file mode 100644 index 0000000..9baa477 --- /dev/null +++ b/node_modules/safe-stable-stringify/readme.md @@ -0,0 +1,179 @@ +# safe-stable-stringify + +Safe, deterministic and fast serialization alternative to [JSON.stringify][]. +Zero dependencies. ESM and CJS. 100% coverage. + +Gracefully handles circular structures and bigint instead of throwing. + +Optional custom circular values, deterministic behavior or strict JSON +compatibility check. + +## stringify(value[, replacer[, space]]) + +The same as [JSON.stringify][]. + +* `value` {any} +* `replacer` {string[]|function|null} +* `space` {number|string} +* Returns: {string} + +```js +const stringify = require('safe-stable-stringify') + +const bigint = { a: 0, c: 2n, b: 1 } + +stringify(bigint) +// '{"a":0,"b":1,"c":2}' +JSON.stringify(bigint) +// TypeError: Do not know how to serialize a BigInt + +const circular = { b: 1, a: 0 } +circular.circular = circular + +stringify(circular) +// '{"a":0,"b":1,"circular":"[Circular]"}' +JSON.stringify(circular) +// TypeError: Converting circular structure to JSON + +stringify(circular, ['a', 'b'], 2) +// { +// "a": 0, +// "b": 1 +// } +``` + +## stringify.configure(options) + +* `bigint` {boolean} If `true`, bigint values are converted to a number. Otherwise + they are ignored. **Default:** `true`. +* `circularValue` {string|null|undefined|ErrorConstructor} Defines the value for + circular references. Set to `undefined`, circular properties are not + serialized (array entries are replaced with `null`). Set to `Error`, to throw + on circular references. **Default:** `'[Circular]'`. +* `deterministic` {boolean|function} If `true` or a `Array#sort(comparator)` + comparator method, guarantee a deterministic key order instead of relying on + the insertion order. **Default:** `true`. +* `maximumBreadth` {number} Maximum number of entries to serialize per object + (at least one). The serialized output contains information about how many + entries have not been serialized. Ignored properties are counted as well + (e.g., properties with symbol values). Using the array replacer overrules this + option. **Default:** `Infinity` +* `maximumDepth` {number} Maximum number of object nesting levels (at least 1) + that will be serialized. Objects at the maximum level are serialized as + `'[Object]'` and arrays as `'[Array]'`. **Default:** `Infinity` +* `strict` {boolean} Instead of handling any JSON value gracefully, throw an + error in case it may not be represented as JSON (functions, NaN, ...). + Circular values and bigint values throw as well in case either option is not + explicitly defined. Sets and Maps are not detected as well as Symbol keys! + **Default:** `false` +* Returns: {function} A stringify function with the options applied. 
+ +```js +import { configure } from 'safe-stable-stringify' + +const stringify = configure({ + bigint: true, + circularValue: 'Magic circle!', + deterministic: false, + maximumDepth: 1, + maximumBreadth: 4 +}) + +const circular = { + bigint: 999_999_999_999_999_999n, + typed: new Uint8Array(3), + deterministic: "I don't think so", +} +circular.circular = circular +circular.ignored = true +circular.alsoIgnored = 'Yes!' + +const stringified = stringify(circular, null, 4) + +console.log(stringified) +// { +// "bigint": 999999999999999999, +// "typed": "[Object]", +// "deterministic": "I don't think so", +// "circular": "Magic circle!", +// "...": "2 items not stringified" +// } + +const throwOnCircular = configure({ + circularValue: Error +}) + +throwOnCircular(circular); +// TypeError: Converting circular structure to JSON +``` + +## Differences to JSON.stringify + +1. _Circular values_ are replaced with the string `[Circular]` (configurable). +1. _Object keys_ are sorted instead of using the insertion order (configurable). +1. _BigInt_ values are stringified as a regular number instead of throwing a + TypeError (configurable). +1. _Boxed primitives_ (e.g., `new Number(5)`) are not unboxed and are handled as + a regular object. + +Those are the only differences from `JSON.stringify()`. This is a side-effect-free +variant, and [`toJSON`][], the [`replacer`][] and the [`spacer`][] work the same as +with `JSON.stringify()`. + +## Performance / Benchmarks + +Currently this is by far the fastest known stable (deterministic) stringify +implementation. This is especially important for big objects and TypedArrays. + +(Dell Precision 5540, i7-9850H CPU @ 2.60GHz, Node.js 16.11.1) + +```md +simple: simple object x 3,463,894 ops/sec ±0.44% (98 runs sampled) +simple: circular x 1,236,007 ops/sec ±0.46% (99 runs sampled) +simple: deep x 18,942 ops/sec ±0.41% (93 runs sampled) +simple: deep circular x 18,690 ops/sec ±0.72% (96 runs sampled) + +replacer: simple object x 2,664,940 ops/sec ±0.31% (98 runs sampled) +replacer: circular x 1,015,981 ops/sec ±0.09% (99 runs sampled) +replacer: deep x 17,328 ops/sec ±0.38% (97 runs sampled) +replacer: deep circular x 17,071 ops/sec ±0.21% (98 runs sampled) + +array: simple object x 3,869,608 ops/sec ±0.22% (98 runs sampled) +array: circular x 3,853,943 ops/sec ±0.45% (96 runs sampled) +array: deep x 3,563,227 ops/sec ±0.20% (100 runs sampled) +array: deep circular x 3,286,475 ops/sec ±0.07% (100 runs sampled) + +indentation: simple object x 2,183,162 ops/sec ±0.66% (97 runs sampled) +indentation: circular x 872,538 ops/sec ±0.57% (98 runs sampled) +indentation: deep x 16,795 ops/sec ±0.48% (93 runs sampled) +indentation: deep circular x 16,443 ops/sec ±0.40% (97 runs sampled) +``` + +Comparing `safe-stable-stringify` with known alternatives: + +```md +fast-json-stable-stringify x 18,765 ops/sec ±0.71% (94 runs sampled) +json-stable-stringify x 13,870 ops/sec ±0.72% (94 runs sampled) +fast-stable-stringify x 21,343 ops/sec ±0.33% (95 runs sampled) +faster-stable-stringify x 17,707 ops/sec ±0.44% (97 runs sampled) +json-stringify-deterministic x 11,208 ops/sec ±0.57% (98 runs sampled) +fast-safe-stringify x 21,460 ops/sec ±0.75% (99 runs sampled) +this x 30,367 ops/sec ±0.39% (96 runs sampled) + +The fastest is this +``` + +The `fast-safe-stringify` comparison uses the module's stable implementation.
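+
+Looping back to the differences listed above, a brief sketch (outputs in the comments follow the documented behavior; `new Number(5)` is just an illustrative boxed value):
+
+```js
+const stringify = require('safe-stable-stringify')
+
+// 2. Keys are sorted deterministically instead of insertion order.
+stringify({ b: 1, a: 0 })
+// '{"a":0,"b":1}'
+
+// 4. Boxed primitives are handled as regular objects, not unboxed.
+stringify(new Number(5))
+// '{}'
+JSON.stringify(new Number(5))
+// '5'
+```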
+ +## Acknowledgements + +Sponsored by [MaibornWolff](https://www.maibornwolff.de/) and [nearForm](http://nearform.com) + +## License + +MIT + +[`replacer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20replacer%20parameter +[`spacer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20space%20argument +[`toJSON`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON()_behavior +[JSON.stringify]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify diff --git a/node_modules/secure-json-parse/.airtap.yml b/node_modules/secure-json-parse/.airtap.yml new file mode 100644 index 0000000..f5fc1fe --- /dev/null +++ b/node_modules/secure-json-parse/.airtap.yml @@ -0,0 +1,7 @@ +providers: + - airtap-playwright + +browsers: + - name: chromium + supports: + headless: true diff --git a/node_modules/secure-json-parse/.gitattributes b/node_modules/secure-json-parse/.gitattributes new file mode 100644 index 0000000..a0e7df9 --- /dev/null +++ b/node_modules/secure-json-parse/.gitattributes @@ -0,0 +1,2 @@ +# Set default behavior to automatically convert line endings +* text=auto eol=lf diff --git a/node_modules/secure-json-parse/.github/dependabot.yml b/node_modules/secure-json-parse/.github/dependabot.yml new file mode 100644 index 0000000..35d66ca --- /dev/null +++ b/node_modules/secure-json-parse/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 diff --git a/node_modules/secure-json-parse/.github/stale.yml b/node_modules/secure-json-parse/.github/stale.yml new file mode 100644 index 0000000..d51ce63 --- /dev/null +++ b/node_modules/secure-json-parse/.github/stale.yml @@ -0,0 +1,21 @@ +# Number of days of inactivity before an issue becomes stale +daysUntilStale: 15 +# Number of days of inactivity before a stale issue is closed +daysUntilClose: 7 +# Issues with these labels will never be considered stale +exemptLabels: + - "discussion" + - "feature request" + - "bug" + - "help wanted" + - "plugin suggestion" + - "good first issue" +# Label to use when marking an issue as stale +staleLabel: stale +# Comment to post when marking an issue as stale. Set to `false` to disable +markComment: > + This issue has been automatically marked as stale because it has not had + recent activity. It will be closed if no further activity occurs. Thank you + for your contributions. +# Comment to post when closing a stale issue. 
Set to `false` to disable +closeComment: false diff --git a/node_modules/secure-json-parse/.github/workflows/ci.yml b/node_modules/secure-json-parse/.github/workflows/ci.yml new file mode 100644 index 0000000..92ca21c --- /dev/null +++ b/node_modules/secure-json-parse/.github/workflows/ci.yml @@ -0,0 +1,149 @@ +name: CI + +on: + push: + branches: + - main + - next + - 'v*' + paths-ignore: + - 'docs/**' + - '*.md' + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + +permissions: + contents: read + +jobs: + dependency-review: + name: Dependency Review + if: github.event_name == 'pull_request' + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v5 + with: + persist-credentials: false + + - name: Dependency review + uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0 + + lint: + name: Lint Code + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v5 + with: + persist-credentials: false + + - name: Setup Node + uses: actions/setup-node@v5 + with: + check-latest: true + node-version: lts/* + + - name: Install dependencies + run: npm i --ignore-scripts + + - name: Lint code + run: npm run lint + + browsers: + name: Test Browsers + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v5 + with: + persist-credentials: false + + - name: Setup Node + uses: actions/setup-node@v5 + with: + check-latest: true + node-version: lts/* + + - name: Install dependencies + run: npm i + + - name: Install Playwright + run: npx playwright install + + - name: Run tests + run: npm run test:browser + + test: + name: Test + runs-on: ubuntu-latest + permissions: + contents: read + strategy: + matrix: + node-version: [20, 22, 24] + steps: + - name: Check out repo + uses: actions/checkout@v5 + with: + persist-credentials: false + + - name: Setup Node ${{ matrix.node-version }} + uses: actions/setup-node@v5 + with: + check-latest: true + node-version: ${{ matrix.node-version }} + + - name: Install dependencies + run: npm i --ignore-scripts + + - name: Run tests + run: npm run test:unit + + typescript: + name: Test TypeScript + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Check out repo + uses: actions/checkout@v5 + with: + persist-credentials: false + + - name: Setup Node + uses: actions/setup-node@v5 + with: + check-latest: true + node-version: lts/* + + - name: Install dependencies + run: npm i --ignore-scripts + + - name: tsd + run: npm run test:typescript + + automerge: + name: Automerge Dependabot PRs + if: > + github.event_name == 'pull_request' && + github.event.pull_request.head.repo.full_name == github.repository && + github.event.pull_request.user.login == 'dependabot[bot]' + needs: [browsers, lint, test, typescript] + permissions: + pull-requests: write + contents: write + runs-on: ubuntu-latest + steps: + - uses: fastify/github-action-merge-dependabot@e820d631adb1d8ab16c3b93e5afe713450884a4a # v3.11.1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + target: major diff --git a/node_modules/secure-json-parse/LICENSE b/node_modules/secure-json-parse/LICENSE new file mode 100644 index 0000000..95f28ce --- /dev/null +++ b/node_modules/secure-json-parse/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2019, Sideway Inc, and project contributors +Copyright (c) 2019-present The Fastify team +All rights reserved. 
+ +The Fastify team members are listed at https://github.com/fastify/fastify#team. + +The complete list of contributors can be found at: +- https://github.com/hapijs/bourne/graphs/contributors +- https://github.com/fastify/secure-json-parse/graphs/contributors + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/secure-json-parse/README.md b/node_modules/secure-json-parse/README.md new file mode 100644 index 0000000..342bbcd --- /dev/null +++ b/node_modules/secure-json-parse/README.md @@ -0,0 +1,132 @@ +# secure-json-parse + +[![CI](https://github.com/fastify/secure-json-parse/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/fastify/secure-json-parse/actions/workflows/ci.yml) +[![NPM version](https://img.shields.io/npm/v/secure-json-parse.svg?style=flat)](https://www.npmjs.com/package/secure-json-parse) +[![neostandard javascript style](https://img.shields.io/badge/code_style-neostandard-brightgreen?style=flat)](https://github.com/neostandard/neostandard) + +`JSON.parse()` drop-in replacement with prototype poisoning protection. + +## Introduction + +Consider this: + +```js +> const a = '{"__proto__":{ "b":5}}'; +'{"__proto__":{ "b":5}}' + +> const b = JSON.parse(a); +{ __proto__: { b: 5 } } + +> b.b; +undefined + +> const c = Object.assign({}, b); +{} + +> c.b +5 +``` + +The problem is that `JSON.parse()` retains the `__proto__` property as a plain object key. By +itself, this is not a security issue. However, as soon as that object is assigned to another or +iterated on and its values copied, the `__proto__` property leaks and becomes the object's prototype. + +## Install +``` +npm i secure-json-parse +``` + +## Usage + +Pass the options object as a second (or third) parameter to configure the action to take in case of bad JSON. If nothing is configured, the default is to throw a `SyntaxError`.
+You can choose which action to perform in case `__proto__` is present, and in case `constructor.prototype` is present. + +```js +const sjson = require('secure-json-parse') + +const goodJson = '{ "a": 5, "b": 6 }' +const badJson = '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "constructor": {"prototype": {"bar": "baz"} } }' + +console.log(JSON.parse(goodJson), sjson.parse(goodJson, undefined, { protoAction: 'remove', constructorAction: 'remove' })) +console.log(JSON.parse(badJson), sjson.parse(badJson, undefined, { protoAction: 'remove', constructorAction: 'remove' })) +``` + +## API + +### `sjson.parse(text, [reviver], [options])` + +Parses a given JSON-formatted text into an object where: +- `text` - the JSON text string. +- `reviver` - the `JSON.parse()` optional `reviver` argument. +- `options` - optional configuration object where: + - `protoAction` - optional string with one of: + - `'error'` - throw a `SyntaxError` when a `__proto__` key is found. This is the default value. + - `'remove'` - deletes any `__proto__` keys from the result object. + - `'ignore'` - skips all validation (same as calling `JSON.parse()` directly). + - `constructorAction` - optional string with one of: + - `'error'` - throw a `SyntaxError` when a `constructor.prototype` key is found. This is the default value. + - `'remove'` - deletes any `constructor` keys from the result object. + - `'ignore'` - skips all validation (same as calling `JSON.parse()` directly). + - `safe` - optional boolean: + - `true` - returns `null` instead of throwing when a forbidden prototype property is found. + - `false` - default behavior (throws or removes based on `protoAction`/`constructorAction`). + +### `sjson.scan(obj, [options])` + +Scans a given object for prototype properties where: +- `obj` - the object being scanned. +- `options` - optional configuration object where: + - `protoAction` - optional string with one of: + - `'error'` - throw a `SyntaxError` when a `__proto__` key is found. This is the default value. + - `'remove'` - deletes any `__proto__` keys from the input `obj`. + - `constructorAction` - optional string with one of: + - `'error'` - throw a `SyntaxError` when a `constructor.prototype` key is found. This is the default value. + - `'remove'` - deletes any `constructor` keys from the input `obj`. + - `safe` - optional boolean: + - `true` - returns `null` instead of throwing when a forbidden prototype property is found. + - `false` - default behavior (throws or removes based on `protoAction`/`constructorAction`). 
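+
+Since the usage example above only covers `parse`, here is a minimal sketch of `scan` on an already-parsed object, based on the option descriptions above:
+
+```js
+const sjson = require('secure-json-parse')
+
+// An object assembled elsewhere, e.g. parsed without protection.
+const payload = JSON.parse('{"a":1,"__proto__":{"x":7}}')
+
+// By default, scan throws a SyntaxError when __proto__ is found...
+try {
+  sjson.scan(payload)
+} catch (err) {
+  console.error(err.message)
+}
+
+// ...while protoAction: 'remove' strips the key from the input in place.
+sjson.scan(payload, { protoAction: 'remove' })
+console.log(payload) // { a: 1 }
+```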
+ +## Benchmarks + +Machine: 2.7 GHz Quad-Core Intel Core i7 + +``` +v14.8.0 + +> node ignore.js + +JSON.parse x 679,376 ops/sec ±1.15% (84 runs sampled) +secure-json-parse x 649,605 ops/sec ±0.58% (87 runs sampled) +reviver x 244,414 ops/sec ±1.05% (88 runs sampled) +Fastest is JSON.parse + +> node no__proto__.js + +JSON.parse x 652,190 ops/sec ±0.67% (86 runs sampled) +secure-json-parse x 589,785 ops/sec ±1.01% (88 runs sampled) +reviver x 218,075 ops/sec ±1.58% (87 runs sampled) +Fastest is JSON.parse + +> node remove.js + +JSON.parse x 683,527 ops/sec ±0.62% (88 runs sampled) +secure-json-parse x 316,926 ops/sec ±0.63% (87 runs sampled) +reviver x 214,167 ops/sec ±0.63% (86 runs sampled) +Fastest is JSON.parse + +> node throw.js + +JSON.parse x 682,548 ops/sec ±0.60% (88 runs sampled) +JSON.parse error x 170,716 ops/sec ±0.93% (87 runs sampled) +secure-json-parse x 104,483 ops/sec ±0.62% (87 runs sampled) +reviver x 114,197 ops/sec ±0.63% (87 runs sampled) +Fastest is JSON.parse +``` + +## Acknowledgments +This project has been forked from [hapijs/bourne](https://github.com/hapijs/bourne). +All credit before commit [4690682](https://github.com/hapijs/bourne/commit/4690682c6cdaa06590da7b2485d5df91c09da889) goes to the hapijs/bourne project contributors. +Since then, the project has been maintained by the Fastify team. + +## License +Licensed under [BSD-3-Clause](./LICENSE). diff --git a/node_modules/secure-json-parse/benchmarks/ignore.js b/node_modules/secure-json-parse/benchmarks/ignore.js new file mode 100644 index 0000000..c07a1f8 --- /dev/null +++ b/node_modules/secure-json-parse/benchmarks/ignore.js @@ -0,0 +1,35 @@ +'use strict' + +const Benchmark = require('benchmark') +const sjson = require('..') + +const internals = { + text: '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' +} + +const suite = new Benchmark.Suite() + +suite + .add('JSON.parse', () => { + JSON.parse(internals.text) + }) + .add('secure-json-parse parse', () => { + sjson.parse(internals.text, { protoAction: 'ignore' }) + }) + .add('secure-json-parse safeParse', () => { + sjson.safeParse(internals.text) + }) + .add('reviver', () => { + JSON.parse(internals.text, internals.reviver) + }) + .on('cycle', (event) => { + console.log(String(event.target)) + }) + .on('complete', function () { + console.log('Fastest is ' + this.filter('fastest').map('name')) + }) + .run({ async: true }) + +internals.reviver = function (_key, value) { + return value +} diff --git a/node_modules/secure-json-parse/benchmarks/no__proto__.js b/node_modules/secure-json-parse/benchmarks/no__proto__.js new file mode 100644 index 0000000..f2724fe --- /dev/null +++ b/node_modules/secure-json-parse/benchmarks/no__proto__.js @@ -0,0 +1,40 @@ +'use strict' + +const Benchmark = require('benchmark') +const sjson = require('..') + +const internals = { + text: '{ "a": 5, "b": 6, "proto": { "x": 7 }, "c": { "d": 0, "e": "text", "\\u005f\\u005fproto": { "y": 8 }, "f": { "g": 2 } } }', + suspectRx: /"(?:_|\\u005f)(?:_|\\u005f)(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006f)(?:t|\\u0074)(?:o|\\u006f)(?:_|\\u005f)(?:_|\\u005f)"/ +} + +const suite = new Benchmark.Suite() + +suite + .add('JSON.parse', () => { + JSON.parse(internals.text) + }) + .add('secure-json-parse parse', () => { + sjson.parse(internals.text) + }) + .add('secure-json-parse safeParse', () => { + sjson.safeParse(internals.text) + }) + .add('reviver', () => { + JSON.parse(internals.text, internals.reviver) + }) + .on('cycle', (event) => { +
console.log(String(event.target)) + }) + .on('complete', function () { + console.log('Fastest is ' + this.filter('fastest').map('name')) + }) + .run({ async: true }) + +internals.reviver = function (key, value) { + if (key.match(internals.suspectRx)) { + return undefined + } + + return value +} diff --git a/node_modules/secure-json-parse/benchmarks/package.json b/node_modules/secure-json-parse/benchmarks/package.json new file mode 100644 index 0000000..b12151b --- /dev/null +++ b/node_modules/secure-json-parse/benchmarks/package.json @@ -0,0 +1,15 @@ +{ + "name": "benchmarks", + "version": "1.0.0", + "scripts": { + "valid": "node valid.js", + "ignore": "node ignore.js", + "no_proto": "node no__proto__.js", + "remove": "node remove.js", + "throw": "node throw.js", + "all": "node --version && npm run valid && npm run ignore && npm run no_proto && npm run remove && npm run throw" + }, + "dependencies": { + "benchmark": "^2.1.4" + } +} diff --git a/node_modules/secure-json-parse/benchmarks/remove.js b/node_modules/secure-json-parse/benchmarks/remove.js new file mode 100644 index 0000000..af900db --- /dev/null +++ b/node_modules/secure-json-parse/benchmarks/remove.js @@ -0,0 +1,39 @@ +'use strict' + +const Benchmark = require('benchmark') +const sjson = require('..') + +const internals = { + text: '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' +} + +const suite = new Benchmark.Suite() + +suite + .add('JSON.parse', () => { + JSON.parse(internals.text) + }) + .add('secure-json-parse parse', () => { + sjson.parse(internals.text, { protoAction: 'remove' }) + }) + .add('secure-json-parse safeParse', () => { + sjson.safeParse(internals.text) + }) + .add('reviver', () => { + JSON.parse(internals.text, internals.reviver) + }) + .on('cycle', (event) => { + console.log(String(event.target)) + }) + .on('complete', function () { + console.log('Fastest is ' + this.filter('fastest').map('name')) + }) + .run({ async: true }) + +internals.reviver = function (key, value) { + if (key === '__proto__') { + return undefined + } + + return value +} diff --git a/node_modules/secure-json-parse/benchmarks/throw.js b/node_modules/secure-json-parse/benchmarks/throw.js new file mode 100644 index 0000000..14f47d1 --- /dev/null +++ b/node_modules/secure-json-parse/benchmarks/throw.js @@ -0,0 +1,49 @@ +'use strict' + +const Benchmark = require('benchmark') +const sjson = require('..') + +const internals = { + text: '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }', + invalid: '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } } }' +} + +const suite = new Benchmark.Suite() + +suite + .add('JSON.parse valid', () => { + JSON.parse(internals.text) + }) + .add('JSON.parse error', () => { + try { + JSON.parse(internals.invalid) + } catch { } + }) + .add('secure-json-parse parse', () => { + try { + sjson.parse(internals.invalid) + } catch { } + }) + .add('secure-json-parse safeParse', () => { + sjson.safeParse(internals.invalid) + }) + .add('reviver', () => { + try { + JSON.parse(internals.invalid, internals.reviver) + } catch { } + }) + .on('cycle', (event) => { + console.log(String(event.target)) + }) + .on('complete', function () { + console.log('Fastest is ' + this.filter('fastest').map('name')) + }) + .run({ async: true }) + +internals.reviver = function (key, value) { + if (key === '__proto__') { + throw new Error('kaboom') + 
} + + return value +} diff --git a/node_modules/secure-json-parse/benchmarks/valid.js b/node_modules/secure-json-parse/benchmarks/valid.js new file mode 100644 index 0000000..c221487 --- /dev/null +++ b/node_modules/secure-json-parse/benchmarks/valid.js @@ -0,0 +1,49 @@ +'use strict' + +const Benchmark = require('benchmark') +const sjson = require('..') + +const internals = { + text: '{ "a": 5, "b": 6, "c": { "d": 0, "e": "text", "f": { "g": 2 } } }', + proto: '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' +} + +const suite = new Benchmark.Suite() + +suite + .add('JSON.parse', () => { + JSON.parse(internals.text) + }) + .add('JSON.parse proto', () => { + JSON.parse(internals.proto) + }) + .add('secure-json-parse parse', () => { + sjson.parse(internals.text) + }) + .add('secure-json-parse parse proto', () => { + sjson.parse(internals.text, { constructorAction: 'ignore', protoAction: 'ignore' }) + }) + .add('secure-json-parse safeParse', () => { + sjson.safeParse(internals.text) + }) + .add('secure-json-parse safeParse proto', () => { + sjson.safeParse(internals.proto) + }) + .add('JSON.parse reviver', () => { + JSON.parse(internals.text, internals.reviver) + }) + .on('cycle', (event) => { + console.log(String(event.target)) + }) + .on('complete', function () { + console.log('Fastest is ' + this.filter('fastest').map('name')) + }) + .run({ async: true }) + +internals.reviver = function (key, value) { + if (key === '__proto__') { + return undefined + } + + return value +} diff --git a/node_modules/secure-json-parse/eslint.config.js b/node_modules/secure-json-parse/eslint.config.js new file mode 100644 index 0000000..89fd678 --- /dev/null +++ b/node_modules/secure-json-parse/eslint.config.js @@ -0,0 +1,6 @@ +'use strict' + +module.exports = require('neostandard')({ + ignores: require('neostandard').resolveIgnoresFromGitignore(), + ts: true +}) diff --git a/node_modules/secure-json-parse/index.js b/node_modules/secure-json-parse/index.js new file mode 100644 index 0000000..728ba96 --- /dev/null +++ b/node_modules/secure-json-parse/index.js @@ -0,0 +1,161 @@ +'use strict' + +const hasBuffer = typeof Buffer !== 'undefined' +const suspectProtoRx = /"(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])"\s*:/ +const suspectConstructorRx = /"(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)"\s*:/ + +/** + * @description Internal parse function that parses JSON text with security checks. + * @private + * @param {string|Buffer} text - The JSON text string or Buffer to parse. + * @param {Function} [reviver] - The JSON.parse() optional reviver argument. + * @param {import('./types').ParseOptions} [options] - Optional configuration object. + * @returns {*} The parsed object. + * @throws {SyntaxError} If a forbidden prototype property is found and `options.protoAction` or + * `options.constructorAction` is `'error'`. 
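+ * (Note: when called as `_parse(text, options)`, the object in the reviver position is
+ * detected and treated as the options bag; see the argument normalization below.)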
+ */ +function _parse (text, reviver, options) { + // Normalize arguments + if (options == null) { + if (reviver !== null && typeof reviver === 'object') { + options = reviver + reviver = undefined + } + } + + if (hasBuffer && Buffer.isBuffer(text)) { + text = text.toString() + } + + // BOM checker + if (text && text.charCodeAt(0) === 0xFEFF) { + text = text.slice(1) + } + + // Parse normally, allowing exceptions + const obj = JSON.parse(text, reviver) + + // Ignore null and non-objects + if (obj === null || typeof obj !== 'object') { + return obj + } + + const protoAction = (options && options.protoAction) || 'error' + const constructorAction = (options && options.constructorAction) || 'error' + + // options: 'error' (default) / 'remove' / 'ignore' + if (protoAction === 'ignore' && constructorAction === 'ignore') { + return obj + } + + if (protoAction !== 'ignore' && constructorAction !== 'ignore') { + if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) { + return obj + } + } else if (protoAction !== 'ignore' && constructorAction === 'ignore') { + if (suspectProtoRx.test(text) === false) { + return obj + } + } else { + if (suspectConstructorRx.test(text) === false) { + return obj + } + } + + // Scan result for proto keys + return filter(obj, { protoAction, constructorAction, safe: options && options.safe }) +} + +/** + * @description Scans and filters an object for forbidden prototype properties. + * @param {Object} obj - The object being scanned. + * @param {import('./types').ParseOptions} [options] - Optional configuration object. + * @returns {Object|null} The filtered object, or `null` if safe mode is enabled and issues are found. + * @throws {SyntaxError} If a forbidden prototype property is found and `options.protoAction` or + * `options.constructorAction` is `'error'`. + */ +function filter (obj, { protoAction = 'error', constructorAction = 'error', safe } = {}) { + let next = [obj] + + while (next.length) { + const nodes = next + next = [] + + for (const node of nodes) { + if (protoAction !== 'ignore' && Object.prototype.hasOwnProperty.call(node, '__proto__')) { // Avoid calling node.hasOwnProperty directly + if (safe === true) { + return null + } else if (protoAction === 'error') { + throw new SyntaxError('Object contains forbidden prototype property') + } + + delete node.__proto__ // eslint-disable-line no-proto + } + + if (constructorAction !== 'ignore' && + Object.prototype.hasOwnProperty.call(node, 'constructor') && + node.constructor !== null && + typeof node.constructor === 'object' && + Object.prototype.hasOwnProperty.call(node.constructor, 'prototype')) { // Avoid calling node.hasOwnProperty directly + if (safe === true) { + return null + } else if (constructorAction === 'error') { + throw new SyntaxError('Object contains forbidden prototype property') + } + + delete node.constructor + } + + for (const key in node) { + const value = node[key] + if (value && typeof value === 'object') { + next.push(value) + } + } + } + } + return obj +} + +/** + * @description Parses a given JSON-formatted text into an object. + * @param {string|Buffer} text - The JSON text string or Buffer to parse. + * @param {Function} [reviver] - The `JSON.parse()` optional reviver argument, or options object. + * @param {import('./types').ParseOptions} [options] - Optional configuration object. + * @returns {*} The parsed object. 
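+ * (`Error.stackTraceLimit` is temporarily set to 0 for the duration of the call and
+ * restored in a `finally` block, so throwing on hostile input stays cheap.)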
+ * @throws {SyntaxError} If the JSON text is malformed or contains forbidden prototype properties + * when `options.protoAction` or `options.constructorAction` is `'error'`. + */ +function parse (text, reviver, options) { + const { stackTraceLimit } = Error + Error.stackTraceLimit = 0 + try { + return _parse(text, reviver, options) + } finally { + Error.stackTraceLimit = stackTraceLimit + } +} + +/** + * @description Safely parses a given JSON-formatted text into an object. + * @param {string|Buffer} text - The JSON text string or Buffer to parse. + * @param {Function} [reviver] - The `JSON.parse()` optional reviver argument. + * @returns {*|null|undefined} The parsed object, `null` if security issues found, or `undefined` on parse error. + */ +function safeParse (text, reviver) { + const { stackTraceLimit } = Error + Error.stackTraceLimit = 0 + try { + return _parse(text, reviver, { safe: true }) + } catch { + return undefined + } finally { + Error.stackTraceLimit = stackTraceLimit + } +} + +module.exports = parse +module.exports.default = parse +module.exports.parse = parse +module.exports.safeParse = safeParse +module.exports.scan = filter diff --git a/node_modules/secure-json-parse/package.json b/node_modules/secure-json-parse/package.json new file mode 100644 index 0000000..a99a2b0 --- /dev/null +++ b/node_modules/secure-json-parse/package.json @@ -0,0 +1,74 @@ +{ + "name": "secure-json-parse", + "version": "4.1.0", + "description": "JSON parse with prototype poisoning protection", + "main": "index.js", + "type": "commonjs", + "types": "types/index.d.ts", + "scripts": { + "benchmark": "cd benchmarks && npm install && npm run all", + "lint": "eslint", + "lint:fix": "eslint --fix", + "test": "nyc npm run test:unit && npm run test:typescript", + "test:unit": "tape \"test/*.test.js\"", + "test:typescript": "tsd", + "test:browser": "airtap test/*.test.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/fastify/secure-json-parse.git" + }, + "author": "Eran Hammer ", + "contributors": [ + { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + { + "name": "Tomas Della Vedova", + "url": "http://delved.org" + }, + { + "name": "Aras Abbasi", + "email": "aras.abbasi@gmail.com" + }, + { + "name": "Frazer Smith", + "email": "frazer.dev@icloud.com", + "url": "https://github.com/fdawgs" + } + ], + "keywords": [ + "JSON", + "parse", + "safe", + "security", + "prototype", + "pollution" + ], + "license": "BSD-3-Clause", + "bugs": { + "url": "https://github.com/fastify/secure-json-parse/issues" + }, + "homepage": "https://github.com/fastify/secure-json-parse#readme", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "devDependencies": { + "airtap": "^5.0.0", + "airtap-playwright": "^1.0.1", + "eslint": "^9.17.0", + "neostandard": "^0.12.0", + "nyc": "^17.0.0", + "playwright": "^1.43.1", + "tape": "^5.7.5", + "tsd": "^0.33.0" + } +} diff --git a/node_modules/secure-json-parse/test/index.test.js b/node_modules/secure-json-parse/test/index.test.js new file mode 100644 index 0000000..835d582 --- /dev/null +++ b/node_modules/secure-json-parse/test/index.test.js @@ -0,0 +1,649 @@ +'use strict' + +const { test } = require('tape') +const j = require('..') + +test('parse', t => { + t.test('parses object string', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6}'), + JSON.parse('{"a": 5, "b": 6}') + ) + t.end() + }) + + t.test('parses null 
string', t => { + t.strictEqual( + j.parse('null'), + JSON.parse('null') + ) + t.end() + }) + + t.test('parses 0 string', t => { + t.strictEqual( + j.parse('0'), + JSON.parse('0') + ) + t.end() + }) + + t.test('parses string string', t => { + t.strictEqual( + j.parse('"X"'), + JSON.parse('"X"') + ) + t.end() + }) + + t.test('parses buffer', t => { + t.strictEqual( + j.parse(Buffer.from('"X"')), + JSON.parse(Buffer.from('"X"')) + ) + t.end() + }) + + t.test('parses object string (reviver)', t => { + const reviver = (_key, value) => { + return typeof value === 'number' ? value + 1 : value + } + + t.deepEqual( + j.parse('{"a": 5, "b": 6}', reviver), + JSON.parse('{"a": 5, "b": 6}', reviver) + ) + t.end() + }) + + t.test('protoAction', t => { + t.test('sanitizes object string (reviver, options)', t => { + const reviver = (_key, value) => { + return typeof value === 'number' ? value + 1 : value + } + + t.deepEqual( + j.parse('{"a": 5, "b": 6,"__proto__": { "x": 7 }}', reviver, { protoAction: 'remove' }), + { a: 6, b: 7 } + ) + t.end() + }) + + t.test('sanitizes object string (options)', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6,"__proto__": { "x": 7 }}', { protoAction: 'remove' }), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('sanitizes object string (null, options)', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6,"__proto__": { "x": 7 }}', null, { protoAction: 'remove' }), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('sanitizes object string (null, options)', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6,"__proto__": { "x": 7 }}', { protoAction: 'remove' }), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('sanitizes nested object string', t => { + t.deepEqual( + j.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }', { protoAction: 'remove' }), + { a: 5, b: 6, c: { d: 0, e: 'text', f: { g: 2 } } } + ) + t.end() + }) + + t.test('ignores proto property', t => { + t.deepEqual( + j.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }', { protoAction: 'ignore' }), + JSON.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }') + ) + t.end() + }) + + t.test('ignores proto value', t => { + t.deepEqual( + j.parse('{"a": 5, "b": "__proto__"}'), + { a: 5, b: '__proto__' } + ) + t.end() + }) + + t.test('errors on proto property', t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "__proto__" : { "x": 7 } }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "__proto__" \n\r\t : { "x": 7 } }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "__proto__" \n \r \t : { "x": 7 } }'), SyntaxError) + t.end() + }) + + t.test('errors on proto property (null, null)', t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }', null, null), SyntaxError) + t.end() + }) + + t.test('errors on proto property (explicit options)', t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }', { protoAction: 'error' }), SyntaxError) + t.end() + }) + + t.test('errors on proto property (unicode)', t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u005f_proto__": { "x": 7 } }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "_\\u005fp\\u0072oto__": { "x": 7 } }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u005f\\u005f\\u0070\\u0072\\u006f\\u0074\\u006f\\u005f\\u005f": { "x": 7 } }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u005F_proto__": { "x": 7 } }'), 
SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "_\\u005Fp\\u0072oto__": { "x": 7 } }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u005F\\u005F\\u0070\\u0072\\u006F\\u0074\\u006F\\u005F\\u005F": { "x": 7 } }'), SyntaxError) + t.end() + }) + + t.test('should reset stackTraceLimit', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + Error.stackTraceLimit = 42 + t.throws(() => j.parse(text)) + t.same(Error.stackTraceLimit, 42) + t.end() + }) + + t.end() + }) + + t.test('constructorAction', t => { + t.test('sanitizes object string (reviver, options)', t => { + const reviver = (_key, value) => { + return typeof value === 'number' ? value + 1 : value + } + + t.deepEqual( + j.parse('{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}} }', reviver, { constructorAction: 'remove' }), + { a: 6, b: 7 } + ) + t.end() + }) + + t.test('sanitizes object string (options)', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}} }', { constructorAction: 'remove' }), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('sanitizes object string (null, options)', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6,"constructor":{"prototype":{"bar":"baz"}} }', null, { constructorAction: 'remove' }), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('sanitizes object string (null, options)', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6,"constructor":{"prototype":{"bar":"baz"}} }', { constructorAction: 'remove' }), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('sanitizes object string (no prototype key)', t => { + t.deepEqual( + j.parse('{"a": 5, "b": 6,"constructor":{"bar":"baz"} }', { constructorAction: 'remove' }), + { a: 5, b: 6, constructor: { bar: 'baz' } } + ) + t.end() + }) + + t.test('sanitizes nested object string', t => { + t.deepEqual( + j.parse('{ "a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "c": { "d": 0, "e": "text", "constructor":{"prototype":{"bar":"baz"}}, "f": { "g": 2 } } }', { constructorAction: 'remove' }), + { a: 5, b: 6, c: { d: 0, e: 'text', f: { g: 2 } } } + ) + t.end() + }) + + t.test('ignores proto property', t => { + t.deepEqual( + j.parse('{ "a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}} }', { constructorAction: 'ignore' }), + JSON.parse('{ "a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}} }') + ) + t.end() + }) + + t.test('ignores proto value', t => { + t.deepEqual( + j.parse('{"a": 5, "b": "constructor"}'), + { a: 5, b: 'constructor' } + ) + t.end() + }) + + t.test('errors on proto property', t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "constructor": {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "constructor" : {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "constructor" \n\r\t : {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "constructor" \n \r \t : {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.end() + }) + + t.test('Should not throw if the constructor key hasn\'t a child named prototype', t => { + t.doesNotThrow(() => j.parse('{ "a": 5, "b": 6, "constructor":{"bar":"baz"} }', null, null), SyntaxError) + t.end() + }) + + t.test('errors on proto property (null, null)', t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}} }', null, null), SyntaxError) + t.end() + }) + + t.test('errors on proto property (explicit options)', 
t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}} }', { constructorAction: 'error' }), SyntaxError) + t.end() + }) + + t.test('errors on proto property (unicode)', t => { + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u0063\\u006fnstructor": {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u0063\\u006f\\u006e\\u0073\\u0074ructor": {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u0063\\u006f\\u006e\\u0073\\u0074\\u0072\\u0075\\u0063\\u0074\\u006f\\u0072": {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u0063\\u006Fnstructor": {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.throws(() => j.parse('{ "a": 5, "b": 6, "\\u0063\\u006F\\u006E\\u0073\\u0074\\u0072\\u0075\\u0063\\u0074\\u006F\\u0072": {"prototype":{"bar":"baz"}} }'), SyntaxError) + t.end() + }) + + t.test('handles constructor null safely', t => { + // Test that constructor: null doesn't trigger prototype pollution checks + t.deepEqual( + j.parse('{"constructor": null}', { constructorAction: 'remove' }), + { constructor: null } + ) + + // Test that constructor: null doesn't throw error when using error action + t.deepEqual( + j.parse('{"constructor": null}', { constructorAction: 'error' }), + { constructor: null } + ) + + // Test that constructor: null is preserved when using ignore action + t.deepEqual( + j.parse('{"constructor": null}', { constructorAction: 'ignore' }), + { constructor: null } + ) + t.end() + }) + + t.end() + }) + + t.test('protoAction and constructorAction', t => { + t.test('protoAction=remove constructorAction=remove', t => { + t.deepEqual( + j.parse( + '{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }', + { protoAction: 'remove', constructorAction: 'remove' } + ), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('protoAction=ignore constructorAction=remove', t => { + t.deepEqual( + j.parse( + '{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }', + { protoAction: 'ignore', constructorAction: 'remove' } + ), + JSON.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }') + ) + t.end() + }) + + t.test('protoAction=remove constructorAction=ignore', t => { + t.deepEqual( + j.parse( + '{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }', + { protoAction: 'remove', constructorAction: 'ignore' } + ), + JSON.parse('{ "a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}} }') + ) + t.end() + }) + + t.test('protoAction=ignore constructorAction=ignore', t => { + t.deepEqual( + j.parse( + '{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }', + { protoAction: 'ignore', constructorAction: 'ignore' } + ), + JSON.parse('{ "a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }') + ) + t.end() + }) + + t.test('protoAction=error constructorAction=ignore', t => { + t.throws(() => j.parse( + '{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }', + { protoAction: 'error', constructorAction: 'ignore' } + ), SyntaxError) + t.end() + }) + + t.test('protoAction=ignore constructorAction=error', t => { + t.throws(() => j.parse( + '{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }', + { protoAction: 'ignore', constructorAction: 'error' } + ), SyntaxError) + t.end() + }) + + t.test('protoAction=error constructorAction=error', t => { 
+ t.throws(() => j.parse( + '{"a": 5, "b": 6, "constructor":{"prototype":{"bar":"baz"}}, "__proto__": { "x": 7 } }', + { protoAction: 'error', constructorAction: 'error' } + ), SyntaxError) + t.end() + }) + + t.end() + }) + + t.test('sanitizes nested object string', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + const obj = j.parse(text, { protoAction: 'remove' }) + t.deepEqual(obj, { a: 5, b: 6, c: { d: 0, e: 'text', f: { g: 2 } } }) + t.end() + }) + + t.test('errors on constructor property', t => { + const text = '{ "a": 5, "b": 6, "constructor": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + t.throws(() => j.parse(text), SyntaxError) + t.end() + }) + + t.test('errors on proto property', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + t.throws(() => j.parse(text), SyntaxError) + t.end() + }) + + t.test('errors on constructor property', t => { + const text = '{ "a": 5, "b": 6, "constructor": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + t.throws(() => j.parse(text), SyntaxError) + t.end() + }) + + t.test('does not break when hasOwnProperty is overwritten', t => { + const text = '{ "a": 5, "b": 6, "hasOwnProperty": "text", "__proto__": { "x": 7 } }' + + const obj = j.parse(text, { protoAction: 'remove' }) + t.deepEqual(obj, { a: 5, b: 6, hasOwnProperty: 'text' }) + t.end() + }) + t.end() +}) + +test('safeParse', t => { + t.test('parses buffer', t => { + t.strictEqual( + j.safeParse(Buffer.from('"X"')), + JSON.parse(Buffer.from('"X"')) + ) + t.end() + }) + + t.test('should reset stackTraceLimit', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + Error.stackTraceLimit = 42 + t.same(j.safeParse(text), null) + t.same(Error.stackTraceLimit, 42) + t.end() + }) + + t.test('sanitizes nested object string', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + t.same(j.safeParse(text), null) + t.end() + }) + + t.test('returns null on constructor property', t => { + const text = '{ "a": 5, "b": 6, "constructor": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + t.same(j.safeParse(text), null) + t.end() + }) + + t.test('returns null on proto property', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + t.same(j.safeParse(text), null) + t.end() + }) + + t.test('returns null on constructor property', t => { + const text = '{ "a": 5, "b": 6, "constructor": { "x": 7 }, "c": { "d": 0, "e": "text", "__proto__": { "y": 8 }, "f": { "g": 2 } } }' + + t.same(j.safeParse(text), null) + t.end() + }) + + t.test('parses object string', t => { + t.deepEqual( + j.safeParse('{"a": 5, "b": 6}'), + { a: 5, b: 6 } + ) + t.end() + }) + + t.test('returns null on proto object string', t => { + t.strictEqual( + j.safeParse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }'), + null + ) + t.end() + }) + + t.test('returns undefined on invalid object string', t => { + t.strictEqual( + j.safeParse('{"a": 5, "b": 6'), + undefined + ) + t.end() + }) + + t.test('sanitizes object string (options)', t => { + t.deepEqual( + j.safeParse('{"a": 5, "b": 6, 
"constructor":{"prototype":{"bar":"baz"}} }'), + null + ) + t.end() + }) + + t.test('sanitizes object string (no prototype key)', t => { + t.deepEqual( + j.safeParse('{"a": 5, "b": 6,"constructor":{"bar":"baz"} }'), + { a: 5, b: 6, constructor: { bar: 'baz' } } + ) + t.end() + }) + + t.end() +}) + +test('parse string with BOM', t => { + const theJson = { hello: 'world' } + const buffer = Buffer.concat([ + Buffer.from([239, 187, 191]), // the utf8 BOM + Buffer.from(JSON.stringify(theJson)) + ]) + t.deepEqual(j.parse(buffer.toString()), theJson) + t.end() +}) + +test('parse buffer with BOM', t => { + const theJson = { hello: 'world' } + const buffer = Buffer.concat([ + Buffer.from([239, 187, 191]), // the utf8 BOM + Buffer.from(JSON.stringify(theJson)) + ]) + t.deepEqual(j.parse(buffer), theJson) + t.end() +}) + +test('safeParse string with BOM', t => { + const theJson = { hello: 'world' } + const buffer = Buffer.concat([ + Buffer.from([239, 187, 191]), // the utf8 BOM + Buffer.from(JSON.stringify(theJson)) + ]) + t.deepEqual(j.safeParse(buffer.toString()), theJson) + t.end() +}) + +test('safeParse buffer with BOM', t => { + const theJson = { hello: 'world' } + const buffer = Buffer.concat([ + Buffer.from([239, 187, 191]), // the utf8 BOM + Buffer.from(JSON.stringify(theJson)) + ]) + t.deepEqual(j.safeParse(buffer), theJson) + t.end() +}) + +test('scan handles optional options', t => { + t.doesNotThrow(() => j.scan({ a: 'b' })) + t.end() +}) + +test('safe option', t => { + t.test('parse with safe=true returns null on __proto__', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 } }' + t.strictEqual(j.parse(text, { safe: true }), null) + t.end() + }) + + t.test('parse with safe=true returns null on constructor', t => { + const text = '{ "a": 5, "b": 6, "constructor": {"prototype": {"bar": "baz"}} }' + t.strictEqual(j.parse(text, { safe: true }), null) + t.end() + }) + + t.test('parse with safe=true returns object when valid', t => { + const text = '{ "a": 5, "b": 6 }' + t.deepEqual(j.parse(text, { safe: true }), { a: 5, b: 6 }) + t.end() + }) + + t.test('parse with safe=true and reviver', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 } }' + const reviver = (_key, value) => { + return typeof value === 'number' ? 
value + 1 : value + } + t.strictEqual(j.parse(text, reviver, { safe: true }), null) + t.end() + }) + + t.test('parse with safe=true and protoAction=remove returns null', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 } }' + t.strictEqual(j.parse(text, { safe: true, protoAction: 'remove' }), null) + t.end() + }) + + t.test('parse with safe=true and constructorAction=remove returns null', t => { + const text = '{ "a": 5, "b": 6, "constructor": {"prototype": {"bar": "baz"}} }' + t.strictEqual(j.parse(text, { safe: true, constructorAction: 'remove' }), null) + t.end() + }) + + t.test('parse with safe=false throws on __proto__', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 } }' + t.throws(() => j.parse(text, { safe: false }), SyntaxError) + t.end() + }) + + t.test('parse with safe=false throws on constructor', t => { + const text = '{ "a": 5, "b": 6, "constructor": {"prototype": {"bar": "baz"}} }' + t.throws(() => j.parse(text, { safe: false }), SyntaxError) + t.end() + }) + + t.test('scan with safe=true returns null on __proto__', t => { + const obj = JSON.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }') + t.strictEqual(j.scan(obj, { safe: true }), null) + t.end() + }) + + t.test('scan with safe=true returns null on constructor', t => { + const obj = JSON.parse('{ "a": 5, "b": 6, "constructor": {"prototype": {"bar": "baz"}} }') + t.strictEqual(j.scan(obj, { safe: true }), null) + t.end() + }) + + t.test('scan with safe=true returns object when valid', t => { + const obj = { a: 5, b: 6 } + t.deepEqual(j.scan(obj, { safe: true }), { a: 5, b: 6 }) + t.end() + }) + + t.test('scan with safe=false throws on __proto__', t => { + const obj = JSON.parse('{ "a": 5, "b": 6, "__proto__": { "x": 7 } }') + t.throws(() => j.scan(obj, { safe: false }), SyntaxError) + t.end() + }) + + t.test('scan with safe=false throws on constructor', t => { + const obj = JSON.parse('{ "a": 5, "b": 6, "constructor": {"prototype": {"bar": "baz"}} }') + t.throws(() => j.scan(obj, { safe: false }), SyntaxError) + t.end() + }) + + t.test('parse with safe=true returns null on nested __proto__', t => { + const text = '{ "a": 5, "c": { "d": 0, "__proto__": { "y": 8 } } }' + t.strictEqual(j.parse(text, { safe: true }), null) + t.end() + }) + + t.test('parse with safe=true returns null on nested constructor', t => { + const text = '{ "a": 5, "c": { "d": 0, "constructor": {"prototype": {"bar": "baz"}} } }' + t.strictEqual(j.parse(text, { safe: true }), null) + t.end() + }) + + t.test('parse with safe=true and protoAction=ignore returns object', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 } }' + t.deepEqual( + j.parse(text, { safe: true, protoAction: 'ignore' }), + JSON.parse(text) + ) + t.end() + }) + + t.test('parse with safe=true and constructorAction=ignore returns object', t => { + const text = '{ "a": 5, "b": 6, "constructor": {"prototype": {"bar": "baz"}} }' + t.deepEqual( + j.parse(text, { safe: true, constructorAction: 'ignore' }), + JSON.parse(text) + ) + t.end() + }) + + t.test('should reset stackTraceLimit with safe option', t => { + const text = '{ "a": 5, "b": 6, "__proto__": { "x": 7 } }' + Error.stackTraceLimit = 42 + t.strictEqual(j.parse(text, { safe: true }), null) + t.same(Error.stackTraceLimit, 42) + t.end() + }) + + t.end() +}) diff --git a/node_modules/secure-json-parse/types/index.d.ts b/node_modules/secure-json-parse/types/index.d.ts new file mode 100644 index 0000000..4ccce04 --- /dev/null +++ b/node_modules/secure-json-parse/types/index.d.ts @@ -0,0 
+1,60 @@
+type Parse = typeof parse
+
+declare namespace parse {
+  export type ParseOptions = {
+    /**
+     * What to do when a `__proto__` key is found.
+     * - `'error'` - throw a `SyntaxError` when a `__proto__` key is found. This is the default value.
+     * - `'remove'` - deletes any `__proto__` keys from the result object.
+     * - `'ignore'` - skips all validation (same as calling `JSON.parse()` directly).
+     */
+    protoAction?: 'error' | 'remove' | 'ignore';
+    /**
+     * What to do when a `constructor` key is found.
+     * - `'error'` - throw a `SyntaxError` when a `constructor.prototype` key is found. This is the default value.
+     * - `'remove'` - deletes any `constructor` keys from the result object.
+     * - `'ignore'` - skips all validation (same as calling `JSON.parse()` directly).
+     */
+    constructorAction?: 'error' | 'remove' | 'ignore';
+    /** If `true`, returns `null` instead of throwing an error when a security issue is found. */
+    safe?: boolean;
+  }
+
+  export type ScanOptions = ParseOptions
+
+  export type Reviver = (this: any, key: string, value: any) => any
+
+  /**
+   * Parses a given JSON-formatted text into an object.
+   *
+   * @param text The JSON text string.
+   * @param reviver The `JSON.parse()` optional `reviver` argument.
+   * @param options Optional configuration object.
+   * @returns The parsed object.
+   */
+  export const parse: Parse
+
+  /**
+   * Safely parses a given JSON-formatted text into an object.
+   *
+   * @param text The JSON text string.
+   * @param reviver The `JSON.parse()` optional `reviver` argument.
+   * @returns The parsed object, `null` if the JSON contained forbidden prototype properties, or `undefined` if the text could not be parsed.
+   */
+  export function safeParse (text: string | Buffer, reviver?: Reviver | null): any
+
+  /**
+   * Scans a given object for prototype properties.
+   *
+   * @param obj The object being scanned.
+   * @param options Optional configuration object.
+   * @returns The object, or `null` if `safe` is `true` and a forbidden property was found.
+   */
+  export function scan (obj: { [key: string | number]: any }, options?: ParseOptions): any
+
+  export { parse as default }
+}
+
+declare function parse (text: string | Buffer, options?: parse.ParseOptions): any
+declare function parse (text: string | Buffer, reviver?: parse.Reviver | null, options?: parse.ParseOptions): any
+export = parse
diff --git a/node_modules/secure-json-parse/types/index.test-d.ts b/node_modules/secure-json-parse/types/index.test-d.ts
new file mode 100644
index 0000000..c12e348
--- /dev/null
+++ b/node_modules/secure-json-parse/types/index.test-d.ts
@@ -0,0 +1,43 @@
+import { expectType, expectError } from 'tsd'
+import sjson from '..'
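+// The assertions below exercise both overloads from types/index.d.ts:
+// parse(text, options) and parse(text, reviver, options).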
+ +expectError(sjson.parse(null)) +expectType(sjson.parse('{"anything":0}')) + +sjson.parse('"test"', null, { protoAction: 'remove' }) +expectError(sjson.parse('"test"', null, { protoAction: 'incorrect' })) +sjson.parse('"test"', null, { constructorAction: 'ignore' }) +expectError(sjson.parse('"test"', null, { constructorAction: 'incorrect' })) +expectError(sjson.parse('"test"', { constructorAction: 'incorrect' })) +sjson.parse('test', { constructorAction: 'remove' }) +sjson.parse('test', { protoAction: 'ignore' }) +sjson.parse('test', () => {}, { protoAction: 'ignore', constructorAction: 'remove' }) +sjson.parse('"test"', null, { safe: true }) +sjson.parse('"test"', { safe: true }) +sjson.parse('test', () => {}, { safe: false }) +sjson.parse('test', { protoAction: 'remove', safe: true }) +expectError(sjson.parse('"test"', null, { safe: 'incorrect' })) + +sjson.safeParse('"test"', null) +sjson.safeParse('"test"') +expectError(sjson.safeParse(null)) + +sjson.scan({}, { protoAction: 'remove' }) +sjson.scan({}, { protoAction: 'ignore' }) +sjson.scan({}, { constructorAction: 'error' }) +sjson.scan({}, { constructorAction: 'ignore' }) +sjson.scan([], {}) +sjson.scan({}, { safe: true }) +sjson.scan({}, { protoAction: 'remove', safe: false }) +expectError(sjson.scan({}, { safe: 'incorrect' })) + +declare const input: Buffer +sjson.parse(input) +sjson.safeParse(input) + +sjson.parse('{"anything":0}', (key, value) => { + expectType(key) +}) +sjson.safeParse('{"anything":0}', (key, value) => { + expectType(key) +}) diff --git a/node_modules/sonic-boom/.eslintignore b/node_modules/sonic-boom/.eslintignore new file mode 100644 index 0000000..874fc1b --- /dev/null +++ b/node_modules/sonic-boom/.eslintignore @@ -0,0 +1,2 @@ +types/index.d.ts +types/index.test-d.ts diff --git a/node_modules/sonic-boom/.taprc b/node_modules/sonic-boom/.taprc new file mode 100644 index 0000000..1ac2094 --- /dev/null +++ b/node_modules/sonic-boom/.taprc @@ -0,0 +1,3 @@ +timeout: 240 +allow-incomplete-coverage: true +reporter: terse diff --git a/node_modules/sonic-boom/LICENSE b/node_modules/sonic-boom/LICENSE new file mode 100644 index 0000000..eea791d --- /dev/null +++ b/node_modules/sonic-boom/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/sonic-boom/README.md b/node_modules/sonic-boom/README.md
new file mode 100644
index 0000000..b0da7eb
--- /dev/null
+++ b/node_modules/sonic-boom/README.md
@@ -0,0 +1,152 @@
+# sonic-boom
+
+[![NPM Package Version](https://img.shields.io/npm/v/sonic-boom)](https://www.npmjs.com/package/sonic-boom)
+[![Build Status](https://github.com/pinojs/sonic-boom/workflows/CI/badge.svg)](https://github.com/pinojs/sonic-boom/actions?query=workflow%3ACI)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
+
+Extremely fast utf8-only stream implementation to write to files and
+file descriptors.
+
+This implementation is partial, but it supports backpressure and `.pipe()`.
+It is also 2-3x faster than Node Core's `fs.createWriteStream()`:
+
+```
+benchSonic*1000: 1916.904ms
+benchSonicSync*1000: 8605.265ms
+benchSonic4k*1000: 1965.231ms
+benchSonicSync4k*1000: 1588.224ms
+benchCore*1000: 5851.959ms
+benchConsole*1000: 7605.713ms
+```
+
+Note that sync mode without buffering is _slower_ than a Node Core WritableStream;
+however, this mode matches the expected behavior of `console.log()`.
+
+Note that when logging to a Windows terminal (`cmd.exe` or
+PowerShell), you need to run `chcp 65001` in the terminal to
+display utf-8 characters correctly; see
+[chcp](https://ss64.com/nt/chcp.html) for more details.
+
+## Install
+
+```
+npm i sonic-boom
+```
+
+## Example
+
+```js
+'use strict'
+
+const SonicBoom = require('sonic-boom')
+const sonic = new SonicBoom({ fd: process.stdout.fd }) // or { dest: '/path/to/destination' }
+
+for (let i = 0; i < 10; i++) {
+  sonic.write('hello sonic\n')
+}
+```
+
+## API
+
+### SonicBoom(opts)
+
+Creates a new instance of SonicBoom.
+
+The options are:
+
+* `fd`: a file descriptor, something that is returned by `fs.open` or
+  `fs.openSync`.
+* `dest`: a string that is a path to a file to be written to (mode controlled by the `append` option).
+* `minLength`: the minimum length of the internal buffer that is
+  required to be full before flushing.
+* `maxLength`: the maximum length of the internal buffer. If a write operation would cause the buffer
+  to exceed `maxLength`, the data written is dropped and a `drop` event is emitted with the dropped data.
+* `maxWrite`: the maximum number of bytes that can be written per write operation; default: 16384.
+* `periodicFlush`: calls `flush` every `periodicFlush` milliseconds.
+* `sync`: perform writes synchronously (similar to `console.log`).
+* `fsync`: perform a [fsyncSync](https://nodejs.org/api/fs.html#fsfsyncsyncfd) every time a write is completed.
+* `append`: appends writes to dest file instead of truncating it (default `true`).
+* `mode`: specify the creating file `mode` (see [fs.open()](https://nodejs.org/api/fs.html#fsopenpath-flags-mode-callback) from Node.js core).
+* `mkdir`: ensure directory for dest file exists when `true` (default `false`).
+* `retryEAGAIN(err, writeBufferLen, remainingBufferLen)`: a function that will be called when sonic-boom
+  write/writeSync/flushSync encounters an EAGAIN or EBUSY error. If the return value is
+  `true`, sonic-boom will retry the operation; otherwise it will bubble the
+  error. `err` is the error that caused this function to be called,
+  `writeBufferLen` is the length of the buffer sonic-boom tried to write, and
+  `remainingBufferLen` is the length of the remaining buffer sonic-boom didn't try to write
+  (a combined usage sketch follows this list).
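+
+A minimal sketch combining several of these options (the log path and retry policy are placeholders, not defaults):
+
+```js
+'use strict'
+
+const SonicBoom = require('sonic-boom')
+
+const sonic = new SonicBoom({
+  dest: '/var/log/app.log', // hypothetical path
+  mkdir: true,              // create the directory if it is missing
+  minLength: 4096,          // buffer up to 4 KiB before flushing
+  retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
+    // example policy: retry transient EAGAIN/EBUSY only for small writes
+    return writeBufferLen < 16384
+  }
+})
+
+sonic.on('ready', () => {
+  sonic.write('hello sonic\n')
+  sonic.flush()
+})
+```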
+
+For `sync: false`, a `SonicBoom` instance emits the `'ready'` event when a file descriptor is available.
+For `sync: true` this is not relevant, because the `'ready'` event is fired when the `SonicBoom` instance is created, before it can be subscribed to.
+
+
+### SonicBoom#write(string)
+
+Writes the string to the file.
+It returns `false` to signal the producer to slow down.
+
+### SonicBoom#flush([cb])
+
+Writes the current buffer to the file if a write is not already in progress.
+Does nothing if `minLength` is zero or if the instance is already writing.
+
+Calls the callback when the flush operation is completed; on failure, the callback is called with an error.
+
+### SonicBoom#reopen([file])
+
+Reopens the file in place, useful for log rotation.
+
+Example:
+
+```js
+const stream = new SonicBoom({ dest: './my.log' })
+process.on('SIGUSR2', function () {
+  stream.reopen()
+})
+```
+
+### SonicBoom#flushSync()
+
+Flushes the buffered data synchronously. This is a costly operation.
+
+### SonicBoom#end()
+
+Closes the stream; the remaining data is flushed asynchronously.
+
+### SonicBoom#destroy()
+
+Closes the stream immediately; buffered data is not flushed.
+
+### Events
+
+
+#### SonicBoom#close
+
+See [Stream#close](https://nodejs.org/api/stream.html#event-close). The `'close'` event is emitted when the instance has been closed.
+
+#### SonicBoom#drain
+
+See [Stream#drain](https://nodejs.org/api/stream.html#event-drain). The `'drain'` event is emitted when the source can resume sending data.
+
+#### SonicBoom#drop
+
+When the internal buffer's `maxLength` is reached, the `'drop'` event is emitted with the data that could not be written.
+
+#### SonicBoom#error
+
+The `'error'` event is emitted when the destination file cannot be opened or written.
+
+#### SonicBoom#finish
+
+See [Stream#finish](https://nodejs.org/api/stream.html#event-finish). The `'finish'` event is emitted after the `end()` method is called and all data has been written.
+
+#### SonicBoom#ready
+
+The `'ready'` event occurs when the created instance is ready to process input.
+
+#### SonicBoom#write
+
+The `'write'` event occurs every time data is written to the underlying file. It emits the number of written bytes.
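+
+As a rough illustration of the backpressure contract described above (the message and count are placeholders):
+
+```js
+'use strict'
+
+const SonicBoom = require('sonic-boom')
+const sonic = new SonicBoom({ fd: process.stdout.fd })
+
+function writeMany (n) {
+  for (let i = 0; i < n; i++) {
+    // write() returns false when the producer should slow down
+    if (!sonic.write('hello sonic\n')) {
+      sonic.once('drain', () => writeMany(n - i - 1))
+      return
+    }
+  }
+}
+
+writeMany(100000)
+```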
+ +## License + +MIT diff --git a/node_modules/sonic-boom/bench.js b/node_modules/sonic-boom/bench.js new file mode 100644 index 0000000..26cd526 --- /dev/null +++ b/node_modules/sonic-boom/bench.js @@ -0,0 +1,98 @@ +'use strict' + +const bench = require('fastbench') +const SonicBoom = require('./') +const Console = require('console').Console +const fs = require('fs') + +const core = fs.createWriteStream('/dev/null') +const fd = fs.openSync('/dev/null', 'w') +const sonic = new SonicBoom({ fd }) +const sonic4k = new SonicBoom({ fd, minLength: 4096 }) +const sonicSync = new SonicBoom({ fd, sync: true }) +const sonicSync4k = new SonicBoom({ fd, minLength: 4096, sync: true }) +const sonicBuffer = new SonicBoom({ fd, contentMode: 'buffer' }) +const sonic4kBuffer = new SonicBoom({ fd, contentMode: 'buffer', minLength: 4096 }) +const sonicSyncBuffer = new SonicBoom({ fd, contentMode: 'buffer', sync: true }) +const sonicSync4kBuffer = new SonicBoom({ fd, contentMode: 'buffer', minLength: 4096, sync: true }) +const dummyConsole = new Console(fs.createWriteStream('/dev/null')) + +const MAX = 10000 + +const buf = Buffer.alloc(50, 'hello', 'utf8') +const str = buf.toString() + +setTimeout(doBench, 100) + +const run = bench([ + function benchSonic (cb) { + sonic.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonic.write(str) + } + }, + function benchSonicSync (cb) { + sonicSync.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonicSync.write(str) + } + }, + function benchSonic4k (cb) { + sonic4k.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonic4k.write(str) + } + }, + function benchSonicSync4k (cb) { + sonicSync4k.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonicSync4k.write(str) + } + }, + function benchCore (cb) { + core.once('drain', cb) + for (let i = 0; i < MAX; i++) { + core.write(str) + } + }, + function benchConsole (cb) { + for (let i = 0; i < MAX; i++) { + dummyConsole.log(str) + } + setImmediate(cb) + }, + function benchSonicBuf (cb) { + sonicBuffer.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonicBuffer.write(buf) + } + }, + function benchSonicSyncBuf (cb) { + sonicSyncBuffer.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonicSyncBuffer.write(buf) + } + }, + function benchSonic4kBuf (cb) { + sonic4kBuffer.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonic4kBuffer.write(buf) + } + }, + function benchSonicSync4kBuf (cb) { + sonicSync4kBuffer.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonicSync4kBuffer.write(buf) + } + }, + function benchCoreBuf (cb) { + core.once('drain', cb) + for (let i = 0; i < MAX; i++) { + core.write(buf) + } + } +], 1000) + +function doBench () { + run(run) +} diff --git a/node_modules/sonic-boom/check.js b/node_modules/sonic-boom/check.js new file mode 100644 index 0000000..509905e --- /dev/null +++ b/node_modules/sonic-boom/check.js @@ -0,0 +1,18 @@ +'use strict' + +const SonicBoom = require('.') +const sonic = new SonicBoom({ fd: process.stdout.fd }) + +let count = 0 +function scheduleWrites () { + for (let i = 0; i < 1000; i++) { + sonic.write('hello sonic\n') + console.log('hello console') + } + + if (++count < 10) { + setTimeout(scheduleWrites, 100) + } +} + +scheduleWrites() diff --git a/node_modules/sonic-boom/example.js b/node_modules/sonic-boom/example.js new file mode 100644 index 0000000..3f569cd --- /dev/null +++ b/node_modules/sonic-boom/example.js @@ -0,0 +1,8 @@ +'use strict' + +const SonicBoom = require('.') +const sonic = new SonicBoom({ fd: process.stdout.fd }) // or 'destination' + 
+for (let i = 0; i < 10; i++) { + sonic.write('hello sonic\n') +} diff --git a/node_modules/sonic-boom/fixtures/firehose.js b/node_modules/sonic-boom/fixtures/firehose.js new file mode 100644 index 0000000..85a8f31 --- /dev/null +++ b/node_modules/sonic-boom/fixtures/firehose.js @@ -0,0 +1,22 @@ +'use strict' + +const SonicBoom = require('..') + +const out = new SonicBoom({ fd: process.stdout.fd }) +const str = Buffer.alloc(1000).fill('a').toString() + +let i = 0 + +function write () { + if (i++ === 10) { + return + } + + if (out.write(str)) { + write() + } else { + out.once('drain', write) + } +} + +write() diff --git a/node_modules/sonic-boom/index.js b/node_modules/sonic-boom/index.js new file mode 100644 index 0000000..1553923 --- /dev/null +++ b/node_modules/sonic-boom/index.js @@ -0,0 +1,719 @@ +'use strict' + +const fs = require('fs') +const EventEmitter = require('events') +const inherits = require('util').inherits +const path = require('path') +const sleep = require('atomic-sleep') +const assert = require('assert') + +const BUSY_WRITE_TIMEOUT = 100 +const kEmptyBuffer = Buffer.allocUnsafe(0) + +// 16 KB. Don't write more than docker buffer size. +// https://github.com/moby/moby/blob/513ec73831269947d38a644c278ce3cac36783b2/daemon/logger/copier.go#L13 +const MAX_WRITE = 16 * 1024 + +const kContentModeBuffer = 'buffer' +const kContentModeUtf8 = 'utf8' + +const [major, minor] = (process.versions.node || '0.0').split('.').map(Number) +const kCopyBuffer = major >= 22 && minor >= 7 + +function openFile (file, sonic) { + sonic._opening = true + sonic._writing = true + sonic._asyncDrainScheduled = false + + // NOTE: 'error' and 'ready' events emitted below only relevant when sonic.sync===false + // for sync mode, there is no way to add a listener that will receive these + + function fileOpened (err, fd) { + if (err) { + sonic._reopening = false + sonic._writing = false + sonic._opening = false + + if (sonic.sync) { + process.nextTick(() => { + if (sonic.listenerCount('error') > 0) { + sonic.emit('error', err) + } + }) + } else { + sonic.emit('error', err) + } + return + } + + const reopening = sonic._reopening + + sonic.fd = fd + sonic.file = file + sonic._reopening = false + sonic._opening = false + sonic._writing = false + + if (sonic.sync) { + process.nextTick(() => sonic.emit('ready')) + } else { + sonic.emit('ready') + } + + if (sonic.destroyed) { + return + } + + // start + if ((!sonic._writing && sonic._len > sonic.minLength) || sonic._flushPending) { + sonic._actualWrite() + } else if (reopening) { + process.nextTick(() => sonic.emit('drain')) + } + } + + const flags = sonic.append ? 
'a' : 'w' + const mode = sonic.mode + + if (sonic.sync) { + try { + if (sonic.mkdir) fs.mkdirSync(path.dirname(file), { recursive: true }) + const fd = fs.openSync(file, flags, mode) + fileOpened(null, fd) + } catch (err) { + fileOpened(err) + throw err + } + } else if (sonic.mkdir) { + fs.mkdir(path.dirname(file), { recursive: true }, (err) => { + if (err) return fileOpened(err) + fs.open(file, flags, mode, fileOpened) + }) + } else { + fs.open(file, flags, mode, fileOpened) + } +} + +function SonicBoom (opts) { + if (!(this instanceof SonicBoom)) { + return new SonicBoom(opts) + } + + let { fd, dest, minLength, maxLength, maxWrite, periodicFlush, sync, append = true, mkdir, retryEAGAIN, fsync, contentMode, mode } = opts || {} + + fd = fd || dest + + this._len = 0 + this.fd = -1 + this._bufs = [] + this._lens = [] + this._writing = false + this._ending = false + this._reopening = false + this._asyncDrainScheduled = false + this._flushPending = false + this._hwm = Math.max(minLength || 0, 16387) + this.file = null + this.destroyed = false + this.minLength = minLength || 0 + this.maxLength = maxLength || 0 + this.maxWrite = maxWrite || MAX_WRITE + this._periodicFlush = periodicFlush || 0 + this._periodicFlushTimer = undefined + this.sync = sync || false + this.writable = true + this._fsync = fsync || false + this.append = append || false + this.mode = mode + this.retryEAGAIN = retryEAGAIN || (() => true) + this.mkdir = mkdir || false + + let fsWriteSync + let fsWrite + if (contentMode === kContentModeBuffer) { + this._writingBuf = kEmptyBuffer + this.write = writeBuffer + this.flush = flushBuffer + this.flushSync = flushBufferSync + this._actualWrite = actualWriteBuffer + fsWriteSync = () => fs.writeSync(this.fd, this._writingBuf) + fsWrite = () => fs.write(this.fd, this._writingBuf, this.release) + } else if (contentMode === undefined || contentMode === kContentModeUtf8) { + this._writingBuf = '' + this.write = write + this.flush = flush + this.flushSync = flushSync + this._actualWrite = actualWrite + fsWriteSync = () => fs.writeSync(this.fd, this._writingBuf, 'utf8') + fsWrite = () => fs.write(this.fd, this._writingBuf, 'utf8', this.release) + } else { + throw new Error(`SonicBoom supports "${kContentModeUtf8}" and "${kContentModeBuffer}", but passed ${contentMode}`) + } + + if (typeof fd === 'number') { + this.fd = fd + process.nextTick(() => this.emit('ready')) + } else if (typeof fd === 'string') { + openFile(fd, this) + } else { + throw new Error('SonicBoom supports only file descriptors and files') + } + if (this.minLength >= this.maxWrite) { + throw new Error(`minLength should be smaller than maxWrite (${this.maxWrite})`) + } + + this.release = (err, n) => { + if (err) { + if ((err.code === 'EAGAIN' || err.code === 'EBUSY') && this.retryEAGAIN(err, this._writingBuf.length, this._len - this._writingBuf.length)) { + if (this.sync) { + // This error code should not happen in sync mode, because it is + // not using the underlining operating system asynchronous functions. + // However it happens, and so we handle it. + // Ref: https://github.com/pinojs/pino/issues/783 + try { + sleep(BUSY_WRITE_TIMEOUT) + this.release(undefined, 0) + } catch (err) { + this.release(err) + } + } else { + // Let's give the destination some time to process the chunk. 
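+        // (BUSY_WRITE_TIMEOUT is the 100 ms constant defined at the top of this file;
+        // the async path retries by rescheduling fsWrite rather than blocking.)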
+ setTimeout(fsWrite, BUSY_WRITE_TIMEOUT) + } + } else { + this._writing = false + + this.emit('error', err) + } + return + } + + this.emit('write', n) + const releasedBufObj = releaseWritingBuf(this._writingBuf, this._len, n) + this._len = releasedBufObj.len + this._writingBuf = releasedBufObj.writingBuf + + if (this._writingBuf.length) { + if (!this.sync) { + fsWrite() + return + } + + try { + do { + const n = fsWriteSync() + const releasedBufObj = releaseWritingBuf(this._writingBuf, this._len, n) + this._len = releasedBufObj.len + this._writingBuf = releasedBufObj.writingBuf + } while (this._writingBuf.length) + } catch (err) { + this.release(err) + return + } + } + + if (this._fsync) { + fs.fsyncSync(this.fd) + } + + const len = this._len + if (this._reopening) { + this._writing = false + this._reopening = false + this.reopen() + } else if (len > this.minLength) { + this._actualWrite() + } else if (this._ending) { + if (len > 0) { + this._actualWrite() + } else { + this._writing = false + actualClose(this) + } + } else { + this._writing = false + if (this.sync) { + if (!this._asyncDrainScheduled) { + this._asyncDrainScheduled = true + process.nextTick(emitDrain, this) + } + } else { + this.emit('drain') + } + } + } + + this.on('newListener', function (name) { + if (name === 'drain') { + this._asyncDrainScheduled = false + } + }) + + if (this._periodicFlush !== 0) { + this._periodicFlushTimer = setInterval(() => this.flush(null), this._periodicFlush) + this._periodicFlushTimer.unref() + } +} + +/** + * Release the writingBuf after fs.write n bytes data + * @param {string | Buffer} writingBuf - currently writing buffer, usually be instance._writingBuf. + * @param {number} len - currently buffer length, usually be instance._len. + * @param {number} n - number of bytes fs already written + * @returns {{writingBuf: string | Buffer, len: number}} released writingBuf and length + */ +function releaseWritingBuf (writingBuf, len, n) { + // if Buffer.byteLength is equal to n, that means writingBuf contains no multi-byte character + if (typeof writingBuf === 'string' && Buffer.byteLength(writingBuf) !== n) { + // Since the fs.write callback parameter `n` means how many bytes the passed of string + // We calculate the original string length for avoiding the multi-byte character issue + n = Buffer.from(writingBuf).subarray(0, n).toString().length + } + len = Math.max(len - n, 0) + writingBuf = writingBuf.slice(n) + return { writingBuf, len } +} + +function emitDrain (sonic) { + const hasListeners = sonic.listenerCount('drain') > 0 + if (!hasListeners) return + sonic._asyncDrainScheduled = false + sonic.emit('drain') +} + +inherits(SonicBoom, EventEmitter) + +function mergeBuf (bufs, len) { + if (bufs.length === 0) { + return kEmptyBuffer + } + + if (bufs.length === 1) { + return bufs[0] + } + + return Buffer.concat(bufs, len) +} + +function write (data) { + if (this.destroyed) { + throw new Error('SonicBoom destroyed') + } + + const len = this._len + data.length + const bufs = this._bufs + + if (this.maxLength && len > this.maxLength) { + this.emit('drop', data) + return this._len < this._hwm + } + + if ( + bufs.length === 0 || + bufs[bufs.length - 1].length + data.length > this.maxWrite + ) { + bufs.push('' + data) + } else { + bufs[bufs.length - 1] += data + } + + this._len = len + + if (!this._writing && this._len >= this.minLength) { + this._actualWrite() + } + + return this._len < this._hwm +} + +function writeBuffer (data) { + if (this.destroyed) { + throw new Error('SonicBoom destroyed') + } 
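+  // Like write() above, writeBuffer() batches incoming buffers into chunks capped
+  // at maxWrite, so a single fs.write call never exceeds that size.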
+ + const len = this._len + data.length + const bufs = this._bufs + const lens = this._lens + + if (this.maxLength && len > this.maxLength) { + this.emit('drop', data) + return this._len < this._hwm + } + + if ( + bufs.length === 0 || + lens[lens.length - 1] + data.length > this.maxWrite + ) { + bufs.push([data]) + lens.push(data.length) + } else { + bufs[bufs.length - 1].push(data) + lens[lens.length - 1] += data.length + } + + this._len = len + + if (!this._writing && this._len >= this.minLength) { + this._actualWrite() + } + + return this._len < this._hwm +} + +function callFlushCallbackOnDrain (cb) { + this._flushPending = true + const onDrain = () => { + // only if _fsync is false to avoid double fsync + if (!this._fsync) { + try { + fs.fsync(this.fd, (err) => { + this._flushPending = false + cb(err) + }) + } catch (err) { + cb(err) + } + } else { + this._flushPending = false + cb() + } + this.off('error', onError) + } + const onError = (err) => { + this._flushPending = false + cb(err) + this.off('drain', onDrain) + } + + this.once('drain', onDrain) + this.once('error', onError) +} + +function flush (cb) { + if (cb != null && typeof cb !== 'function') { + throw new Error('flush cb must be a function') + } + + if (this.destroyed) { + const error = new Error('SonicBoom destroyed') + if (cb) { + cb(error) + return + } + + throw error + } + + if (this.minLength <= 0) { + cb?.() + return + } + + if (cb) { + callFlushCallbackOnDrain.call(this, cb) + } + + if (this._writing) { + return + } + + if (this._bufs.length === 0) { + this._bufs.push('') + } + + this._actualWrite() +} + +function flushBuffer (cb) { + if (cb != null && typeof cb !== 'function') { + throw new Error('flush cb must be a function') + } + + if (this.destroyed) { + const error = new Error('SonicBoom destroyed') + if (cb) { + cb(error) + return + } + + throw error + } + + if (this.minLength <= 0) { + cb?.() + return + } + + if (cb) { + callFlushCallbackOnDrain.call(this, cb) + } + + if (this._writing) { + return + } + + if (this._bufs.length === 0) { + this._bufs.push([]) + this._lens.push(0) + } + + this._actualWrite() +} + +SonicBoom.prototype.reopen = function (file) { + if (this.destroyed) { + throw new Error('SonicBoom destroyed') + } + + if (this._opening) { + this.once('ready', () => { + this.reopen(file) + }) + return + } + + if (this._ending) { + return + } + + if (!this.file) { + throw new Error('Unable to reopen a file descriptor, you must pass a file to SonicBoom') + } + + if (file) { + this.file = file + } + this._reopening = true + + if (this._writing) { + return + } + + const fd = this.fd + this.once('ready', () => { + if (fd !== this.fd) { + fs.close(fd, (err) => { + if (err) { + return this.emit('error', err) + } + }) + } + }) + + openFile(this.file, this) +} + +SonicBoom.prototype.end = function () { + if (this.destroyed) { + throw new Error('SonicBoom destroyed') + } + + if (this._opening) { + this.once('ready', () => { + this.end() + }) + return + } + + if (this._ending) { + return + } + + this._ending = true + + if (this._writing) { + return + } + + if (this._len > 0 && this.fd >= 0) { + this._actualWrite() + } else { + actualClose(this) + } +} + +function flushSync () { + if (this.destroyed) { + throw new Error('SonicBoom destroyed') + } + + if (this.fd < 0) { + throw new Error('sonic boom is not ready yet') + } + + if (!this._writing && this._writingBuf.length > 0) { + this._bufs.unshift(this._writingBuf) + this._writingBuf = '' + } + + let buf = '' + while (this._bufs.length || buf) { + if (buf.length 
<= 0) {
+      buf = this._bufs[0]
+    }
+    try {
+      const n = fs.writeSync(this.fd, buf, 'utf8')
+      const releasedBufObj = releaseWritingBuf(buf, this._len, n)
+      buf = releasedBufObj.writingBuf
+      this._len = releasedBufObj.len
+      if (buf.length <= 0) {
+        this._bufs.shift()
+      }
+    } catch (err) {
+      const shouldRetry = err.code === 'EAGAIN' || err.code === 'EBUSY'
+      if (shouldRetry && !this.retryEAGAIN(err, buf.length, this._len - buf.length)) {
+        throw err
+      }
+
+      sleep(BUSY_WRITE_TIMEOUT)
+    }
+  }
+
+  try {
+    fs.fsyncSync(this.fd)
+  } catch {
+    // Skip the error. The fd might not support fsync.
+  }
+}
+
+function flushBufferSync () {
+  if (this.destroyed) {
+    throw new Error('SonicBoom destroyed')
+  }
+
+  if (this.fd < 0) {
+    throw new Error('sonic boom is not ready yet')
+  }
+
+  if (!this._writing && this._writingBuf.length > 0) {
+    this._bufs.unshift([this._writingBuf])
+    this._writingBuf = kEmptyBuffer
+  }
+
+  let buf = kEmptyBuffer
+  while (this._bufs.length || buf.length) {
+    if (buf.length <= 0) {
+      buf = mergeBuf(this._bufs[0], this._lens[0])
+    }
+    try {
+      const n = fs.writeSync(this.fd, buf)
+      buf = buf.subarray(n)
+      this._len = Math.max(this._len - n, 0)
+      if (buf.length <= 0) {
+        this._bufs.shift()
+        this._lens.shift()
+      }
+    } catch (err) {
+      const shouldRetry = err.code === 'EAGAIN' || err.code === 'EBUSY'
+      if (shouldRetry && !this.retryEAGAIN(err, buf.length, this._len - buf.length)) {
+        throw err
+      }
+
+      sleep(BUSY_WRITE_TIMEOUT)
+    }
+  }
+}
+
+SonicBoom.prototype.destroy = function () {
+  if (this.destroyed) {
+    return
+  }
+  actualClose(this)
+}
+
+function actualWrite () {
+  const release = this.release
+  this._writing = true
+  // resume a partially written chunk if there is one, otherwise pull the next
+  this._writingBuf = this._writingBuf || this._bufs.shift() || ''
+
+  if (this.sync) {
+    try {
+      const written = fs.writeSync(this.fd, this._writingBuf, 'utf8')
+      release(null, written)
+    } catch (err) {
+      release(err)
+    }
+  } else {
+    fs.write(this.fd, this._writingBuf, 'utf8', release)
+  }
+}
+
+function actualWriteBuffer () {
+  const release = this.release
+  this._writing = true
+  this._writingBuf = this._writingBuf.length ? this._writingBuf : mergeBuf(this._bufs.shift(), this._lens.shift())
+
+  if (this.sync) {
+    try {
+      const written = fs.writeSync(this.fd, this._writingBuf)
+      release(null, written)
+    } catch (err) {
+      release(err)
+    }
+  } else {
+    // fs.write will need to copy string to buffer anyway so
+    // we do it here to avoid the overhead of calculating the buffer size
+    // in releaseWritingBuf.
+    if (kCopyBuffer) {
+      this._writingBuf = Buffer.from(this._writingBuf)
+    }
+    fs.write(this.fd, this._writingBuf, release)
+  }
+}
+
+function actualClose (sonic) {
+  if (sonic.fd === -1) {
+    sonic.once('ready', actualClose.bind(null, sonic))
+    return
+  }
+
+  if (sonic._periodicFlushTimer !== undefined) {
+    clearInterval(sonic._periodicFlushTimer)
+  }
+
+  sonic.destroyed = true
+  sonic._bufs = []
+  sonic._lens = []
+
+  assert(typeof sonic.fd === 'number', `sonic.fd must be a number, got ${typeof sonic.fd}`)
+  try {
+    fs.fsync(sonic.fd, closeWrapped)
+  } catch {
+  }
+
+  function closeWrapped () {
+    // We skip errors in fsync
+
+    if (sonic.fd !== 1 && sonic.fd !== 2) {
+      fs.close(sonic.fd, done)
+    } else {
+      done()
+    }
+  }
+
+  function done (err) {
+    if (err) {
+      sonic.emit('error', err)
+      return
+    }
+
+    if (sonic._ending && !sonic._writing) {
+      sonic.emit('finish')
+    }
+    sonic.emit('close')
+  }
+}
+
+/**
+ * These export configurations enable JS and TS developers
+ * to consume SonicBoom in whatever way best suits their needs.
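+ *
+ * A minimal usage sketch (illustrative only; './out.log' is a placeholder path):
+ *
+ *   const SonicBoom = require('sonic-boom')
+ *   const sb = new SonicBoom({ dest: './out.log', minLength: 4096, sync: false })
+ *   sb.write('hello sonic boom\n')
+ *   sb.flushSync() // flush buffered bytes to disk before minLength is reached
+ *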
+ * Some examples of supported import syntax include:
+ * - `const SonicBoom = require('SonicBoom')`
+ * - `const { SonicBoom } = require('SonicBoom')`
+ * - `import * as SonicBoom from 'SonicBoom'`
+ * - `import { SonicBoom } from 'SonicBoom'`
+ * - `import SonicBoom from 'SonicBoom'`
+ */
+SonicBoom.SonicBoom = SonicBoom
+SonicBoom.default = SonicBoom
+module.exports = SonicBoom
diff --git a/node_modules/sonic-boom/package.json b/node_modules/sonic-boom/package.json
new file mode 100644
index 0000000..df096bb
--- /dev/null
+++ b/node_modules/sonic-boom/package.json
@@ -0,0 +1,52 @@
+{
+  "name": "sonic-boom",
+  "version": "4.2.0",
+  "description": "Extremely fast utf8 only stream implementation",
+  "main": "index.js",
+  "type": "commonjs",
+  "types": "types/index.d.ts",
+  "scripts": {
+    "test": "npm run test:types && standard && npm run test:unit",
+    "test:unit": "tap",
+    "test:types": "tsc && tsd"
+  },
+  "pre-commit": [
+    "test"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/pinojs/sonic-boom.git"
+  },
+  "keywords": [
+    "stream",
+    "fs",
+    "net",
+    "fd",
+    "file",
+    "descriptor",
+    "fast"
+  ],
+  "author": "Matteo Collina ",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/pinojs/sonic-boom/issues"
+  },
+  "homepage": "https://github.com/pinojs/sonic-boom#readme",
+  "devDependencies": {
+    "@fastify/pre-commit": "^2.1.0",
+    "@sinonjs/fake-timers": "^13.0.1",
+    "@types/node": "^22.0.0",
+    "fastbench": "^1.0.1",
+    "proxyquire": "^2.1.3",
+    "standard": "^17.0.0",
+    "tap": "^18.2.0",
+    "tsd": "^0.31.0",
+    "typescript": "^5.0.2"
+  },
+  "dependencies": {
+    "atomic-sleep": "^1.0.0"
+  },
+  "tsd": {
+    "directory": "./types"
+  }
+}
diff --git a/node_modules/sonic-boom/test/destroy.test.js b/node_modules/sonic-boom/test/destroy.test.js
new file mode 100644
index 0000000..8fd09ac
--- /dev/null
+++ b/node_modules/sonic-boom/test/destroy.test.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const fs = require('fs')
+const SonicBoom = require('../')
+const { file, runTests } = require('./helper')
+
+runTests(buildTests)
+
+function buildTests (test, sync) {
+  // Reset the umask for testing
+  process.umask(0o000)
+
+  test('destroy', (t) => {
+    t.plan(5)
+
+    const dest = file()
+    const fd = fs.openSync(dest, 'w')
+    const stream = new SonicBoom({ fd, sync })
+
+    t.ok(stream.write('hello world\n'))
+    stream.destroy()
+    t.throws(() => { stream.write('hello world\n') })
+
+    fs.readFile(dest, 'utf8', function (err, data) {
+      t.error(err)
+      t.equal(data, 'hello world\n')
+    })
+
+    stream.on('finish', () => {
+      t.fail('finish emitted')
+    })
+
+    stream.on('close', () => {
+      t.pass('close emitted')
+    })
+  })
+
+  test('destroy while opening', (t) => {
+    t.plan(1)
+
+    const dest = file()
+    const stream = new SonicBoom({ dest })
+
+    stream.destroy()
+    stream.on('close', () => {
+      t.pass('close emitted')
+    })
+  })
+}
diff --git a/node_modules/sonic-boom/test/end.test.js b/node_modules/sonic-boom/test/end.test.js
new file mode 100644
index 0000000..2401d1e
--- /dev/null
+++ b/node_modules/sonic-boom/test/end.test.js
@@ -0,0 +1,98 @@
+'use strict'
+
+const { join } = require('path')
+const { fork } = require('child_process')
+const fs = require('fs')
+const SonicBoom = require('../')
+const { file, runTests } = require('./helper')
+
+runTests(buildTests)
+
+function buildTests (test, sync) {
+  // Reset the umask for testing
+  process.umask(0o000)
+
+  test('end after reopen', (t) => {
+    t.plan(4)
+
+    const dest = file()
+    const stream = new SonicBoom({ dest, minLength: 4096, sync })
+
stream.once('ready', () => { + t.pass('ready emitted') + const after = dest + '-moved' + stream.reopen(after) + stream.write('after reopen\n') + stream.on('finish', () => { + t.pass('finish emitted') + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'after reopen\n') + }) + }) + stream.end() + }) + }) + + test('end after 2x reopen', (t) => { + t.plan(4) + + const dest = file() + const stream = new SonicBoom({ dest, minLength: 4096, sync }) + + stream.once('ready', () => { + t.pass('ready emitted') + stream.reopen(dest + '-moved') + const after = dest + '-moved-moved' + stream.reopen(after) + stream.write('after reopen\n') + stream.on('finish', () => { + t.pass('finish emitted') + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'after reopen\n') + }) + }) + stream.end() + }) + }) + + test('end if not ready', (t) => { + t.plan(3) + + const dest = file() + const stream = new SonicBoom({ dest, minLength: 4096, sync }) + const after = dest + '-moved' + stream.reopen(after) + stream.write('after reopen\n') + stream.on('finish', () => { + t.pass('finish emitted') + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'after reopen\n') + }) + }) + stream.end() + }) + + test('chunk data accordingly', (t) => { + t.plan(2) + + const child = fork(join(__dirname, '..', 'fixtures', 'firehose.js'), { silent: true }) + const str = Buffer.alloc(10000).fill('a').toString() + + let data = '' + + child.stdout.on('data', function (chunk) { + data += chunk.toString() + }) + + child.stdout.on('end', function () { + t.equal(data, str) + }) + + child.on('close', function (code) { + t.equal(code, 0) + }) + }) +} diff --git a/node_modules/sonic-boom/test/flush-sync.test.js b/node_modules/sonic-boom/test/flush-sync.test.js new file mode 100644 index 0000000..dc76282 --- /dev/null +++ b/node_modules/sonic-boom/test/flush-sync.test.js @@ -0,0 +1,140 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const proxyquire = require('proxyquire') +const SonicBoom = require('../') +const { file, runTests } = require('./helper') + +runTests(buildTests) + +function buildTests (test, sync) { + // Reset the umask for testing + process.umask(0o000) + + test('flushSync', (t) => { + t.plan(4) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 4096, sync }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.flushSync() + + // let the file system settle down things + setImmediate(function () { + stream.end() + const data = fs.readFileSync(dest, 'utf8') + t.equal(data, 'hello world\nsomething else\n') + + stream.on('close', () => { + t.pass('close emitted') + }) + }) + }) +} + +test('retry in flushSync on EAGAIN', (t) => { + t.plan(7) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync: false, minLength: 0 }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + + fakeFs.writeSync = function (fd, buf, enc) { + t.pass('fake fs.write called') + fakeFs.writeSync = fs.writeSync + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + throw err + } + + t.ok(stream.write('something else\n')) + + stream.flushSync() + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello 
world\nsomething else\n')
+    })
+  })
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
+
+test('throw error in flushSync on EAGAIN', (t) => {
+  t.plan(12)
+
+  const fakeFs = Object.create(fs)
+  const SonicBoom = proxyquire('../', {
+    fs: fakeFs
+  })
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({
+    fd,
+    sync: false,
+    minLength: 1000,
+    retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
+      t.equal(err.code, 'EAGAIN')
+      t.equal(writeBufferLen, 12)
+      t.equal(remainingBufferLen, 0)
+      return false
+    }
+  })
+
+  stream.on('ready', () => {
+    t.pass('ready emitted')
+  })
+
+  const err = new Error('EAGAIN')
+  err.code = 'EAGAIN'
+  fakeFs.writeSync = function (fd, buf, enc) {
+    Error.captureStackTrace(err)
+    t.pass('fake fs.write called')
+    fakeFs.writeSync = fs.writeSync
+    throw err
+  }
+
+  fakeFs.fsyncSync = function (...args) {
+    t.pass('fake fs.fsyncSync called')
+    fakeFs.fsyncSync = fs.fsyncSync
+    return fs.fsyncSync.apply(null, args)
+  }
+
+  t.ok(stream.write('hello world\n'))
+  t.throws(stream.flushSync.bind(stream), err, 'EAGAIN')
+
+  t.ok(stream.write('something else\n'))
+  stream.flushSync()
+
+  stream.end()
+
+  stream.on('finish', () => {
+    fs.readFile(dest, 'utf8', (err, data) => {
+      t.error(err)
+      t.equal(data, 'hello world\nsomething else\n')
+    })
+  })
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
diff --git a/node_modules/sonic-boom/test/flush.test.js b/node_modules/sonic-boom/test/flush.test.js
new file mode 100644
index 0000000..1bbd389
--- /dev/null
+++ b/node_modules/sonic-boom/test/flush.test.js
@@ -0,0 +1,419 @@
+'use strict'
+
+const fs = require('fs')
+const path = require('path')
+const SonicBoom = require('../')
+const { file, runTests } = require('./helper')
+const proxyquire = require('proxyquire')
+
+runTests(buildTests)
+
+function buildTests (test, sync) {
+  // Reset the umask for testing
+  process.umask(0o000)
+
+  test('append', (t) => {
+    t.plan(4)
+
+    const dest = file()
+    fs.writeFileSync(dest, 'hello world\n')
+    const stream = new SonicBoom({ dest, append: false, sync })
+
+    stream.on('ready', () => {
+      t.pass('ready emitted')
+    })
+
+    t.ok(stream.write('something else\n'))
+
+    stream.flush()
+
+    stream.on('drain', () => {
+      fs.readFile(dest, 'utf8', (err, data) => {
+        t.error(err)
+        t.equal(data, 'something else\n')
+        stream.end()
+      })
+    })
+  })
+
+  test('mkdir', (t) => {
+    t.plan(4)
+
+    const dest = path.join(file(), 'out.log')
+    const stream = new SonicBoom({ dest, mkdir: true, sync })
+
+    stream.on('ready', () => {
+      t.pass('ready emitted')
+    })
+
+    t.ok(stream.write('hello world\n'))
+
+    stream.flush()
+
+    stream.on('drain', () => {
+      fs.readFile(dest, 'utf8', (err, data) => {
+        t.error(err)
+        t.equal(data, 'hello world\n')
+        stream.end()
+      })
+    })
+  })
+
+  test('flush', (t) => {
+    t.plan(5)
+
+    const dest = file()
+    const fd = fs.openSync(dest, 'w')
+    const stream = new SonicBoom({ fd, minLength: 4096, sync })
+
+    stream.on('ready', () => {
+      t.pass('ready emitted')
+    })
+
+    t.ok(stream.write('hello world\n'))
+    t.ok(stream.write('something else\n'))
+
+    stream.flush()
+
+    stream.on('drain', () => {
+      fs.readFile(dest, 'utf8', (err, data) => {
+        t.error(err)
+        t.equal(data, 'hello world\nsomething else\n')
+        stream.end()
+      })
+    })
+  })
+
+  test('flush with no data', (t) => {
+    t.plan(2)
+
+    const dest = file()
+    const fd = fs.openSync(dest, 'w')
+    const stream = new SonicBoom({ fd, minLength: 4096, sync })
+
+    stream.on('ready', () => {
+      t.pass('ready emitted')
+    })
+
stream.flush() + + stream.on('drain', () => { + t.pass('drain emitted') + }) + }) + + test('call flush cb after flushed', (t) => { + t.plan(4) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 4096, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.flush((err) => { + if (err) t.fail(err) + else t.pass('flush cb called') + }) + }) + + test('only call fsyncSync and not fsync when fsync: true', (t) => { + t.plan(6) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + sync, + fsync: true, + minLength: 4096 + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + fakeFs.fsync = function (fd, cb) { + t.fail('fake fs.fsync called while should not') + cb() + } + fakeFs.fsyncSync = function (fd) { + t.pass('fake fsyncSync called') + } + + function successOnAsyncOrSyncFn (isSync, originalFn) { + return function (...args) { + t.pass(`fake fs.${originalFn.name} called`) + fakeFs[originalFn.name] = originalFn + return fakeFs[originalFn.name](...args) + } + } + + if (sync) { + fakeFs.writeSync = successOnAsyncOrSyncFn(true, fs.writeSync) + } else { + fakeFs.write = successOnAsyncOrSyncFn(false, fs.write) + } + + t.ok(stream.write('hello world\n')) + stream.flush((err) => { + if (err) t.fail(err) + else t.pass('flush cb called') + + process.nextTick(() => { + // to make sure fsync is not called as well + t.pass('nextTick after flush called') + }) + }) + }) + + test('call flush cb with error when fsync failed', (t) => { + t.plan(5) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + sync, + minLength: 4096 + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + const err = new Error('other') + err.code = 'other' + + function onFsyncOnFsyncSync (isSync, originalFn) { + return function (...args) { + Error.captureStackTrace(err) + t.pass(`fake fs.${originalFn.name} called`) + fakeFs[originalFn.name] = originalFn + const cb = args[args.length - 1] + + cb(err) + } + } + + // only one is called depends on sync + fakeFs.fsync = onFsyncOnFsyncSync(false, fs.fsync) + + function successOnAsyncOrSyncFn (isSync, originalFn) { + return function (...args) { + t.pass(`fake fs.${originalFn.name} called`) + fakeFs[originalFn.name] = originalFn + return fakeFs[originalFn.name](...args) + } + } + + if (sync) { + fakeFs.writeSync = successOnAsyncOrSyncFn(true, fs.writeSync) + } else { + fakeFs.write = successOnAsyncOrSyncFn(false, fs.write) + } + + t.ok(stream.write('hello world\n')) + stream.flush((err) => { + if (err) t.equal(err.code, 'other') + else t.fail('flush cb called without an error') + }) + }) + + test('call flush cb even when have no data', (t) => { + t.plan(2) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 4096, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + + stream.flush((err) => { + if (err) t.fail(err) + else t.pass('flush cb called') + }) + }) + }) + + test('call flush cb even when minLength is 0', (t) => { + t.plan(1) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 0, sync }) + + 
stream.flush((err) => { + if (err) t.fail(err) + else t.pass('flush cb called') + }) + }) + + test('call flush cb with an error when trying to flush destroyed stream', (t) => { + t.plan(1) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 4096, sync }) + stream.destroy() + + stream.flush((err) => { + if (err) t.pass(err) + else t.fail('flush cb called without an error') + }) + }) + + test('call flush cb with an error when failed to flush', (t) => { + t.plan(5) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + sync, + minLength: 4096 + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + const err = new Error('other') + err.code = 'other' + + function onWriteOrWriteSync (isSync, originalFn) { + return function (...args) { + Error.captureStackTrace(err) + t.pass(`fake fs.${originalFn.name} called`) + fakeFs[originalFn.name] = originalFn + + if (isSync) throw err + const cb = args[args.length - 1] + + cb(err) + } + } + + // only one is called depends on sync + fakeFs.write = onWriteOrWriteSync(false, fs.write) + fakeFs.writeSync = onWriteOrWriteSync(true, fs.writeSync) + + t.ok(stream.write('hello world\n')) + stream.flush((err) => { + if (err) t.equal(err.code, 'other') + else t.fail('flush cb called without an error') + }) + + stream.end() + + stream.on('close', () => { + t.pass('close emitted') + }) + }) + + test('call flush cb when finish writing when currently in the middle', (t) => { + t.plan(4) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + sync, + + // to trigger write without calling flush + minLength: 1 + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + function onWriteOrWriteSync (originalFn) { + return function (...args) { + stream.flush((err) => { + if (err) t.fail(err) + else t.pass('flush cb called') + }) + + t.pass(`fake fs.${originalFn.name} called`) + fakeFs[originalFn.name] = originalFn + return originalFn(...args) + } + } + + // only one is called depends on sync + fakeFs.write = onWriteOrWriteSync(fs.write) + fakeFs.writeSync = onWriteOrWriteSync(fs.writeSync) + + t.ok(stream.write('hello world\n')) + }) + + test('call flush cb when writing and trying to flush before ready (on async)', (t) => { + t.plan(4) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + fakeFs.open = fsOpen + + const dest = file() + const stream = new SonicBoom({ + fd: dest, + // only async as sync is part of the constructor so the user will not be able to call write/flush + // before ready + sync: false, + + // to not trigger write without calling flush + minLength: 4096 + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + function fsOpen (...args) { + process.nextTick(() => { + // try writing and flushing before ready and in the middle of opening + t.pass('fake fs.open called') + t.ok(stream.write('hello world\n')) + + // calling flush + stream.flush((err) => { + if (err) t.fail(err) + else t.pass('flush cb called') + }) + + fakeFs.open = fs.open + fs.open(...args) + }) + } + }) +} diff --git a/node_modules/sonic-boom/test/fsync.test.js b/node_modules/sonic-boom/test/fsync.test.js new file mode 100644 index 0000000..9176de4 --- /dev/null +++ 
b/node_modules/sonic-boom/test/fsync.test.js @@ -0,0 +1,63 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const proxyquire = require('proxyquire') +const { file } = require('./helper') + +test('fsync with sync', (t) => { + t.plan(5) + + const fakeFs = Object.create(fs) + fakeFs.fsyncSync = function (fd) { + t.pass('fake fs.fsyncSync called') + return fs.fsyncSync(fd) + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync: true, fsync: true }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + const data = fs.readFileSync(dest, 'utf8') + t.equal(data, 'hello world\nsomething else\n') +}) + +test('fsync with async', (t) => { + t.plan(7) + + const fakeFs = Object.create(fs) + fakeFs.fsyncSync = function (fd) { + t.pass('fake fs.fsyncSync called') + return fs.fsyncSync(fd) + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, fsync: true }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) diff --git a/node_modules/sonic-boom/test/helper.js b/node_modules/sonic-boom/test/helper.js new file mode 100644 index 0000000..462281e --- /dev/null +++ b/node_modules/sonic-boom/test/helper.js @@ -0,0 +1,42 @@ +'use strict' + +const { test, teardown } = require('tap') +const fs = require('fs') +const os = require('os') +const path = require('path') + +const files = [] +let count = 0 + +function file () { + const file = path.join(os.tmpdir(), `sonic-boom-${process.pid}-${process.hrtime().toString()}-${count++}`) + files.push(file) + return file +} + +teardown(() => { + const rmSync = fs.rmSync || fs.rmdirSync + files.forEach((file) => { + try { + if (fs.existsSync(file)) { + fs.statSync(file).isDirectory() ? 
rmSync(file, { recursive: true, maxRetries: 10 }) : fs.unlinkSync(file) + } + } catch (e) { + console.log(e) + } + }) +}) + +function runTests (buildTests) { + test('sync false', (t) => { + buildTests(t.test, false) + t.end() + }) + + test('sync true', (t) => { + buildTests(t.test, true) + t.end() + }) +} + +module.exports = { file, runTests } diff --git a/node_modules/sonic-boom/test/minlength.test.js b/node_modules/sonic-boom/test/minlength.test.js new file mode 100644 index 0000000..db488c8 --- /dev/null +++ b/node_modules/sonic-boom/test/minlength.test.js @@ -0,0 +1,35 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const SonicBoom = require('../') +const { file } = require('./helper') + +const MAX_WRITE = 16 * 1024 + +test('drain deadlock', (t) => { + t.plan(4) + + const dest = file() + const stream = new SonicBoom({ dest, sync: false, minLength: 9999 }) + + t.ok(stream.write(Buffer.alloc(1500).fill('x').toString())) + t.ok(stream.write(Buffer.alloc(1500).fill('x').toString())) + t.ok(!stream.write(Buffer.alloc(MAX_WRITE).fill('x').toString())) + stream.on('drain', () => { + t.pass() + }) +}) + +test('should throw if minLength >= maxWrite', (t) => { + t.plan(1) + t.throws(() => { + const dest = file() + const fd = fs.openSync(dest, 'w') + + SonicBoom({ + fd, + minLength: MAX_WRITE + }) + }) +}) diff --git a/node_modules/sonic-boom/test/mode.test.js b/node_modules/sonic-boom/test/mode.test.js new file mode 100644 index 0000000..bac82ad --- /dev/null +++ b/node_modules/sonic-boom/test/mode.test.js @@ -0,0 +1,116 @@ +'use strict' + +const fs = require('fs') +const path = require('path') +const SonicBoom = require('../') +const { file, runTests } = require('./helper') + +const isWindows = process.platform === 'win32' + +runTests(buildTests) + +function buildTests (test, sync) { + // Reset the umask for testing + process.umask(0o000) + + test('mode', { skip: isWindows }, (t) => { + t.plan(6) + + const dest = file() + const mode = 0o666 + const stream = new SonicBoom({ dest, sync, mode }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + t.equal(fs.statSync(dest).mode & 0o777, stream.mode) + }) + }) + }) + + test('mode default', { skip: isWindows }, (t) => { + t.plan(6) + + const dest = file() + const defaultMode = 0o666 + const stream = new SonicBoom({ dest, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + t.equal(fs.statSync(dest).mode & 0o777, defaultMode) + }) + }) + }) + + test('mode on mkdir', { skip: isWindows }, (t) => { + t.plan(5) + + const dest = path.join(file(), 'out.log') + const mode = 0o666 + const stream = new SonicBoom({ dest, mkdir: true, mode, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + + stream.flush() + + stream.on('drain', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\n') + t.equal(fs.statSync(dest).mode & 0o777, stream.mode) + stream.end() + }) + }) + }) + + test('mode on append', { skip: isWindows }, (t) => { + 
t.plan(5) + + const dest = file() + fs.writeFileSync(dest, 'hello world\n', 'utf8', 0o422) + const mode = isWindows ? 0o444 : 0o666 + const stream = new SonicBoom({ dest, append: false, mode, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('something else\n')) + + stream.flush() + + stream.on('drain', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'something else\n') + t.equal(fs.statSync(dest).mode & 0o777, stream.mode) + stream.end() + }) + }) + }) +} diff --git a/node_modules/sonic-boom/test/periodicflush.test.js b/node_modules/sonic-boom/test/periodicflush.test.js new file mode 100644 index 0000000..5c0e093 --- /dev/null +++ b/node_modules/sonic-boom/test/periodicflush.test.js @@ -0,0 +1,61 @@ +'use strict' + +const FakeTimers = require('@sinonjs/fake-timers') +const fs = require('fs') +const SonicBoom = require('../') +const { file, runTests } = require('./helper') + +runTests(buildTests) + +function buildTests (test, sync) { + // Reset the umask for testing + process.umask(0o000) + + test('periodicflush_off', (t) => { + t.plan(4) + + const clock = FakeTimers.install() + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync, minLength: 5000 }) + + t.ok(stream.write('hello world\n')) + + setTimeout(function () { + fs.readFile(dest, 'utf8', function (err, data) { + t.error(err) + t.equal(data, '') + + stream.destroy() + t.pass('file empty') + }) + }, 2000) + + clock.tick(2000) + clock.uninstall() + }) + + test('periodicflush_on', (t) => { + t.plan(4) + + const clock = FakeTimers.install() + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync, minLength: 5000, periodicFlush: 1000 }) + + t.ok(stream.write('hello world\n')) + + setTimeout(function () { + fs.readFile(dest, 'utf8', function (err, data) { + t.error(err) + t.equal(data, 'hello world\n') + + stream.destroy() + t.pass('file not empty') + }) + }, 2000) + + clock.tick(2000) + clock.uninstall() + }) +} diff --git a/node_modules/sonic-boom/test/reopen.test.js b/node_modules/sonic-boom/test/reopen.test.js new file mode 100644 index 0000000..13fd8ad --- /dev/null +++ b/node_modules/sonic-boom/test/reopen.test.js @@ -0,0 +1,239 @@ +'use strict' + +const fs = require('fs') +const proxyquire = require('proxyquire') +const SonicBoom = require('../') +const { file, runTests } = require('./helper') + +runTests(buildTests) + +function buildTests (test, sync) { + // Reset the umask for testing + process.umask(0o000) + + test('reopen', (t) => { + t.plan(9) + + const dest = file() + const stream = new SonicBoom({ dest, sync }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const after = dest + '-moved' + + stream.once('drain', () => { + t.pass('drain emitted') + + fs.renameSync(dest, after) + stream.reopen() + + stream.once('ready', () => { + t.pass('ready emitted') + t.ok(stream.write('after reopen\n')) + + stream.once('drain', () => { + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'after reopen\n') + stream.end() + }) + }) + }) + }) + }) + }) + + test('reopen with buffer', (t) => { + t.plan(9) + + const dest = file() + const stream = new SonicBoom({ dest, minLength: 4096, sync }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const after = dest + '-moved' + + 
stream.once('ready', () => { + t.pass('drain emitted') + + stream.flush() + fs.renameSync(dest, after) + stream.reopen() + + stream.once('ready', () => { + t.pass('ready emitted') + t.ok(stream.write('after reopen\n')) + stream.flush() + + stream.once('drain', () => { + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'after reopen\n') + stream.end() + }) + }) + }) + }) + }) + }) + + test('reopen if not open', (t) => { + t.plan(3) + + const dest = file() + const stream = new SonicBoom({ dest, sync }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.reopen() + + stream.end() + stream.on('close', function () { + t.pass('ended') + }) + }) + + test('reopen with file', (t) => { + t.plan(10) + + const dest = file() + const stream = new SonicBoom({ dest, minLength: 0, sync }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const after = dest + '-new' + + stream.once('drain', () => { + t.pass('drain emitted') + + stream.reopen(after) + t.equal(stream.file, after) + + stream.once('ready', () => { + t.pass('ready emitted') + t.ok(stream.write('after reopen\n')) + + stream.once('drain', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'after reopen\n') + stream.end() + }) + }) + }) + }) + }) + }) + + test('reopen throws an error', (t) => { + t.plan(sync ? 10 : 9) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const stream = new SonicBoom({ dest, sync }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const after = dest + '-moved' + + stream.on('error', () => { + t.pass('error emitted') + }) + + stream.once('drain', () => { + t.pass('drain emitted') + + fs.renameSync(dest, after) + if (sync) { + fakeFs.openSync = function (file, flags) { + t.pass('fake fs.openSync called') + throw new Error('open error') + } + } else { + fakeFs.open = function (file, flags, mode, cb) { + t.pass('fake fs.open called') + setTimeout(() => cb(new Error('open error')), 0) + } + } + + if (sync) { + try { + stream.reopen() + } catch (err) { + t.pass('reopen throwed') + } + } else { + stream.reopen() + } + + setTimeout(() => { + t.ok(stream.write('after reopen\n')) + + stream.end() + stream.on('finish', () => { + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\nafter reopen\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) + }, 0) + }) + }) + + test('reopen emits drain', (t) => { + t.plan(9) + + const dest = file() + const stream = new SonicBoom({ dest, sync }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const after = dest + '-moved' + + stream.once('drain', () => { + t.pass('drain emitted') + + fs.renameSync(dest, after) + stream.reopen() + + stream.once('drain', () => { + t.pass('drain emitted') + t.ok(stream.write('after reopen\n')) + + stream.once('drain', () => { + fs.readFile(after, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'after reopen\n') + stream.end() + }) + }) + }) + }) + }) + }) +} diff --git 
a/node_modules/sonic-boom/test/retry.test.js b/node_modules/sonic-boom/test/retry.test.js new file mode 100644 index 0000000..6b62ee7 --- /dev/null +++ b/node_modules/sonic-boom/test/retry.test.js @@ -0,0 +1,414 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const proxyquire = require('proxyquire') +const { file, runTests } = require('./helper') + +const MAX_WRITE = 16 * 1024 + +runTests(buildTests) + +function buildTests (test, sync) { + // Reset the umask for testing + process.umask(0o000) + test('retry on EAGAIN', (t) => { + t.plan(7) + + const fakeFs = Object.create(fs) + fakeFs.write = function (fd, buf, ...args) { + t.pass('fake fs.write called') + fakeFs.write = fs.write + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + process.nextTick(args.pop(), err) + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync: false, minLength: 0 }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) + }) +} + +test('emit error on async EAGAIN', (t) => { + t.plan(11) + + const fakeFs = Object.create(fs) + fakeFs.write = function (fd, buf, ...args) { + t.pass('fake fs.write called') + fakeFs.write = fs.write + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + process.nextTick(args[args.length - 1], err) + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + sync: false, + minLength: 12, + retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => { + t.equal(err.code, 'EAGAIN') + t.equal(writeBufferLen, 12) + t.equal(remainingBufferLen, 0) + return false + } + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + stream.once('error', err => { + t.equal(err.code, 'EAGAIN') + t.ok(stream.write('something else\n')) + }) + + t.ok(stream.write('hello world\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) + +test('retry on EAGAIN (sync)', (t) => { + t.plan(7) + + const fakeFs = Object.create(fs) + fakeFs.writeSync = function (fd, buf, enc) { + t.pass('fake fs.writeSync called') + fakeFs.writeSync = fs.writeSync + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + throw err + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 0, sync: true }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) + +test('emit error on EAGAIN (sync)', (t) => { + t.plan(11) + + const fakeFs = Object.create(fs) + fakeFs.writeSync = function (fd, buf, enc) { + t.pass('fake fs.writeSync called') + fakeFs.writeSync = 
fs.writeSync + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + throw err + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + minLength: 0, + sync: true, + retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => { + t.equal(err.code, 'EAGAIN') + t.equal(writeBufferLen, 12) + t.equal(remainingBufferLen, 0) + return false + } + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + stream.once('error', err => { + t.equal(err.code, 'EAGAIN') + t.ok(stream.write('something else\n')) + }) + + t.ok(stream.write('hello world\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) + +test('retryEAGAIN receives remaining buffer on async if write fails', (t) => { + t.plan(12) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + sync: false, + minLength: 12, + retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => { + t.equal(err.code, 'EAGAIN') + t.equal(writeBufferLen, 12) + t.equal(remainingBufferLen, 11) + return false + } + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + stream.once('error', err => { + t.equal(err.code, 'EAGAIN') + t.ok(stream.write('done')) + }) + + fakeFs.write = function (fd, buf, ...args) { + t.pass('fake fs.write called') + fakeFs.write = fs.write + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + t.ok(stream.write('sonic boom\n')) + process.nextTick(args[args.length - 1], err) + } + + t.ok(stream.write('hello world\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsonic boom\ndone') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) + +test('retryEAGAIN receives remaining buffer if exceeds maxWrite', (t) => { + t.plan(17) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const buf = Buffer.alloc(MAX_WRITE - 2).fill('x').toString() // 1 MB + const stream = new SonicBoom({ + fd, + sync: false, + minLength: MAX_WRITE - 1, + retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => { + t.equal(err.code, 'EAGAIN', 'retryEAGAIN received EAGAIN error') + t.equal(writeBufferLen, buf.length, 'writeBufferLen === buf.length') + t.equal(remainingBufferLen, 23, 'remainingBufferLen === 23') + return false + } + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + fakeFs.write = function (fd, buf, ...args) { + t.pass('fake fs.write called') + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + process.nextTick(args.pop(), err) + } + + fakeFs.writeSync = function (fd, buf, enc) { + t.pass('fake fs.write called') + const err = new Error('EAGAIN') + err.code = 'EAGAIN' + throw err + } + + t.ok(stream.write(buf), 'write buf') + t.notOk(stream.write('hello world\nsonic boom\n'), 'write hello world sonic boom') + + stream.once('error', err => { + t.equal(err.code, 'EAGAIN', 'bubbled error should be EAGAIN') + + try { + stream.flushSync() + } catch (err) { + t.equal(err.code, 'EAGAIN', 'thrown error should be EAGAIN') + fakeFs.write = fs.write + fakeFs.writeSync = 
fs.writeSync + stream.end() + } + }) + + stream.on('finish', () => { + t.pass('finish emitted') + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, `${buf}hello world\nsonic boom\n`, 'data on file should match written') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) + +test('retry on EBUSY', (t) => { + t.plan(7) + + const fakeFs = Object.create(fs) + fakeFs.write = function (fd, buf, ...args) { + t.pass('fake fs.write called') + fakeFs.write = fs.write + const err = new Error('EBUSY') + err.code = 'EBUSY' + process.nextTick(args.pop(), err) + } + const SonicBoom = proxyquire('..', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync: false, minLength: 0 }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) + +test('emit error on async EBUSY', (t) => { + t.plan(11) + + const fakeFs = Object.create(fs) + fakeFs.write = function (fd, buf, ...args) { + t.pass('fake fs.write called') + fakeFs.write = fs.write + const err = new Error('EBUSY') + err.code = 'EBUSY' + process.nextTick(args.pop(), err) + } + const SonicBoom = proxyquire('..', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ + fd, + sync: false, + minLength: 12, + retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => { + t.equal(err.code, 'EBUSY') + t.equal(writeBufferLen, 12) + t.equal(remainingBufferLen, 0) + return false + } + }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + stream.once('error', err => { + t.equal(err.code, 'EBUSY') + t.ok(stream.write('something else\n')) + }) + + t.ok(stream.write('hello world\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) +}) diff --git a/node_modules/sonic-boom/test/sync.test.js b/node_modules/sonic-boom/test/sync.test.js new file mode 100644 index 0000000..6cd1d74 --- /dev/null +++ b/node_modules/sonic-boom/test/sync.test.js @@ -0,0 +1,261 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const proxyquire = require('proxyquire') +const SonicBoom = require('../') +const { file } = require('./helper') + +test('write buffers that are not totally written with sync mode', (t) => { + t.plan(9) + + const fakeFs = Object.create(fs) + fakeFs.writeSync = function (fd, buf, enc) { + t.pass('fake fs.write called') + fakeFs.writeSync = (fd, buf, enc) => { + t.pass('calling real fs.writeSync, ' + buf) + return fs.writeSync(fd, buf, enc) + } + return 0 + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 0, sync: true }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + 
t.pass('close emitted')
+  })
+})
+
+test('write buffers that are not totally written with flush sync', (t) => {
+  t.plan(7)
+
+  const fakeFs = Object.create(fs)
+  fakeFs.writeSync = function (fd, buf, enc) {
+    t.pass('fake fs.write called')
+    fakeFs.writeSync = fs.writeSync
+    return 0
+  }
+  const SonicBoom = proxyquire('../', {
+    fs: fakeFs
+  })
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 100, sync: false })
+
+  stream.on('ready', () => {
+    t.pass('ready emitted')
+  })
+
+  t.ok(stream.write('hello world\n'))
+  t.ok(stream.write('something else\n'))
+
+  stream.flushSync()
+
+  stream.on('write', (n) => {
+    if (n === 0) {
+      t.fail('throwing to avoid infinite loop')
+      throw Error('shouldn\'t call write handler after flushing with n === 0')
+    }
+  })
+
+  stream.end()
+
+  stream.on('finish', () => {
+    fs.readFile(dest, 'utf8', (err, data) => {
+      t.error(err)
+      t.equal(data, 'hello world\nsomething else\n')
+    })
+  })
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
+
+test('sync writing is fully sync', (t) => {
+  t.plan(6)
+
+  const fakeFs = Object.create(fs)
+  fakeFs.writeSync = function (fd, buf, enc, cb) {
+    t.pass('fake fs.write called')
+    return fs.writeSync(fd, buf, enc)
+  }
+  const SonicBoom = proxyquire('../', {
+    fs: fakeFs
+  })
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 0, sync: true })
+  t.ok(stream.write('hello world\n'))
+  t.ok(stream.write('something else\n'))
+
+  // 'drain' will only be emitted once;
+  // the number of assertions at the top checks this.
+  stream.on('drain', () => {
+    t.pass('drain emitted')
+  })
+
+  const data = fs.readFileSync(dest, 'utf8')
+  t.equal(data, 'hello world\nsomething else\n')
+})
+
+test('write enormously large buffers sync', (t) => {
+  t.plan(3)
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 0, sync: true })
+
+  const buf = Buffer.alloc(1024).fill('x').toString() // 1 KiB per write
+  let length = 0
+
+  for (let i = 0; i < 1024 * 512; i++) {
+    length += buf.length
+    stream.write(buf)
+  }
+
+  stream.end()
+
+  stream.on('finish', () => {
+    fs.stat(dest, (err, stat) => {
+      t.error(err)
+      t.equal(stat.size, length)
+    })
+  })
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
+
+test('write enormously large buffers sync with utf8 multi-byte split', (t) => {
+  t.plan(4)
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 0, sync: true })
+
+  let buf = Buffer.alloc((1024 * 16) - 2).fill('x') // 16 KiB - 2 B
+  const length = buf.length + 4
+  buf = buf.toString() + '🌲' // 16 KiB + 2 B once the 4-byte emoji is appended
+
+  stream.write(buf)
+
+  stream.end()
+
+  stream.on('finish', () => {
+    fs.stat(dest, (err, stat) => {
+      t.error(err)
+      t.equal(stat.size, length)
+      const char = Buffer.alloc(4)
+      const fd = fs.openSync(dest, 'r')
+      fs.readSync(fd, char, 0, 4, length - 4)
+      t.equal(char.toString(), '🌲')
+    })
+  })
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
+
+// for context see this issue https://github.com/pinojs/pino/issues/871
+test('file specified by dest path available immediately when options.sync is true', (t) => {
+  t.plan(3)
+  const dest = file()
+  const stream = new SonicBoom({ dest, sync: true })
+  t.ok(stream.write('hello world\n'))
+  t.ok(stream.write('something else\n'))
+  stream.flushSync()
+  t.pass('file opened and written to without error')
+})
+
+test('sync error handling', (t) => {
+  t.plan(1)
+  try {
+    /*
eslint no-new: off */ + new SonicBoom({ dest: '/path/to/nowwhere', sync: true }) + t.fail('must throw synchronously') + } catch (err) { + t.pass('an error happened') + } +}) + +for (const fd of [1, 2]) { + test(`fd ${fd}`, (t) => { + t.plan(1) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const stream = new SonicBoom({ fd }) + + fakeFs.close = function (fd, cb) { + t.fail(`should not close fd ${fd}`) + } + + stream.end() + + stream.on('close', () => { + t.pass('close emitted') + }) + }) +} + +test('._len must always be equal or greater than 0', (t) => { + t.plan(3) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync: true }) + + t.ok(stream.write('hello world 👀\n')) + t.ok(stream.write('another line 👀\n')) + + t.equal(stream._len, 0) + + stream.end() +}) + +test('._len must always be equal or greater than 0', (t) => { + const n = 20 + t.plan(n + 3) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync: true, minLength: 20 }) + + let str = '' + for (let i = 0; i < 20; i++) { + t.ok(stream.write('👀')) + str += '👀' + } + + t.equal(stream._len, 0) + + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, str) + }) +}) diff --git a/node_modules/sonic-boom/test/write.test.js b/node_modules/sonic-boom/test/write.test.js new file mode 100644 index 0000000..b619507 --- /dev/null +++ b/node_modules/sonic-boom/test/write.test.js @@ -0,0 +1,465 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const proxyquire = require('proxyquire') +const SonicBoom = require('../') +const { file, runTests } = require('./helper') + +runTests(buildTests) + +function buildTests (test, sync) { + // Reset the umask for testing + process.umask(0o000) + + test('write things to a file descriptor', (t) => { + t.plan(6) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) + }) + + test('write things in a streaming fashion', (t) => { + t.plan(8) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync }) + + stream.once('drain', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\n') + t.ok(stream.write('something else\n')) + }) + + stream.once('drain', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + stream.end() + }) + }) + }) + + t.ok(stream.write('hello world\n')) + + stream.on('finish', () => { + t.pass('finish emitted') + }) + stream.on('close', () => { + t.pass('close emitted') + }) + }) + + test('can be piped into', (t) => { + t.plan(4) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, sync }) + const source = fs.createReadStream(__filename, { encoding: 'utf8' }) + + source.pipe(stream) + + stream.on('finish', () => { + fs.readFile(__filename, 'utf8', (err, expected) => { + t.error(err) + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, expected) + }) + }) + }) + 
stream.on('close', () => { + t.pass('close emitted') + }) + }) + + test('write things to a file', (t) => { + t.plan(6) + + const dest = file() + const stream = new SonicBoom({ dest, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) + }) + + test('minLength', (t) => { + t.plan(8) + + const dest = file() + const stream = new SonicBoom({ dest, minLength: 4096, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const fail = t.fail + stream.on('drain', fail) + + // bad use of timer + // TODO refactor + setTimeout(function () { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, '') + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + }) + }, 100) + + stream.on('close', () => { + t.pass('close emitted') + }) + }) + + test('write later on recoverable error', (t) => { + t.plan(8) + + const fakeFs = Object.create(fs) + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + + const dest = file() + const fd = fs.openSync(dest, 'w') + const stream = new SonicBoom({ fd, minLength: 0, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + stream.on('error', () => { + t.pass('error emitted') + }) + + if (sync) { + fakeFs.writeSync = function (fd, buf, enc) { + t.pass('fake fs.writeSync called') + throw new Error('recoverable error') + } + } else { + fakeFs.write = function (fd, buf, ...args) { + t.pass('fake fs.write called') + setTimeout(() => args.pop()(new Error('recoverable error')), 0) + } + } + + t.ok(stream.write('hello world\n')) + + setTimeout(() => { + if (sync) { + fakeFs.writeSync = fs.writeSync + } else { + fakeFs.write = fs.write + } + + t.ok(stream.write('something else\n')) + + stream.end() + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) + }, 0) + }) + + test('emit write events', (t) => { + t.plan(7) + + const dest = file() + const stream = new SonicBoom({ dest, sync }) + + stream.on('ready', () => { + t.pass('ready emitted') + }) + + let length = 0 + stream.on('write', (bytes) => { + length += bytes + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + fs.readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + t.equal(length, 27) + }) + }) + stream.on('close', () => { + t.pass('close emitted') + }) + }) + + test('write multi-byte characters string over than maxWrite', (t) => { + const fakeFs = Object.create(fs) + const MAX_WRITE = 65535 + fakeFs.write = function (fd, buf, ...args) { + // only write byteLength === MAX_WRITE + const _buf = Buffer.from(buf).subarray(0, MAX_WRITE).toString() + fs.write(fd, _buf, ...args) + setImmediate(args[args.length - 1], null, MAX_WRITE) + fakeFs.write = function (fd, buf, ...args) { + fs.write(fd, buf, ...args) + } + } + const SonicBoom = proxyquire('../', { + fs: fakeFs + }) + 
+    const dest = file()
+    const fd = fs.openSync(dest, 'w')
+    const stream = new SonicBoom({ fd, minLength: 0, sync, maxWrite: MAX_WRITE })
+    let buf = Buffer.alloc(MAX_WRITE).fill('x')
+    buf = '🌲' + buf.toString()
+    stream.write(buf)
+    stream.end()
+
+    stream.on('finish', () => {
+      fs.readFile(dest, 'utf8', (err, data) => {
+        t.error(err)
+        t.equal(data, buf)
+        t.end()
+      })
+    })
+    stream.on('close', () => {
+      t.pass('close emitted')
+    })
+    stream.on('error', () => {
+      t.pass('error emitted')
+    })
+  })
+}
+
+test('write buffers that are not totally written', (t) => {
+  t.plan(9)
+
+  const fakeFs = Object.create(fs)
+  fakeFs.write = function (fd, buf, ...args) {
+    t.pass('fake fs.write called')
+    fakeFs.write = function (fd, buf, ...args) {
+      t.pass('calling real fs.write, ' + buf)
+      fs.write(fd, buf, ...args)
+    }
+    process.nextTick(args[args.length - 1], null, 0)
+  }
+  const SonicBoom = proxyquire('../', {
+    fs: fakeFs
+  })
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 0, sync: false })
+
+  stream.on('ready', () => {
+    t.pass('ready emitted')
+  })
+
+  t.ok(stream.write('hello world\n'))
+  t.ok(stream.write('something else\n'))
+
+  stream.end()
+
+  stream.on('finish', () => {
+    fs.readFile(dest, 'utf8', (err, data) => {
+      t.error(err)
+      t.equal(data, 'hello world\nsomething else\n')
+    })
+  })
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
+
+test('write enormously large buffers async', (t) => {
+  t.plan(3)
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 0, sync: false })
+
+  const buf = Buffer.alloc(1024).fill('x').toString() // 1 KiB per write; 512 MiB in total
+  let length = 0
+
+  for (let i = 0; i < 1024 * 512; i++) {
+    length += buf.length
+    stream.write(buf)
+  }
+
+  stream.end()
+
+  stream.on('finish', () => {
+    fs.stat(dest, (err, stat) => {
+      t.error(err)
+      t.equal(stat.size, length)
+    })
+  })
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
+
+test('make sure `maxLength` is passed', (t) => {
+  t.plan(1)
+  const dest = file()
+  const stream = new SonicBoom({ dest, maxLength: 65536 })
+  t.equal(stream.maxLength, 65536)
+})
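+
+// The next test stubs fs.write to assert write atomicity: every chunk that
+// reaches the (fake) file system must be a whole multiple of the 1023-byte
+// record, i.e. a queued record is never split across two writes.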
+test('write enormously large buffers async atomically', (t) => {
+  const fakeFs = Object.create(fs)
+  const SonicBoom = proxyquire('../', {
+    fs: fakeFs
+  })
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 0, sync: false })
+
+  const buf = Buffer.alloc(1023).fill('x').toString()
+
+  fakeFs.write = function (fd, _buf, ...args) {
+    if (_buf.length % buf.length !== 0) {
+      t.fail('write called with wrong buffer size')
+    }
+
+    setImmediate(args[args.length - 1], null, _buf.length)
+  }
+
+  for (let i = 0; i < 1024 * 512; i++) {
+    stream.write(buf)
+  }
+
+  setImmediate(() => {
+    for (let i = 0; i < 1024 * 512; i++) {
+      stream.write(buf)
+    }
+
+    stream.end()
+  })
+
+  stream.on('close', () => {
+    t.pass('close emitted')
+    t.end()
+  })
+})
+
+test('write should not drop new data if buffer is not full', (t) => {
+  t.plan(2)
+  const fakeFs = Object.create(fs)
+  const SonicBoom = proxyquire('../', {
+    fs: fakeFs
+  })
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 101, maxLength: 102, sync: false })
+
+  const buf = Buffer.alloc(100).fill('x').toString()
+
+  fakeFs.write = function (fd, _buf, ...args) {
+    t.equal(_buf.length, buf.length + 2)
+    setImmediate(args[args.length - 1], null, _buf.length)
+    fakeFs.write = () => t.error('should not call write again')
+    stream.end()
+  }
+
+  stream.on('drop', (data) => {
+    t.error('should not drop')
+  })
+
+  stream.write(buf)
+  stream.write('aa')
+
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
+
+test('write should drop new data if buffer is full', (t) => {
+  t.plan(3)
+  const fakeFs = Object.create(fs)
+  const SonicBoom = proxyquire('../', {
+    fs: fakeFs
+  })
+
+  const dest = file()
+  const fd = fs.openSync(dest, 'w')
+  const stream = new SonicBoom({ fd, minLength: 101, maxLength: 102, sync: false })
+
+  const buf = Buffer.alloc(100).fill('x').toString()
+
+  fakeFs.write = function (fd, _buf, ...args) {
+    t.equal(_buf.length, buf.length)
+    setImmediate(args[args.length - 1], null, _buf.length)
+    fakeFs.write = () => t.error('should not call write more than once')
+  }
+
+  stream.on('drop', (data) => {
+    t.equal(data.length, 3)
+    stream.end()
+  })
+
+  stream.write(buf)
+  stream.write('aaa')
+
+  stream.on('close', () => {
+    t.pass('close emitted')
+  })
+})
diff --git a/node_modules/sonic-boom/types/index.d.ts b/node_modules/sonic-boom/types/index.d.ts
new file mode 100644
index 0000000..97057f6
--- /dev/null
+++ b/node_modules/sonic-boom/types/index.d.ts
@@ -0,0 +1,63 @@
+// Type definitions for sonic-boom 0.7
+// Definitions by: Alex Ferrando
+// Igor Savin
+/// 
+
+import { EventEmitter } from 'events';
+
+export default SonicBoom;
+export type SonicBoomOpts = {
+  fd?: number | string | symbol
+  dest?: string | number
+  maxLength?: number
+  minLength?: number
+  maxWrite?: number
+  periodicFlush?: number
+  sync?: boolean
+  fsync?: boolean
+  append?: boolean
+  mode?: string | number
+  mkdir?: boolean
+  contentMode?: 'buffer' | 'utf8'
+  retryEAGAIN?: (err: Error, writeBufferLen: number, remainingBufferLen: number) => boolean
+}
+
+export class SonicBoom extends EventEmitter {
+  /**
+   * @param opts options; `dest` accepts a file path or a numerical file
+   * descriptor. Default: `process.stdout`.
+   * @returns a new sonic-boom instance
+   */
+  constructor(opts: SonicBoomOpts)
+
+  /**
+   * Writes the string to the file. It will return false to signal the producer to slow down.
+   */
+  write(string: string): boolean;
+
+  /**
+   * Writes the current buffer to the file if a write was not in progress.
+   * Does nothing if `minLength` is zero or if a write is already in progress.
+   */
+  flush(cb?: (err?: Error) => unknown): void;
+
+  /**
+   * Reopens the file in place; useful for log rotation.
+   */
+  reopen(fileDescriptor?: string | number): void;
+
+  /**
+   * Flushes the buffered data synchronously. This is a costly operation.
+   */
+  flushSync(): void;
+
+  /**
+   * Closes the stream; buffered data is flushed asynchronously.
+   */
+  end(): void;
+
+  /**
+   * Closes the stream immediately; the data is not flushed.
+   */
+  destroy(): void;
+}
diff --git a/node_modules/sonic-boom/types/tests/test.ts b/node_modules/sonic-boom/types/tests/test.ts
new file mode 100644
index 0000000..f222b54
--- /dev/null
+++ b/node_modules/sonic-boom/types/tests/test.ts
@@ -0,0 +1,4 @@
+import { SonicBoom } from '../../'
+
+const sonic = new SonicBoom({ fd: process.stdout.fd })
+sonic.write('hello sonic\n')
diff --git a/node_modules/split2/LICENSE b/node_modules/split2/LICENSE
new file mode 100644
index 0000000..a91afe5
--- /dev/null
+++ b/node_modules/split2/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2014-2018, Matteo Collina
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/split2/README.md b/node_modules/split2/README.md
new file mode 100644
index 0000000..36f03ab
--- /dev/null
+++ b/node_modules/split2/README.md
@@ -0,0 +1,85 @@
+# Split2(matcher, mapper, options)
+
+![ci](https://github.com/mcollina/split2/workflows/ci/badge.svg)
+
+Break up a stream and reassemble it so that each line is a chunk.
+`split2` is inspired by [@dominictarr](https://github.com/dominictarr)'s [`split`](https://github.com/dominictarr/split) module,
+and it is totally API compatible with it.
+However, it is based on the Node.js core [`Transform`](https://nodejs.org/api/stream.html#stream_new_stream_transform_options) stream.
+
+`matcher` may be a `String` or a `RegExp`. For example, to read every line in a file:
+
+``` js
+  fs.createReadStream(file)
+    .pipe(split2())
+    .on('data', function (line) {
+      // each chunk is now a separate line!
+    })
+
+```
+
+`split` takes the same arguments as `string.split` except it defaults to `/\r?\n/`, and the optional `limit` parameter is ignored.
+[String#split](https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/String/split)
+
+`split` takes an optional options object as its third argument, which
+is passed directly as a
+[Transform](https://nodejs.org/api/stream.html#stream_new_stream_transform_options)
+option.
+
+Additionally, the `.maxLength` and `.skipOverflow` options are implemented, which set limits on the internal
+buffer size and the stream's behavior when the limit is exceeded. There is no limit unless `maxLength` is set. When
+the internal buffer size exceeds `maxLength`, the stream emits an error by default. You may also set `skipOverflow` to
+`true` to suppress the error and instead skip past any lines that cause the internal buffer to exceed `maxLength`.
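+
+For instance, a minimal sketch (with a hypothetical `big.log` as input) that skips oversized lines instead of erroring:
+
+``` js
+fs.createReadStream('big.log')
+  .pipe(split2({ maxLength: 1024 * 1024, skipOverflow: true }))
+  .on('data', function (line) {
+    // lines longer than 1 MiB are skipped rather than raised as errors
+  })
+```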
+Calling `.destroy` will make the stream emit `close`. Use this to perform cleanup logic:
+
+``` js
+var splitFile = function(filename) {
+  var file = fs.createReadStream(filename)
+
+  return file
+    .pipe(split2())
+    .on('close', function() {
+      // destroy the file stream in case the split stream was destroyed
+      file.destroy()
+    })
+}
+
+var stream = splitFile('my-file.txt')
+
+stream.destroy() // will destroy the input file stream
+```
+
+# NDJSON - Newline Delimited JSON
+
+`split2` accepts a function which transforms each line.
+
+``` js
+fs.createReadStream(file)
+  .pipe(split2(JSON.parse))
+  .on('data', function (obj) {
+    // each chunk is now a JS object
+  })
+  .on("error", function(error) {
+    // handle parsing errors here
+  })
+```
+
+However, in [@dominictarr](https://github.com/dominictarr)'s [`split`](https://github.com/dominictarr/split) the mapper
+is wrapped in a try-catch, while here it is not: if your parsing logic can throw, wrap it yourself. Alternatively, you can rely on the stream's error handling when the mapper function throws.
+
+# License
+
+Copyright (c) 2014-2021, Matteo Collina
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/split2/bench.js b/node_modules/split2/bench.js
new file mode 100644
index 0000000..15ec5df
--- /dev/null
+++ b/node_modules/split2/bench.js
@@ -0,0 +1,27 @@
+'use strict'
+
+const split = require('./')
+const bench = require('fastbench')
+const binarySplit = require('binary-split')
+const fs = require('fs')
+
+function benchSplit (cb) {
+  fs.createReadStream('package.json')
+    .pipe(split())
+    .on('end', cb)
+    .resume()
+}
+
+function benchBinarySplit (cb) {
+  fs.createReadStream('package.json')
+    .pipe(binarySplit())
+    .on('end', cb)
+    .resume()
+}
+
+const run = bench([
+  benchSplit,
+  benchBinarySplit
+], 10000)
+
+run(run)
diff --git a/node_modules/split2/index.js b/node_modules/split2/index.js
new file mode 100644
index 0000000..9b59f6c
--- /dev/null
+++ b/node_modules/split2/index.js
@@ -0,0 +1,141 @@
+/*
+Copyright (c) 2014-2021, Matteo Collina
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+*/ + +'use strict' + +const { Transform } = require('stream') +const { StringDecoder } = require('string_decoder') +const kLast = Symbol('last') +const kDecoder = Symbol('decoder') + +function transform (chunk, enc, cb) { + let list + if (this.overflow) { // Line buffer is full. Skip to start of next line. + const buf = this[kDecoder].write(chunk) + list = buf.split(this.matcher) + + if (list.length === 1) return cb() // Line ending not found. Discard entire chunk. + + // Line ending found. Discard trailing fragment of previous line and reset overflow state. + list.shift() + this.overflow = false + } else { + this[kLast] += this[kDecoder].write(chunk) + list = this[kLast].split(this.matcher) + } + + this[kLast] = list.pop() + + for (let i = 0; i < list.length; i++) { + try { + push(this, this.mapper(list[i])) + } catch (error) { + return cb(error) + } + } + + this.overflow = this[kLast].length > this.maxLength + if (this.overflow && !this.skipOverflow) { + cb(new Error('maximum buffer reached')) + return + } + + cb() +} + +function flush (cb) { + // forward any gibberish left in there + this[kLast] += this[kDecoder].end() + + if (this[kLast]) { + try { + push(this, this.mapper(this[kLast])) + } catch (error) { + return cb(error) + } + } + + cb() +} + +function push (self, val) { + if (val !== undefined) { + self.push(val) + } +} + +function noop (incoming) { + return incoming +} + +function split (matcher, mapper, options) { + // Set defaults for any arguments not supplied. + matcher = matcher || /\r?\n/ + mapper = mapper || noop + options = options || {} + + // Test arguments explicitly. + switch (arguments.length) { + case 1: + // If mapper is only argument. + if (typeof matcher === 'function') { + mapper = matcher + matcher = /\r?\n/ + // If options is only argument. + } else if (typeof matcher === 'object' && !(matcher instanceof RegExp) && !matcher[Symbol.split]) { + options = matcher + matcher = /\r?\n/ + } + break + + case 2: + // If mapper and options are arguments. + if (typeof matcher === 'function') { + options = mapper + mapper = matcher + matcher = /\r?\n/ + // If matcher and options are arguments. 
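+        // (in that branch the caller-supplied matcher is kept and mapper falls back to noop)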
+ } else if (typeof mapper === 'object') { + options = mapper + mapper = noop + } + } + + options = Object.assign({}, options) + options.autoDestroy = true + options.transform = transform + options.flush = flush + options.readableObjectMode = true + + const stream = new Transform(options) + + stream[kLast] = '' + stream[kDecoder] = new StringDecoder('utf8') + stream.matcher = matcher + stream.mapper = mapper + stream.maxLength = options.maxLength + stream.skipOverflow = options.skipOverflow || false + stream.overflow = false + stream._destroy = function (err, cb) { + // Weird Node v12 bug that we need to work around + this._writableState.errorEmitted = false + cb(err) + } + + return stream +} + +module.exports = split diff --git a/node_modules/split2/package.json b/node_modules/split2/package.json new file mode 100644 index 0000000..e04bcc8 --- /dev/null +++ b/node_modules/split2/package.json @@ -0,0 +1,39 @@ +{ + "name": "split2", + "version": "4.2.0", + "description": "split a Text Stream into a Line Stream, using Stream 3", + "main": "index.js", + "scripts": { + "lint": "standard --verbose", + "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test.js", + "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js", + "test:report": "npm run lint && npm run unit:report", + "test": "npm run lint && npm run unit", + "legacy": "tape test.js" + }, + "pre-commit": [ + "test" + ], + "website": "https://github.com/mcollina/split2", + "repository": { + "type": "git", + "url": "https://github.com/mcollina/split2.git" + }, + "bugs": { + "url": "http://github.com/mcollina/split2/issues" + }, + "engines": { + "node": ">= 10.x" + }, + "author": "Matteo Collina ", + "license": "ISC", + "devDependencies": { + "binary-split": "^1.0.3", + "callback-stream": "^1.1.0", + "fastbench": "^1.0.0", + "nyc": "^15.0.1", + "pre-commit": "^1.1.2", + "standard": "^17.0.0", + "tape": "^5.0.0" + } +} diff --git a/node_modules/split2/test.js b/node_modules/split2/test.js new file mode 100644 index 0000000..a7f9838 --- /dev/null +++ b/node_modules/split2/test.js @@ -0,0 +1,409 @@ +'use strict' + +const test = require('tape') +const split = require('./') +const callback = require('callback-stream') +const strcb = callback.bind(null, { decodeStrings: false }) +const objcb = callback.bind(null, { objectMode: true }) + +test('split two lines on end', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\nworld') +}) + +test('split two lines on two writes', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.write('hello') + input.write('\nworld') + input.end() +}) + +test('split four lines on three writes', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world', 'bye', 'world']) + })) + + input.write('hello\nwor') + input.write('ld\nbye\nwo') + input.write('rld') + input.end() +}) + +test('accumulate multiple writes', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['helloworld']) + })) + + input.write('hello') + input.write('world') + input.end() +}) + +test('split using a custom string matcher', function (t) { + t.plan(2) + + const input = 
split('~') + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('split using a custom regexp matcher', function (t) { + t.plan(2) + + const input = split(/~/) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('support an option argument', function (t) { + t.plan(2) + + const input = split({ highWaterMark: 2 }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\nworld') +}) + +test('support a mapper function', function (t) { + t.plan(2) + + const a = { a: '42' } + const b = { b: '24' } + + const input = split(JSON.parse) + + input.pipe(objcb(function (err, list) { + t.error(err) + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('\n') + input.end(JSON.stringify(b)) +}) + +test('split lines windows-style', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\r\nworld') +}) + +test('splits a buffer', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end(Buffer.from('hello\nworld')) +}) + +test('do not end on undefined', function (t) { + t.plan(2) + + const input = split(function (line) { }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, []) + })) + + input.end(Buffer.from('hello\nworld')) +}) + +test('has destroy method', function (t) { + t.plan(1) + + const input = split(function (line) { }) + + input.on('close', function () { + t.ok(true, 'close emitted') + t.end() + }) + + input.destroy() +}) + +test('support custom matcher and mapper', function (t) { + t.plan(4) + + const a = { a: '42' } + const b = { b: '24' } + const input = split('~', JSON.parse) + + t.equal(input.matcher, '~') + t.equal(typeof input.mapper, 'function') + + input.pipe(objcb(function (err, list) { + t.notOk(err, 'no errors') + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('~') + input.end(JSON.stringify(b)) +}) + +test('support custom matcher and options', function (t) { + t.plan(6) + + const input = split('~', { highWaterMark: 1024 }) + + t.equal(input.matcher, '~') + t.equal(typeof input.mapper, 'function') + t.equal(input._readableState.highWaterMark, 1024) + t.equal(input._writableState.highWaterMark, 1024) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('support mapper and options', function (t) { + t.plan(6) + + const a = { a: '42' } + const b = { b: '24' } + const input = split(JSON.parse, { highWaterMark: 1024 }) + + t.ok(input.matcher instanceof RegExp, 'matcher is RegExp') + t.equal(typeof input.mapper, 'function') + t.equal(input._readableState.highWaterMark, 1024) + t.equal(input._writableState.highWaterMark, 1024) + + input.pipe(objcb(function (err, list) { + t.error(err) + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('\n') + input.end(JSON.stringify(b)) +}) + +test('split utf8 chars', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫烫烫', '锟斤拷']) + })) + + const buf = Buffer.from('烫烫烫\r\n锟斤拷', 
'utf8') + for (let i = 0; i < buf.length; ++i) { + input.write(buf.slice(i, i + 1)) + } + input.end() +}) + +test('split utf8 chars 2by2', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫烫烫', '烫烫烫']) + })) + + const str = '烫烫烫\r\n烫烫烫' + const buf = Buffer.from(str, 'utf8') + for (let i = 0; i < buf.length; i += 2) { + input.write(buf.slice(i, i + 2)) + } + input.end() +}) + +test('split lines when the \n comes at the end of a chunk', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.write('hello\n') + input.end('world') +}) + +test('truncated utf-8 char', function (t) { + t.plan(2) + + const input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫' + Buffer.from('e7', 'hex').toString()]) + })) + + const str = '烫烫' + const buf = Buffer.from(str, 'utf8') + + input.write(buf.slice(0, 3)) + input.end(buf.slice(3, 4)) +}) + +test('maximum buffer limit', function (t) { + t.plan(1) + + const input = split({ maxLength: 2 }) + input.on('error', function (err) { + t.ok(err) + }) + + input.resume() + + input.write('hey') +}) + +test('readable highWaterMark', function (t) { + const input = split() + t.equal(input._readableState.highWaterMark, 16) + t.end() +}) + +test('maxLength < chunk size', function (t) { + t.plan(2) + + const input = split({ maxLength: 2 }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['a', 'b']) + })) + + input.end('a\nb') +}) + +test('maximum buffer limit w/skip', function (t) { + t.plan(2) + + const input = split({ maxLength: 2, skipOverflow: true }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['a', 'b', 'c']) + })) + + input.write('a\n123') + input.write('456') + input.write('789\nb\nc') + input.end() +}) + +test("don't modify the options object", function (t) { + t.plan(2) + + const options = {} + const input = split(options) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.same(options, {}) + })) + + input.end() +}) + +test('mapper throws flush', function (t) { + t.plan(1) + const error = new Error() + const input = split(function () { + throw error + }) + + input.on('error', (err, list) => { + t.same(err, error) + }) + input.end('hello') +}) + +test('mapper throws on transform', function (t) { + t.plan(1) + + const error = new Error() + const input = split(function (l) { + throw error + }) + + input.on('error', (err) => { + t.same(err, error) + }) + input.write('a') + input.write('\n') + input.end('b') +}) + +test('supports Symbol.split', function (t) { + t.plan(2) + + const input = split({ + [Symbol.split] (str) { + return str.split('~') + } + }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) diff --git a/node_modules/strip-json-comments/index.d.ts b/node_modules/strip-json-comments/index.d.ts new file mode 100644 index 0000000..ffd6258 --- /dev/null +++ b/node_modules/strip-json-comments/index.d.ts @@ -0,0 +1,41 @@ +export type Options = { + /** + Strip trailing commas in addition to comments. + + @default false + */ + readonly trailingCommas?: boolean; + + /** + Replace comments and trailing commas with whitespace instead of stripping them entirely. + + @default true + */ + readonly whitespace?: boolean; +}; + +/** +Strip comments from JSON. 
Lets you use comments in your JSON files! + +It will replace single-line comments `//` and multi-line comments `/**\/` with whitespace. This allows JSON error positions to remain as close as possible to the original source. + +@param jsonString - Accepts a string with JSON. +@returns A JSON string without comments. + +@example +``` +import stripJsonComments from 'strip-json-comments'; + +const json = `{ + // Rainbows + "unicorn": "cake" +}`; + +JSON.parse(stripJsonComments(json)); +//=> {unicorn: 'cake'} +``` +*/ +export default function stripJsonComments( + jsonString: string, + options?: Options +): string; diff --git a/node_modules/strip-json-comments/index.js b/node_modules/strip-json-comments/index.js new file mode 100644 index 0000000..0008e77 --- /dev/null +++ b/node_modules/strip-json-comments/index.js @@ -0,0 +1,113 @@ +const singleComment = Symbol('singleComment'); +const multiComment = Symbol('multiComment'); + +const stripWithoutWhitespace = () => ''; + +// Replace all characters except ASCII spaces, tabs and line endings with regular spaces to ensure valid JSON output. +const stripWithWhitespace = (string, start, end) => string.slice(start, end).replace(/[^ \t\r\n]/g, ' '); + +const isEscaped = (jsonString, quotePosition) => { + let index = quotePosition - 1; + let backslashCount = 0; + + while (jsonString[index] === '\\') { + index -= 1; + backslashCount += 1; + } + + return Boolean(backslashCount % 2); +}; + +export default function stripJsonComments(jsonString, {whitespace = true, trailingCommas = false} = {}) { + if (typeof jsonString !== 'string') { + throw new TypeError(`Expected argument \`jsonString\` to be a \`string\`, got \`${typeof jsonString}\``); + } + + const strip = whitespace ? stripWithWhitespace : stripWithoutWhitespace; + + let isInsideString = false; + let isInsideComment = false; + let offset = 0; + let buffer = ''; + let result = ''; + let commaIndex = -1; + + for (let index = 0; index < jsonString.length; index++) { + const currentCharacter = jsonString[index]; + const nextCharacter = jsonString[index + 1]; + + if (!isInsideComment && currentCharacter === '"') { + // Enter or exit string + const escaped = isEscaped(jsonString, index); + if (!escaped) { + isInsideString = !isInsideString; + } + } + + if (isInsideString) { + continue; + } + + if (!isInsideComment && currentCharacter + nextCharacter === '//') { + // Enter single-line comment + buffer += jsonString.slice(offset, index); + offset = index; + isInsideComment = singleComment; + index++; + } else if (isInsideComment === singleComment && currentCharacter + nextCharacter === '\r\n') { + // Exit single-line comment via \r\n + index++; + isInsideComment = false; + buffer += strip(jsonString, offset, index); + offset = index; + continue; + } else if (isInsideComment === singleComment && currentCharacter === '\n') { + // Exit single-line comment via \n + isInsideComment = false; + buffer += strip(jsonString, offset, index); + offset = index; + } else if (!isInsideComment && currentCharacter + nextCharacter === '/*') { + // Enter multiline comment + buffer += jsonString.slice(offset, index); + offset = index; + isInsideComment = multiComment; + index++; + continue; + } else if (isInsideComment === multiComment && currentCharacter + nextCharacter === '*/') { + // Exit multiline comment + index++; + isInsideComment = false; + buffer += strip(jsonString, offset, index + 1); + offset = index + 1; + continue; + } else if (trailingCommas && !isInsideComment) { + if (commaIndex !== -1) { + if 
(currentCharacter === '}' || currentCharacter === ']') { + // Strip trailing comma + buffer += jsonString.slice(offset, index); + result += strip(buffer, 0, 1) + buffer.slice(1); + buffer = ''; + offset = index; + commaIndex = -1; + } else if (currentCharacter !== ' ' && currentCharacter !== '\t' && currentCharacter !== '\r' && currentCharacter !== '\n') { + // Hit non-whitespace following a comma; comma is not trailing + buffer += jsonString.slice(offset, index); + offset = index; + commaIndex = -1; + } + } else if (currentCharacter === ',') { + // Flush buffer prior to this point, and save new comma index + result += buffer + jsonString.slice(offset, index); + buffer = ''; + offset = index; + commaIndex = index; + } + } + } + + const remaining = (isInsideComment === singleComment) + ? strip(jsonString, offset) + : jsonString.slice(offset); + + return result + buffer + remaining; +} diff --git a/node_modules/strip-json-comments/license b/node_modules/strip-json-comments/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/strip-json-comments/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/strip-json-comments/package.json b/node_modules/strip-json-comments/package.json new file mode 100644 index 0000000..568dfb4 --- /dev/null +++ b/node_modules/strip-json-comments/package.json @@ -0,0 +1,56 @@ +{ + "name": "strip-json-comments", + "version": "5.0.3", + "description": "Strip comments from JSON. 
Lets you use comments in your JSON files!", + "license": "MIT", + "repository": "sindresorhus/strip-json-comments", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "types": "./index.d.ts", + "sideEffects": false, + "engines": { + "node": ">=14.16" + }, + "scripts": { + "test": "xo && ava && tsd", + "bench": "matcha benchmark.js" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "json", + "strip", + "comments", + "remove", + "delete", + "trim", + "multiline", + "parse", + "config", + "configuration", + "settings", + "util", + "env", + "environment", + "jsonc" + ], + "devDependencies": { + "ava": "^4.3.1", + "matcha": "^0.7.0", + "tsd": "^0.22.0", + "xo": "^0.54.2" + }, + "xo": { + "rules": { + "complexity": "off" + } + } +} diff --git a/node_modules/strip-json-comments/readme.md b/node_modules/strip-json-comments/readme.md new file mode 100644 index 0000000..00f852e --- /dev/null +++ b/node_modules/strip-json-comments/readme.md @@ -0,0 +1,75 @@ +# strip-json-comments + +> Strip comments from JSON. Lets you use comments in your JSON files! + +This is now possible: + +```js +{ + // Rainbows + "unicorn": /* ❤ */ "cake" +} +``` + +It will replace single-line comments `//` and multi-line comments `/**/` with whitespace. This allows JSON error positions to remain as close as possible to the original source. + +Also available as a [Gulp](https://github.com/sindresorhus/gulp-strip-json-comments)/[Grunt](https://github.com/sindresorhus/grunt-strip-json-comments)/[Broccoli](https://github.com/sindresorhus/broccoli-strip-json-comments) plugin. + +## Install + +```sh +npm install strip-json-comments +``` + +## Usage + +```js +import stripJsonComments from 'strip-json-comments'; + +const json = `{ + // Rainbows + "unicorn": /* ❤ */ "cake" +}`; + +JSON.parse(stripJsonComments(json)); +//=> {unicorn: 'cake'} +``` + +## API + +### stripJsonComments(jsonString, options?) + +#### jsonString + +Type: `string` + +Accepts a string with JSON and returns a string without comments. + +#### options + +Type: `object` + +##### trailingCommas + +Type: `boolean`\ +Default: `false` + +Strip trailing commas in addition to comments. + +##### whitespace + +Type: `boolean`\ +Default: `true` + +Replace comments and trailing commas with whitespace instead of stripping them entirely. 
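+
+Both options can be combined. A minimal sketch, assuming a config file format that tolerates comments and trailing commas:
+
+```js
+import stripJsonComments from 'strip-json-comments';
+
+const json = `{
+	"unicorn": "cake", // note the trailing comma
+}`;
+
+JSON.parse(stripJsonComments(json, {trailingCommas: true}));
+//=> {unicorn: 'cake'}
+```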
+
+## Benchmark
+
+```sh
+npm run bench
+```
+
+## Related
+
+- [strip-json-comments-cli](https://github.com/sindresorhus/strip-json-comments-cli) - CLI for this module
+- [strip-css-comments](https://github.com/sindresorhus/strip-css-comments) - Strip comments from CSS
diff --git a/node_modules/thread-stream/.github/dependabot.yml b/node_modules/thread-stream/.github/dependabot.yml
new file mode 100644
index 0000000..dfa7fa6
--- /dev/null
+++ b/node_modules/thread-stream/.github/dependabot.yml
@@ -0,0 +1,13 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    open-pull-requests-limit: 10
+
+  - package-ecosystem: "npm"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
diff --git a/node_modules/thread-stream/.github/workflows/ci.yml b/node_modules/thread-stream/.github/workflows/ci.yml
new file mode 100644
index 0000000..3e0950a
--- /dev/null
+++ b/node_modules/thread-stream/.github/workflows/ci.yml
@@ -0,0 +1,94 @@
+name: CI
+
+on:
+  push:
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+  pull_request:
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+
+# This allows a subsequently queued workflow run to interrupt previous runs
+concurrency:
+  group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
+  cancel-in-progress: true
+
+jobs:
+  dependency-review:
+    name: Dependency Review
+    if: github.event_name == 'pull_request'
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    steps:
+      - name: Check out repo
+        uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+
+      - name: Dependency review
+        uses: actions/dependency-review-action@v4
+
+  test:
+    name: Test
+    runs-on: ${{ matrix.os }}
+    permissions:
+      contents: read
+    strategy:
+      matrix:
+        node-version: [18, 20, 22]
+        os: [macos-latest, ubuntu-latest, windows-latest]
+        exclude:
+          - os: windows-latest
+            node-version: 22
+
+    steps:
+      - name: Check out repo
+        uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+
+      - name: Setup Node ${{ matrix.node-version }}
+        uses: actions/setup-node@v4
+        with:
+          node-version: ${{ matrix.node-version }}
+
+      - name: Install dependencies
+        run: npm i --ignore-scripts
+
+      - name: Run tests
+        run: npm run test:ci
+
+      - name: Coveralls Parallel
+        uses: coverallsapp/github-action@v2.3.0
+        with:
+          github-token: ${{ secrets.github_token }}
+          parallel: true
+          flag-name: run-${{ matrix.node-version }}-${{ matrix.os }}
+
+  coverage:
+    needs: test
+    runs-on: ubuntu-latest
+    steps:
+      - name: Coveralls Finished
+        uses: coverallsapp/github-action@v2.3.0
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          parallel-finished: true
+
+  automerge:
+    name: Automerge Dependabot PRs
+    if: >
+      github.event_name == 'pull_request' &&
+      github.event.pull_request.user.login == 'dependabot[bot]'
+    needs: test
+    permissions:
+      pull-requests: write
+      contents: write
+    runs-on: ubuntu-latest
+    steps:
+      - uses: fastify/github-action-merge-dependabot@v3
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/node_modules/thread-stream/.husky/pre-commit b/node_modules/thread-stream/.husky/pre-commit
new file mode 100644
index 0000000..610c2a5
--- /dev/null
+++ b/node_modules/thread-stream/.husky/pre-commit
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+. "$(dirname -- "$0")/_/husky.sh"
+
+npm test
diff --git a/node_modules/thread-stream/.taprc b/node_modules/thread-stream/.taprc
new file mode 100644
index 0000000..954e854
--- /dev/null
+++ b/node_modules/thread-stream/.taprc
@@ -0,0 +1,4 @@
+jobs: 1
+check-coverage: false
+# in seconds
+timeout: 60
diff --git a/node_modules/thread-stream/LICENSE b/node_modules/thread-stream/LICENSE
new file mode 100644
index 0000000..2c1a038
--- /dev/null
+++ b/node_modules/thread-stream/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Matteo Collina
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/thread-stream/README.md b/node_modules/thread-stream/README.md
new file mode 100644
index 0000000..80d1b3f
--- /dev/null
+++ b/node_modules/thread-stream/README.md
@@ -0,0 +1,135 @@
+# thread-stream
+[![npm version](https://img.shields.io/npm/v/thread-stream)](https://www.npmjs.com/package/thread-stream)
+[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/thread-stream/ci.yml?branch=main)](https://github.com/pinojs/thread-stream/actions)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
+
+A streaming way to send data to a Node.js Worker Thread.
+
+## Install
+
+```sh
+npm i thread-stream
+```
+
+## Usage
+
+```js
+'use strict'
+
+const ThreadStream = require('thread-stream')
+const { join } = require('path')
+
+const stream = new ThreadStream({
+  filename: join(__dirname, 'worker.js'),
+  workerData: { dest },
+  workerOpts: {}, // Other options to be passed to Worker
+  sync: false, // default
+})
+
+stream.write('hello')

+// Asynchronous flushing
+stream.flush(function () {
+  stream.write(' ')
+  stream.write('world')
+
+  // Synchronous flushing
+  stream.flushSync()
+  stream.end()
+})
+```
+
+In `worker.js`:
+
+```js
+'use strict'
+
+const fs = require('fs')
+const { once } = require('events')
+
+async function run (opts) {
+  const stream = fs.createWriteStream(opts.dest)
+  await once(stream, 'open')
+  return stream
+}
+
+module.exports = run
+```
+
+Make sure that the stream emits `'close'` when it completes.
+This can usually be achieved by passing the [`autoDestroy: true`](https://nodejs.org/api/stream.html#stream_new_stream_writable_options)
+flag to your stream classes.
+
+The underlying worker is closed automatically if the stream is garbage collected.
+
+
+### External modules
+
+You may use this module with compatible external modules that export the `worker.js` interface.
+ +```js +const ThreadStream = require('thread-stream') + +const modulePath = require.resolve('pino-elasticsearch') + +const stream = new ThreadStream({ + filename: modulePath, + workerData: { node: 'http://localhost:9200' } +}) + +stream.write('log to elasticsearch!') +stream.flushSync() +stream.end() +``` + +This module works with `yarn` in PnP (plug'n play) mode too! + +### Emit events + +You can emit events on the ThreadStream from your worker using [`worker.parentPort.postMessage()`](https://nodejs.org/api/worker_threads.html#workerparentport). +The message (JSON object) must have the following data structure: + +```js +parentPort.postMessage({ + code: 'EVENT', + name: 'eventName', + args: ['list', 'of', 'args', 123, new Error('Boom')] +}) +``` + +On your ThreadStream, you can add a listener function for this event name: + +```js +const stream = new ThreadStream({ + filename: join(__dirname, 'worker.js'), + workerData: {}, +}) +stream.on('eventName', function (a, b, c, n, err) { + console.log('received:', a, b, c, n, err) // received: list of args 123 Error: Boom +}) +``` + +### Post Messages + +You can post messages to the worker by emitting a `message` event on the ThreadStream. + +```js +const stream = new ThreadStream({ + filename: join(__dirname, 'worker.js'), + workerData: {}, +}) +stream.emit('message', message) +``` + +On your worker, you can listen for this message using [`worker.parentPort.on('message', cb)`](https://nodejs.org/api/worker_threads.html#event-message). + +```js +const { parentPort } = require('worker_threads') +parentPort.on('message', function (message) { + console.log('received:', message) +}) +``` + +## License + +MIT diff --git a/node_modules/thread-stream/bench.js b/node_modules/thread-stream/bench.js new file mode 100644 index 0000000..f13454c --- /dev/null +++ b/node_modules/thread-stream/bench.js @@ -0,0 +1,85 @@ +'use strict' + +const bench = require('fastbench') +const SonicBoom = require('sonic-boom') +const ThreadStream = require('.') +const Console = require('console').Console +const fs = require('fs') +const { join } = require('path') + +const core = fs.createWriteStream('/dev/null') +const fd = fs.openSync('/dev/null', 'w') +const sonic = new SonicBoom({ fd }) +const sonicSync = new SonicBoom({ fd, sync: true }) +const out = fs.createWriteStream('/dev/null') +const dummyConsole = new Console(out) +const threadStreamSync = new ThreadStream({ + filename: join(__dirname, 'test', 'to-file.js'), + workerData: { dest: '/dev/null' }, + bufferSize: 4 * 1024 * 1024, + sync: true +}) +const threadStreamAsync = new ThreadStream({ + filename: join(__dirname, 'test', 'to-file.js'), + workerData: { dest: '/dev/null' }, + bufferSize: 4 * 1024 * 1024, + sync: false +}) + +const MAX = 10000 + +let str = '' + +for (let i = 0; i < 100; i++) { + str += 'hello' +} + +setTimeout(doBench, 100) + +const run = bench([ + function benchThreadStreamSync (cb) { + for (let i = 0; i < MAX; i++) { + threadStreamSync.write(str) + } + setImmediate(cb) + }, + function benchThreadStreamAsync (cb) { + threadStreamAsync.once('drain', cb) + for (let i = 0; i < MAX; i++) { + threadStreamAsync.write(str) + } + }, + function benchSonic (cb) { + sonic.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonic.write(str) + } + }, + function benchSonicSync (cb) { + sonicSync.once('drain', cb) + for (let i = 0; i < MAX; i++) { + sonicSync.write(str) + } + }, + function benchCore (cb) { + core.once('drain', cb) + for (let i = 0; i < MAX; i++) { + core.write(str) + } + }, + function 
benchConsole (cb) { + for (let i = 0; i < MAX; i++) { + dummyConsole.log(str) + } + setImmediate(cb) + } +], 1000) + +function doBench () { + run(function () { + run(function () { + // TODO figure out why it does not shut down + process.exit(0) + }) + }) +} diff --git a/node_modules/thread-stream/index.d.ts b/node_modules/thread-stream/index.d.ts new file mode 100644 index 0000000..a84254f --- /dev/null +++ b/node_modules/thread-stream/index.d.ts @@ -0,0 +1,92 @@ +import { EventEmitter } from 'events' +import * as workerThreads from 'worker_threads' + +interface ThreadStreamOptions { + /** + * The size (in bytes) of the buffer. + * Must be greater than 4 (i.e. it must at least fit a 4-byte utf-8 char). + * + * Default: `4 * 1024 * 1024` = `4194304` + */ + bufferSize?: number, + /** + * The path to the Worker's main script or module. + * Must be either an absolute path or a relative path (i.e. relative to the current working directory) starting with ./ or ../, or a WHATWG URL object using file: or data: protocol. + * When using a data: URL, the data is interpreted based on MIME type using the ECMAScript module loader. + * + * {@link workerThreads.Worker()} + */ + filename: string | URL, + /** + * If `true`, write data synchronously; otherwise write data asynchronously. + * + * Default: `false`. + */ + sync?: boolean, + /** + * {@link workerThreads.WorkerOptions.workerData} + * + * Default: `{}` + */ + workerData?: any, + /** + * {@link workerThreads.WorkerOptions} + * + * Default: `{}` + */ + workerOpts?: workerThreads.WorkerOptions +} + + +declare class ThreadStream extends EventEmitter { + /** + * @param {ThreadStreamOptions} opts + */ + constructor(opts: ThreadStreamOptions) + /** + * Write some data to the stream. + * + * **Please note that this method should not throw an {Error} if something goes wrong but emit an error event.** + * @param {string} data data to write. + * @returns {boolean} false if the stream wishes for the calling code to wait for the 'drain' event to be emitted before continuing to write additional data or if it fails to write; otherwise true. + */ + write(data: string): boolean + /** + * Signal that no more data will be written. + * + * **Please note that this method should not throw an {Error} if something goes wrong but emit an error event.** + * + * Calling the {@link write()} method after calling {@link end()} will emit an error. + */ + end(): void + /** + * Flush the stream synchronously. + * This method should be called in the shutdown phase to make sure that all data has been flushed. + * + * **Please note that this method will throw an {Error} if something goes wrong.** + * + * @throws {Error} if the stream is already flushing, if it fails to flush or if it takes more than 10 seconds to flush. + */ + flushSync(): void + /** + * Synchronously calls each of the listeners registered for the event named`eventName`, in the order they were registered, passing the supplied arguments + * to each. + * + * @param eventName the name of the event. + * @param args the arguments to be passed to the event handlers. + * @returns {boolean} `true` if the event had listeners, `false` otherwise. + */ + emit(eventName: string | symbol, ...args: any[]): boolean; + + /** + * Post a message to the Worker with specified data and an optional list of transferable objects. + * + * @param eventName the name of the event, specifically 'message'. + * @param message message data to be sent to the Worker. 
+ * @param transferList an optional list of transferable objects to be transferred to the Worker context. + * @returns {boolean} true if the event had listeners, false otherwise. + */ + emit(eventName: 'message', message: any, transferList?: workerThreads.TransferListItem[]): boolean +} + +export = ThreadStream; diff --git a/node_modules/thread-stream/index.js b/node_modules/thread-stream/index.js new file mode 100644 index 0000000..bf8b387 --- /dev/null +++ b/node_modules/thread-stream/index.js @@ -0,0 +1,537 @@ +'use strict' + +const { version } = require('./package.json') +const { EventEmitter } = require('events') +const { Worker } = require('worker_threads') +const { join } = require('path') +const { pathToFileURL } = require('url') +const { wait } = require('./lib/wait') +const { + WRITE_INDEX, + READ_INDEX +} = require('./lib/indexes') +const buffer = require('buffer') +const assert = require('assert') + +const kImpl = Symbol('kImpl') + +// V8 limit for string size +const MAX_STRING = buffer.constants.MAX_STRING_LENGTH + +class FakeWeakRef { + constructor (value) { + this._value = value + } + + deref () { + return this._value + } +} + +class FakeFinalizationRegistry { + register () {} + + unregister () {} +} + +// Currently using FinalizationRegistry with code coverage breaks the world +// Ref: https://github.com/nodejs/node/issues/49344 +const FinalizationRegistry = process.env.NODE_V8_COVERAGE ? FakeFinalizationRegistry : global.FinalizationRegistry || FakeFinalizationRegistry +const WeakRef = process.env.NODE_V8_COVERAGE ? FakeWeakRef : global.WeakRef || FakeWeakRef + +const registry = new FinalizationRegistry((worker) => { + if (worker.exited) { + return + } + worker.terminate() +}) + +function createWorker (stream, opts) { + const { filename, workerData } = opts + + const bundlerOverrides = '__bundlerPathsOverrides' in globalThis ? globalThis.__bundlerPathsOverrides : {} + const toExecute = bundlerOverrides['thread-stream-worker'] || join(__dirname, 'lib', 'worker.js') + + const worker = new Worker(toExecute, { + ...opts.workerOpts, + trackUnmanagedFds: false, + workerData: { + filename: filename.indexOf('file://') === 0 + ? 
filename + : pathToFileURL(filename).href, + dataBuf: stream[kImpl].dataBuf, + stateBuf: stream[kImpl].stateBuf, + workerData: { + $context: { + threadStreamVersion: version + }, + ...workerData + } + } + }) + + // We keep a strong reference for now, + // we need to start writing first + worker.stream = new FakeWeakRef(stream) + + worker.on('message', onWorkerMessage) + worker.on('exit', onWorkerExit) + registry.register(stream, worker) + + return worker +} + +function drain (stream) { + assert(!stream[kImpl].sync) + if (stream[kImpl].needDrain) { + stream[kImpl].needDrain = false + stream.emit('drain') + } +} + +function nextFlush (stream) { + const writeIndex = Atomics.load(stream[kImpl].state, WRITE_INDEX) + let leftover = stream[kImpl].data.length - writeIndex + + if (leftover > 0) { + if (stream[kImpl].buf.length === 0) { + stream[kImpl].flushing = false + + if (stream[kImpl].ending) { + end(stream) + } else if (stream[kImpl].needDrain) { + process.nextTick(drain, stream) + } + + return + } + + let toWrite = stream[kImpl].buf.slice(0, leftover) + let toWriteBytes = Buffer.byteLength(toWrite) + if (toWriteBytes <= leftover) { + stream[kImpl].buf = stream[kImpl].buf.slice(leftover) + // process._rawDebug('writing ' + toWrite.length) + write(stream, toWrite, nextFlush.bind(null, stream)) + } else { + // multi-byte utf-8 + stream.flush(() => { + // err is already handled in flush() + if (stream.destroyed) { + return + } + + Atomics.store(stream[kImpl].state, READ_INDEX, 0) + Atomics.store(stream[kImpl].state, WRITE_INDEX, 0) + + // Find a toWrite length that fits the buffer + // it must exists as the buffer is at least 4 bytes length + // and the max utf-8 length for a char is 4 bytes. + while (toWriteBytes > stream[kImpl].data.length) { + leftover = leftover / 2 + toWrite = stream[kImpl].buf.slice(0, leftover) + toWriteBytes = Buffer.byteLength(toWrite) + } + stream[kImpl].buf = stream[kImpl].buf.slice(leftover) + write(stream, toWrite, nextFlush.bind(null, stream)) + }) + } + } else if (leftover === 0) { + if (writeIndex === 0 && stream[kImpl].buf.length === 0) { + // we had a flushSync in the meanwhile + return + } + stream.flush(() => { + Atomics.store(stream[kImpl].state, READ_INDEX, 0) + Atomics.store(stream[kImpl].state, WRITE_INDEX, 0) + nextFlush(stream) + }) + } else { + // This should never happen + destroy(stream, new Error('overwritten')) + } +} + +function onWorkerMessage (msg) { + const stream = this.stream.deref() + if (stream === undefined) { + this.exited = true + // Terminate the worker. + this.terminate() + return + } + + switch (msg.code) { + case 'READY': + // Replace the FakeWeakRef with a + // proper one. + this.stream = new WeakRef(stream) + + stream.flush(() => { + stream[kImpl].ready = true + stream.emit('ready') + }) + break + case 'ERROR': + destroy(stream, msg.err) + break + case 'EVENT': + if (Array.isArray(msg.args)) { + stream.emit(msg.name, ...msg.args) + } else { + stream.emit(msg.name, msg.args) + } + break + case 'WARNING': + process.emitWarning(msg.err) + break + default: + destroy(stream, new Error('this should not happen: ' + msg.code)) + } +} + +function onWorkerExit (code) { + const stream = this.stream.deref() + if (stream === undefined) { + // Nothing to do, the worker already exit + return + } + registry.unregister(stream) + stream.worker.exited = true + stream.worker.off('exit', onWorkerExit) + destroy(stream, code !== 0 ? 
new Error('the worker thread exited') : null) +} + +class ThreadStream extends EventEmitter { + constructor (opts = {}) { + super() + + if (opts.bufferSize < 4) { + throw new Error('bufferSize must at least fit a 4-byte utf-8 char') + } + + this[kImpl] = {} + this[kImpl].stateBuf = new SharedArrayBuffer(128) + this[kImpl].state = new Int32Array(this[kImpl].stateBuf) + this[kImpl].dataBuf = new SharedArrayBuffer(opts.bufferSize || 4 * 1024 * 1024) + this[kImpl].data = Buffer.from(this[kImpl].dataBuf) + this[kImpl].sync = opts.sync || false + this[kImpl].ending = false + this[kImpl].ended = false + this[kImpl].needDrain = false + this[kImpl].destroyed = false + this[kImpl].flushing = false + this[kImpl].ready = false + this[kImpl].finished = false + this[kImpl].errored = null + this[kImpl].closed = false + this[kImpl].buf = '' + + // TODO (fix): Make private? + this.worker = createWorker(this, opts) // TODO (fix): make private + this.on('message', (message, transferList) => { + this.worker.postMessage(message, transferList) + }) + } + + write (data) { + if (this[kImpl].destroyed) { + error(this, new Error('the worker has exited')) + return false + } + + if (this[kImpl].ending) { + error(this, new Error('the worker is ending')) + return false + } + + if (this[kImpl].flushing && this[kImpl].buf.length + data.length >= MAX_STRING) { + try { + writeSync(this) + this[kImpl].flushing = true + } catch (err) { + destroy(this, err) + return false + } + } + + this[kImpl].buf += data + + if (this[kImpl].sync) { + try { + writeSync(this) + return true + } catch (err) { + destroy(this, err) + return false + } + } + + if (!this[kImpl].flushing) { + this[kImpl].flushing = true + setImmediate(nextFlush, this) + } + + this[kImpl].needDrain = this[kImpl].data.length - this[kImpl].buf.length - Atomics.load(this[kImpl].state, WRITE_INDEX) <= 0 + return !this[kImpl].needDrain + } + + end () { + if (this[kImpl].destroyed) { + return + } + + this[kImpl].ending = true + end(this) + } + + flush (cb) { + if (this[kImpl].destroyed) { + if (typeof cb === 'function') { + process.nextTick(cb, new Error('the worker has exited')) + } + return + } + + // TODO write all .buf + const writeIndex = Atomics.load(this[kImpl].state, WRITE_INDEX) + // process._rawDebug(`(flush) readIndex (${Atomics.load(this.state, READ_INDEX)}) writeIndex (${Atomics.load(this.state, WRITE_INDEX)})`) + wait(this[kImpl].state, READ_INDEX, writeIndex, Infinity, (err, res) => { + if (err) { + destroy(this, err) + process.nextTick(cb, err) + return + } + if (res === 'not-equal') { + // TODO handle deadlock + this.flush(cb) + return + } + process.nextTick(cb) + }) + } + + flushSync () { + if (this[kImpl].destroyed) { + return + } + + writeSync(this) + flushSync(this) + } + + unref () { + this.worker.unref() + } + + ref () { + this.worker.ref() + } + + get ready () { + return this[kImpl].ready + } + + get destroyed () { + return this[kImpl].destroyed + } + + get closed () { + return this[kImpl].closed + } + + get writable () { + return !this[kImpl].destroyed && !this[kImpl].ending + } + + get writableEnded () { + return this[kImpl].ending + } + + get writableFinished () { + return this[kImpl].finished + } + + get writableNeedDrain () { + return this[kImpl].needDrain + } + + get writableObjectMode () { + return false + } + + get writableErrored () { + return this[kImpl].errored + } +} + +function error (stream, err) { + setImmediate(() => { + stream.emit('error', err) + }) +} + +function destroy (stream, err) { + if (stream[kImpl].destroyed) { + return + 
} + stream[kImpl].destroyed = true + + if (err) { + stream[kImpl].errored = err + error(stream, err) + } + + if (!stream.worker.exited) { + stream.worker.terminate() + .catch(() => {}) + .then(() => { + stream[kImpl].closed = true + stream.emit('close') + }) + } else { + setImmediate(() => { + stream[kImpl].closed = true + stream.emit('close') + }) + } +} + +function write (stream, data, cb) { + // data is smaller than the shared buffer length + const current = Atomics.load(stream[kImpl].state, WRITE_INDEX) + const length = Buffer.byteLength(data) + stream[kImpl].data.write(data, current) + Atomics.store(stream[kImpl].state, WRITE_INDEX, current + length) + Atomics.notify(stream[kImpl].state, WRITE_INDEX) + cb() + return true +} + +function end (stream) { + if (stream[kImpl].ended || !stream[kImpl].ending || stream[kImpl].flushing) { + return + } + stream[kImpl].ended = true + + try { + stream.flushSync() + + let readIndex = Atomics.load(stream[kImpl].state, READ_INDEX) + + // process._rawDebug('writing index') + Atomics.store(stream[kImpl].state, WRITE_INDEX, -1) + // process._rawDebug(`(end) readIndex (${Atomics.load(stream.state, READ_INDEX)}) writeIndex (${Atomics.load(stream.state, WRITE_INDEX)})`) + Atomics.notify(stream[kImpl].state, WRITE_INDEX) + + // Wait for the process to complete + let spins = 0 + while (readIndex !== -1) { + // process._rawDebug(`read = ${read}`) + Atomics.wait(stream[kImpl].state, READ_INDEX, readIndex, 1000) + readIndex = Atomics.load(stream[kImpl].state, READ_INDEX) + + if (readIndex === -2) { + destroy(stream, new Error('end() failed')) + return + } + + if (++spins === 10) { + destroy(stream, new Error('end() took too long (10s)')) + return + } + } + + process.nextTick(() => { + stream[kImpl].finished = true + stream.emit('finish') + }) + } catch (err) { + destroy(stream, err) + } + // process._rawDebug('end finished...') +} + +function writeSync (stream) { + const cb = () => { + if (stream[kImpl].ending) { + end(stream) + } else if (stream[kImpl].needDrain) { + process.nextTick(drain, stream) + } + } + stream[kImpl].flushing = false + + while (stream[kImpl].buf.length !== 0) { + const writeIndex = Atomics.load(stream[kImpl].state, WRITE_INDEX) + let leftover = stream[kImpl].data.length - writeIndex + if (leftover === 0) { + flushSync(stream) + Atomics.store(stream[kImpl].state, READ_INDEX, 0) + Atomics.store(stream[kImpl].state, WRITE_INDEX, 0) + continue + } else if (leftover < 0) { + // this should never happen + throw new Error('overwritten') + } + + let toWrite = stream[kImpl].buf.slice(0, leftover) + let toWriteBytes = Buffer.byteLength(toWrite) + if (toWriteBytes <= leftover) { + stream[kImpl].buf = stream[kImpl].buf.slice(leftover) + // process._rawDebug('writing ' + toWrite.length) + write(stream, toWrite, cb) + } else { + // multi-byte utf-8 + flushSync(stream) + Atomics.store(stream[kImpl].state, READ_INDEX, 0) + Atomics.store(stream[kImpl].state, WRITE_INDEX, 0) + + // Find a toWrite length that fits the buffer + // it must exist, as the buffer is at least 4 bytes long + // and the max utf-8 length for a char is 4 bytes.
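+ // Illustration, using the numbers from test/multibyte-chars.test.mjs: with
+ // bufferSize = 15 and buf = '\u03A3'.repeat(16) (2 bytes per char), leftover
+ // starts at 15 chars = 30 bytes > 15, so a single halving leaves 7 chars
+ // = 14 bytes, which fits.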
+ while (toWriteBytes > stream[kImpl].data.length) { + leftover = leftover / 2 + toWrite = stream[kImpl].buf.slice(0, leftover) + toWriteBytes = Buffer.byteLength(toWrite) + } + stream[kImpl].buf = stream[kImpl].buf.slice(leftover) + write(stream, toWrite, cb) + } + } +} + +function flushSync (stream) { + if (stream[kImpl].flushing) { + throw new Error('unable to flush while flushing') + } + + // process._rawDebug('flushSync started') + + const writeIndex = Atomics.load(stream[kImpl].state, WRITE_INDEX) + + let spins = 0 + + // TODO handle deadlock + while (true) { + const readIndex = Atomics.load(stream[kImpl].state, READ_INDEX) + + if (readIndex === -2) { + throw Error('_flushSync failed') + } + + // process._rawDebug(`(flushSync) readIndex (${readIndex}) writeIndex (${writeIndex})`) + if (readIndex !== writeIndex) { + // TODO: this times out for some reason. + Atomics.wait(stream[kImpl].state, READ_INDEX, readIndex, 1000) + } else { + break + } + + if (++spins === 10) { + throw new Error('_flushSync took too long (10s)') + } + } + // process._rawDebug('flushSync finished') +} + +module.exports = ThreadStream diff --git a/node_modules/thread-stream/lib/indexes.js b/node_modules/thread-stream/lib/indexes.js new file mode 100644 index 0000000..23c9a8e --- /dev/null +++ b/node_modules/thread-stream/lib/indexes.js @@ -0,0 +1,9 @@ +'use strict' + +const WRITE_INDEX = 4 +const READ_INDEX = 8 + +module.exports = { + WRITE_INDEX, + READ_INDEX +} diff --git a/node_modules/thread-stream/lib/wait.js b/node_modules/thread-stream/lib/wait.js new file mode 100644 index 0000000..0964eda --- /dev/null +++ b/node_modules/thread-stream/lib/wait.js @@ -0,0 +1,61 @@ +'use strict' + +const MAX_TIMEOUT = 1000 + +function wait (state, index, expected, timeout, done) { + const max = Date.now() + timeout + let current = Atomics.load(state, index) + if (current === expected) { + done(null, 'ok') + return + } + let prior = current + const check = (backoff) => { + if (Date.now() > max) { + done(null, 'timed-out') + } else { + setTimeout(() => { + prior = current + current = Atomics.load(state, index) + if (current === prior) { + check(backoff >= MAX_TIMEOUT ? MAX_TIMEOUT : backoff * 2) + } else { + if (current === expected) done(null, 'ok') + else done(null, 'not-equal') + } + }, backoff) + } + } + check(1) +} + +// let waitDiffCount = 0 +function waitDiff (state, index, expected, timeout, done) { + // const id = waitDiffCount++ + // process._rawDebug(`>>> waitDiff ${id}`) + const max = Date.now() + timeout + let current = Atomics.load(state, index) + if (current !== expected) { + done(null, 'ok') + return + } + const check = (backoff) => { + // process._rawDebug(`${id} ${index} current ${current} expected ${expected}`) + // process._rawDebug('' + backoff) + if (Date.now() > max) { + done(null, 'timed-out') + } else { + setTimeout(() => { + current = Atomics.load(state, index) + if (current !== expected) { + done(null, 'ok') + } else { + check(backoff >= MAX_TIMEOUT ?
MAX_TIMEOUT : backoff * 2) + } + }, backoff) + } + } + check(1) +} + +module.exports = { wait, waitDiff } diff --git a/node_modules/thread-stream/lib/worker.js b/node_modules/thread-stream/lib/worker.js new file mode 100644 index 0000000..6f0825c --- /dev/null +++ b/node_modules/thread-stream/lib/worker.js @@ -0,0 +1,174 @@ +'use strict' + +const { realImport, realRequire } = require('real-require') +const { workerData, parentPort } = require('worker_threads') +const { WRITE_INDEX, READ_INDEX } = require('./indexes') +const { waitDiff } = require('./wait') + +const { + dataBuf, + filename, + stateBuf +} = workerData + +let destination + +const state = new Int32Array(stateBuf) +const data = Buffer.from(dataBuf) + +async function start () { + let worker + try { + if (filename.endsWith('.ts') || filename.endsWith('.cts')) { + // TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ). + if (!process[Symbol.for('ts-node.register.instance')]) { + realRequire('ts-node/register') + } else if (process.env.TS_NODE_DEV) { + realRequire('ts-node-dev') + } + // TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees. + // Remove extra forwardslash on Windows + worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', ''))) + } else { + worker = (await realImport(filename)) + } + } catch (error) { + // A yarn user that tries to start a ThreadStream for an external module + // provides a filename pointing to a zip file. + // eg. require.resolve('pino-elasticsearch') // returns /foo/pino-elasticsearch-npm-6.1.0-0c03079478-6915435172.zip/bar.js + // The `import` will fail to try to load it. + // This catch block executes the `require` fallback to load the module correctly. + // In fact, yarn modifies the `require` function to manage the zipped path. + // More details at https://github.com/pinojs/pino/pull/1113 + // The error codes may change based on the node.js version (ENOTDIR > 12, ERR_MODULE_NOT_FOUND <= 12 ) + if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND') && + filename.startsWith('file://')) { + worker = realRequire(decodeURIComponent(filename.replace('file://', ''))) + } else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') { + // When bundled with pkg, an undefined error is thrown when called with realImport + // When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport + // More info at: https://github.com/pinojs/thread-stream/issues/143 + try { + worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', ''))) + } catch { + throw error + } + } else { + throw error + } + } + + // Depending on how the default export is performed, and on how the code is + // transpiled, we may find cases of two nested "default" objects. 
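+ // e.g. a transpiled `export default` can surface as { default: { default: fn } }.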
+ // See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762 + if (typeof worker === 'object') worker = worker.default + if (typeof worker === 'object') worker = worker.default + + destination = await worker(workerData.workerData) + + destination.on('error', function (err) { + Atomics.store(state, WRITE_INDEX, -2) + Atomics.notify(state, WRITE_INDEX) + + Atomics.store(state, READ_INDEX, -2) + Atomics.notify(state, READ_INDEX) + + parentPort.postMessage({ + code: 'ERROR', + err + }) + }) + + destination.on('close', function () { + // process._rawDebug('worker close emitted') + const end = Atomics.load(state, WRITE_INDEX) + Atomics.store(state, READ_INDEX, end) + Atomics.notify(state, READ_INDEX) + setImmediate(() => { + process.exit(0) + }) + }) +} + +// No .catch() handler, +// in case there is an error it goes +// to unhandledRejection +start().then(function () { + parentPort.postMessage({ + code: 'READY' + }) + + process.nextTick(run) +}) + +function run () { + const current = Atomics.load(state, READ_INDEX) + const end = Atomics.load(state, WRITE_INDEX) + + // process._rawDebug(`pre state ${current} ${end}`) + + if (end === current) { + if (end === data.length) { + waitDiff(state, READ_INDEX, end, Infinity, run) + } else { + waitDiff(state, WRITE_INDEX, end, Infinity, run) + } + return + } + + // process._rawDebug(`post state ${current} ${end}`) + + if (end === -1) { + // process._rawDebug('end') + destination.end() + return + } + + const toWrite = data.toString('utf8', current, end) + // process._rawDebug('worker writing: ' + toWrite) + + const res = destination.write(toWrite) + + if (res) { + Atomics.store(state, READ_INDEX, end) + Atomics.notify(state, READ_INDEX) + setImmediate(run) + } else { + destination.once('drain', function () { + Atomics.store(state, READ_INDEX, end) + Atomics.notify(state, READ_INDEX) + run() + }) + } +} + +process.on('unhandledRejection', function (err) { + parentPort.postMessage({ + code: 'ERROR', + err + }) + process.exit(1) +}) + +process.on('uncaughtException', function (err) { + parentPort.postMessage({ + code: 'ERROR', + err + }) + process.exit(1) +}) + +process.once('exit', exitCode => { + if (exitCode !== 0) { + process.exit(exitCode) + return + } + if (destination?.writableNeedDrain && !destination?.writableEnded) { + parentPort.postMessage({ + code: 'WARNING', + err: new Error('ThreadStream: process exited before destination stream was drained. 
this may indicate that the destination stream try to write to a another missing stream') + }) + } + + process.exit(0) +}) diff --git a/node_modules/thread-stream/package.json b/node_modules/thread-stream/package.json new file mode 100644 index 0000000..1c9f718 --- /dev/null +++ b/node_modules/thread-stream/package.json @@ -0,0 +1,57 @@ +{ + "name": "thread-stream", + "version": "3.1.0", + "description": "A streaming way to send data to a Node.js Worker Thread", + "main": "index.js", + "types": "index.d.ts", + "dependencies": { + "real-require": "^0.2.0" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "@types/tap": "^15.0.0", + "@yao-pkg/pkg": "^5.11.5", + "desm": "^1.3.0", + "fastbench": "^1.0.1", + "husky": "^9.0.6", + "pino-elasticsearch": "^8.0.0", + "sonic-boom": "^4.0.1", + "standard": "^17.0.0", + "tap": "^16.2.0", + "ts-node": "^10.8.0", + "typescript": "^5.3.2", + "why-is-node-running": "^2.2.2" + }, + "scripts": { + "build": "tsc --noEmit", + "test": "standard && npm run build && npm run transpile && tap \"test/**/*.test.*js\" && tap --ts test/*.test.*ts", + "test:ci": "standard && npm run transpile && npm run test:ci:js && npm run test:ci:ts", + "test:ci:js": "tap --no-check-coverage --timeout=120 --coverage-report=lcovonly \"test/**/*.test.*js\"", + "test:ci:ts": "tap --ts --no-check-coverage --coverage-report=lcovonly \"test/**/*.test.*ts\"", + "test:yarn": "npm run transpile && tap \"test/**/*.test.js\" --no-check-coverage", + "transpile": "sh ./test/ts/transpile.sh", + "prepare": "husky install" + }, + "standard": { + "ignore": [ + "test/ts/**/*", + "test/syntax-error.mjs" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/thread-stream.git" + }, + "keywords": [ + "worker", + "thread", + "threads", + "stream" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/mcollina/thread-stream/issues" + }, + "homepage": "https://github.com/mcollina/thread-stream#readme" +} diff --git a/node_modules/thread-stream/test/base.test.js b/node_modules/thread-stream/test/base.test.js new file mode 100644 index 0000000..5b4468e --- /dev/null +++ b/node_modules/thread-stream/test/base.test.js @@ -0,0 +1,285 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const { readFile } = require('fs') +const { file } = require('./helper') +const ThreadStream = require('..') +const { MessageChannel } = require('worker_threads') +const { once } = require('events') + +test('base sync=true', function (t) { + t.plan(15) + + const dest = file() + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: true + }) + + t.same(stream.writableObjectMode, false) + + t.same(stream.writableFinished, false) + stream.on('finish', () => { + t.same(stream.writableFinished, true) + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + + t.same(stream.closed, false) + stream.on('close', () => { + t.same(stream.closed, true) + t.notOk(stream.writable) + t.pass('close emitted') + }) + + t.same(stream.writableNeedDrain, false) + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + t.ok(stream.writable) + + t.same(stream.writableEnded, false) + stream.end() + t.same(stream.writableEnded, true) +}) + +test('overflow sync=true', function (t) { + t.plan(3) + + const dest = file() + const stream = new ThreadStream({ + bufferSize: 128, + filename: join(__dirname, 
'to-file.js'), + workerData: { dest }, + sync: true + }) + + let count = 0 + + // Write 10 chars, 20 times + function write () { + if (count++ === 20) { + stream.end() + return + } + + stream.write('aaaaaaaaaa') + // do not wait for drain event + setImmediate(write) + } + + write() + + stream.on('finish', () => { + t.pass('finish emitted') + }) + + stream.on('close', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data.length, 200) + }) + }) +}) + +test('overflow sync=false', function (t) { + const dest = file() + const stream = new ThreadStream({ + bufferSize: 128, + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: false + }) + + let count = 0 + + t.same(stream.writableNeedDrain, false) + + // Write 10 chars, 20 times + function write () { + if (count++ === 20) { + t.pass('end sent') + stream.end() + return + } + + if (!stream.write('aaaaaaaaaa')) { + t.same(stream.writableNeedDrain, true) + } + // do not wait for drain event + setImmediate(write) + } + + write() + + stream.on('drain', () => { + t.same(stream.writableNeedDrain, false) + t.pass('drain') + }) + + stream.on('finish', () => { + t.pass('finish emitted') + }) + + stream.on('close', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data.length, 200) + t.end() + }) + }) +}) + +test('over the bufferSize at startup', function (t) { + t.plan(6) + + const dest = file() + const stream = new ThreadStream({ + bufferSize: 10, + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: true + }) + + stream.on('finish', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + + stream.on('close', () => { + t.pass('close emitted') + }) + + t.ok(stream.write('hello')) + t.ok(stream.write(' world\n')) + t.ok(stream.write('something else\n')) + + stream.end() +}) + +test('over the bufferSize at startup (async)', function (t) { + t.plan(6) + + const dest = file() + const stream = new ThreadStream({ + bufferSize: 10, + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: false + }) + + t.ok(stream.write('hello')) + t.notOk(stream.write(' world\n')) + t.notOk(stream.write('something else\n')) + + stream.end() + + stream.on('finish', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + + stream.on('close', () => { + t.pass('close emitted') + }) +}) + +test('flushSync sync=false', function (t) { + const dest = file() + const stream = new ThreadStream({ + bufferSize: 128, + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: false + }) + + stream.on('drain', () => { + t.pass('drain') + stream.end() + }) + + stream.on('finish', () => { + t.pass('finish emitted') + }) + + stream.on('close', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data.length, 200) + t.end() + }) + }) + + for (let count = 0; count < 20; count++) { + stream.write('aaaaaaaaaa') + } + stream.flushSync() +}) + +test('pass down MessagePorts', async function (t) { + t.plan(3) + + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'port.js'), + workerData: { port: port1 }, + workerOpts: { + transferList: [port1] + }, + sync: false + }) + t.teardown(() => { + stream.end() + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const [strings] = await once(port2, 'message') + + t.equal(strings, 
'hello world\nsomething else\n') +}) + +test('destroy does not error', function (t) { + t.plan(5) + + const dest = file() + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: false + }) + + stream.on('ready', () => { + t.pass('ready emitted') + stream.worker.terminate() + }) + + stream.on('error', (err) => { + t.equal(err.message, 'the worker thread exited') + stream.flush((err) => { + t.equal(err.message, 'the worker has exited') + }) + t.doesNotThrow(() => stream.flushSync()) + t.doesNotThrow(() => stream.end()) + }) +}) + +test('syntax error', function (t) { + t.plan(1) + + const stream = new ThreadStream({ + filename: join(__dirname, 'syntax-error.mjs') + }) + + stream.on('error', (err) => { + t.equal(err.message, 'Unexpected end of input') + }) +}) diff --git a/node_modules/thread-stream/test/bench.test.js b/node_modules/thread-stream/test/bench.test.js new file mode 100644 index 0000000..7e90d2c --- /dev/null +++ b/node_modules/thread-stream/test/bench.test.js @@ -0,0 +1,38 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const ThreadStream = require('..') +const { file } = require('./helper') + +const MAX = 1000 + +let str = '' + +for (let i = 0; i < 10; i++) { + str += 'hello' +} + +test('base', function (t) { + const dest = file() + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest } + }) + let runs = 0 + function benchThreadStream () { + if (++runs === 1000) { + stream.end() + return + } + + for (let i = 0; i < MAX; i++) { + stream.write(str) + } + setImmediate(benchThreadStream) + } + benchThreadStream() + stream.on('finish', function () { + t.end() + }) +}) diff --git a/node_modules/thread-stream/test/bundlers.test.js b/node_modules/thread-stream/test/bundlers.test.js new file mode 100644 index 0000000..d4b8a90 --- /dev/null +++ b/node_modules/thread-stream/test/bundlers.test.js @@ -0,0 +1,60 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const { file } = require('./helper') +const ThreadStream = require('..') + +test('bundlers support with .js file', function (t) { + t.plan(1) + + globalThis.__bundlerPathsOverrides = { + 'thread-stream-worker': join(__dirname, 'custom-worker.js') + } + + const dest = file() + + process.on('uncaughtException', error => { + console.log(error) + }) + + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: true + }) + + stream.worker.removeAllListeners('message') + stream.worker.once('message', message => { + t.equal(message.code, 'CUSTOM-WORKER-CALLED') + }) + + stream.end() +}) + +test('bundlers support with .mjs file', function (t) { + t.plan(1) + + globalThis.__bundlerPathsOverrides = { + 'thread-stream-worker': join(__dirname, 'custom-worker.js') + } + + const dest = file() + + process.on('uncaughtException', error => { + console.log(error) + }) + + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.mjs'), + workerData: { dest }, + sync: true + }) + + stream.worker.removeAllListeners('message') + stream.worker.once('message', message => { + t.equal(message.code, 'CUSTOM-WORKER-CALLED') + }) + + stream.end() +}) diff --git a/node_modules/thread-stream/test/close-on-gc.js b/node_modules/thread-stream/test/close-on-gc.js new file mode 100644 index 0000000..d84baa2 --- /dev/null +++ b/node_modules/thread-stream/test/close-on-gc.js @@ -0,0 +1,37 @@ +'use strict' + +const { join } = 
require('path') +const ThreadStream = require('..') +const assert = require('assert') + +let worker = null + +function setup () { + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest: process.argv[2] }, + sync: true + }) + + worker = stream.worker + + stream.write('hello') + stream.write(' ') + stream.write('world\n') + stream.flushSync() + stream.unref() + + // the stream object goes out of scope here + setImmediate(gc) // eslint-disable-line +} + +setup() + +let exitEmitted = false +worker.on('exit', function () { + exitEmitted = true +}) + +process.on('exit', function () { + assert.strictEqual(exitEmitted, true) +}) diff --git a/node_modules/thread-stream/test/commonjs-fallback.test.js b/node_modules/thread-stream/test/commonjs-fallback.test.js new file mode 100644 index 0000000..a6d3940 --- /dev/null +++ b/node_modules/thread-stream/test/commonjs-fallback.test.js @@ -0,0 +1,80 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const { MessageChannel } = require('worker_threads') +const { once } = require('events') +const ThreadStream = require('..') + +const isYarnPnp = process.versions.pnp !== undefined + +test('yarn module resolution', { skip: !isYarnPnp }, t => { + t.plan(6) + + const modulePath = require.resolve('pino-elasticsearch') + t.match(modulePath, /.*\.zip.*/) + + const stream = new ThreadStream({ + filename: modulePath, + workerData: { node: null }, + sync: true + }) + + t.same(stream.writableErrored, null) + stream.on('error', (err) => { + t.same(stream.writableErrored, err) + t.pass('error emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.writable) + stream.end() +}) + +test('yarn module resolution for directories with special characters', { skip: !isYarnPnp }, async t => { + t.plan(3) + + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'dir with spaces', 'test-package.zip', 'worker.js'), + workerData: { port: port1 }, + workerOpts: { + transferList: [port1] + }, + sync: false + }) + t.teardown(() => { + stream.end() + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const [strings] = await once(port2, 'message') + + t.equal(strings, 'hello world\nsomething else\n') +}) + +test('yarn module resolution for typescript commonjs modules', { skip: !isYarnPnp }, async t => { + t.plan(3) + + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 'ts-commonjs-default-export.zip', 'worker.js'), + workerData: { port: port1 }, + workerOpts: { + transferList: [port1] + }, + sync: false + }) + t.teardown(() => { + stream.end() + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + const [strings] = await once(port2, 'message') + + t.equal(strings, 'hello world\nsomething else\n') +}) diff --git a/node_modules/thread-stream/test/context.test.js b/node_modules/thread-stream/test/context.test.js new file mode 100644 index 0000000..07028cb --- /dev/null +++ b/node_modules/thread-stream/test/context.test.js @@ -0,0 +1,21 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const ThreadStream = require('..') +const { version } = require('../package.json') +require('why-is-node-running') + +test('get context', (t) => { + const stream = new ThreadStream({ + filename: join(__dirname, 'get-context.js'), + workerData: {}, + sync: true + }) + t.on('end', () => stream.end()) 
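+ // get-context.js (later in this diff) posts an EVENT message named 'context'
+ // carrying opts.$context, which ThreadStream re-emits as a 'context' event.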
+ stream.on('context', (ctx) => { + t.same(ctx.threadStreamVersion, version) + t.end() + }) + stream.write('hello') +}) diff --git a/node_modules/thread-stream/test/create-and-exit.js b/node_modules/thread-stream/test/create-and-exit.js new file mode 100644 index 0000000..d6f12fa --- /dev/null +++ b/node_modules/thread-stream/test/create-and-exit.js @@ -0,0 +1,16 @@ +'use strict' + +const { join } = require('path') +const ThreadStream = require('..') + +const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest: process.argv[2] }, + sync: true +}) + +stream.write('hello') +stream.write(' ') +stream.write('world\n') +stream.flushSync() +stream.unref() diff --git a/node_modules/thread-stream/test/custom-worker.js b/node_modules/thread-stream/test/custom-worker.js new file mode 100644 index 0000000..e78340c --- /dev/null +++ b/node_modules/thread-stream/test/custom-worker.js @@ -0,0 +1,9 @@ +'use strict' + +const { parentPort } = require('worker_threads') + +parentPort.postMessage({ + code: 'CUSTOM-WORKER-CALLED' +}) + +require('../lib/worker') diff --git a/node_modules/thread-stream/test/dir with spaces/test-package.zip b/node_modules/thread-stream/test/dir with spaces/test-package.zip new file mode 100644 index 0000000..7a2aee8 Binary files /dev/null and b/node_modules/thread-stream/test/dir with spaces/test-package.zip differ diff --git a/node_modules/thread-stream/test/emit-event.js b/node_modules/thread-stream/test/emit-event.js new file mode 100644 index 0000000..429aaa2 --- /dev/null +++ b/node_modules/thread-stream/test/emit-event.js @@ -0,0 +1,22 @@ +'use strict' + +const { Writable } = require('stream') +const parentPort = require('worker_threads').parentPort + +async function run () { + return new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + if (parentPort) { + parentPort.postMessage({ + code: 'EVENT', + name: 'socketError', + args: ['list', 'of', 'args', 123, new Error('unable to write data to the TCP socket')] + }) + } + cb() + } + }) +} + +module.exports = run diff --git a/node_modules/thread-stream/test/end.test.js b/node_modules/thread-stream/test/end.test.js new file mode 100644 index 0000000..d96a941 --- /dev/null +++ b/node_modules/thread-stream/test/end.test.js @@ -0,0 +1,61 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const { readFile } = require('fs') +const { file } = require('./helper') +const ThreadStream = require('..') + +test('destroy support', function (t) { + t.plan(7) + + const dest = file() + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file-on-destroy.js'), + workerData: { dest }, + sync: true + }) + + stream.on('close', () => { + t.notOk(stream.writable) + t.pass('close emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + t.ok(stream.writable) + + stream.end() + + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) +}) + +test('synchronous _final support', function (t) { + t.plan(7) + + const dest = file() + const stream = new ThreadStream({ + filename: join(__dirname, 'to-file-on-final.js'), + workerData: { dest }, + sync: true + }) + + stream.on('close', () => { + t.notOk(stream.writable) + t.pass('close emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + t.ok(stream.writable) + + stream.end() + + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething 
else\n') + }) +}) diff --git a/node_modules/thread-stream/test/error.js b/node_modules/thread-stream/test/error.js new file mode 100644 index 0000000..0c7d598 --- /dev/null +++ b/node_modules/thread-stream/test/error.js @@ -0,0 +1,14 @@ +'use strict' + +const { Writable } = require('stream') + +async function run (opts) { + const stream = new Writable({ + write (chunk, enc, cb) { + cb(new Error('kaboom')) + } + }) + return stream +} + +module.exports = run diff --git a/node_modules/thread-stream/test/esm.test.mjs b/node_modules/thread-stream/test/esm.test.mjs new file mode 100644 index 0000000..364bf10 --- /dev/null +++ b/node_modules/thread-stream/test/esm.test.mjs @@ -0,0 +1,47 @@ +import { test } from 'tap' +import { readFile } from 'fs' +import ThreadStream from '../index.js' +import { join } from 'desm' +import { pathToFileURL } from 'url' +import { file } from './helper.js' + +function basic (text, filename) { + test(text, function (t) { + t.plan(5) + + const dest = file() + const stream = new ThreadStream({ + filename, + workerData: { dest }, + sync: true + }) + + stream.on('finish', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + + stream.on('close', () => { + t.pass('close emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() + }) +} + +basic('esm with path', join(import.meta.url, 'to-file.mjs')) +basic('esm with file URL', pathToFileURL(join(import.meta.url, 'to-file.mjs')).href) + +basic('(ts -> es6) esm with path', join(import.meta.url, 'ts', 'to-file.es6.mjs')) +basic('(ts -> es6) esm with file URL', pathToFileURL(join(import.meta.url, 'ts', 'to-file.es6.mjs')).href) + +basic('(ts -> es2017) esm with path', join(import.meta.url, 'ts', 'to-file.es2017.mjs')) +basic('(ts -> es2017) esm with file URL', pathToFileURL(join(import.meta.url, 'ts', 'to-file.es2017.mjs')).href) + +basic('(ts -> esnext) esm with path', join(import.meta.url, 'ts', 'to-file.esnext.mjs')) +basic('(ts -> esnext) esm with file URL', pathToFileURL(join(import.meta.url, 'ts', 'to-file.esnext.mjs')).href) diff --git a/node_modules/thread-stream/test/event.test.js b/node_modules/thread-stream/test/event.test.js new file mode 100644 index 0000000..2807686 --- /dev/null +++ b/node_modules/thread-stream/test/event.test.js @@ -0,0 +1,23 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const ThreadStream = require('..') + +test('event propagate', t => { + const stream = new ThreadStream({ + filename: join(__dirname, 'emit-event.js'), + workerData: {}, + sync: true + }) + t.on('end', () => stream.end()) + stream.on('socketError', function (a, b, c, n, error) { + t.same(a, 'list') + t.same(b, 'of') + t.same(c, 'args') + t.same(n, 123) + t.same(error, new Error('unable to write data to the TCP socket')) + t.end() + }) + stream.write('hello') +}) diff --git a/node_modules/thread-stream/test/exit.js b/node_modules/thread-stream/test/exit.js new file mode 100644 index 0000000..673ff2a --- /dev/null +++ b/node_modules/thread-stream/test/exit.js @@ -0,0 +1,14 @@ +'use strict' + +const { Writable } = require('stream') + +async function run (opts) { + const stream = new Writable({ + write (chunk, enc, cb) { + process.exit(1) + } + }) + return stream +} + +module.exports = run diff --git a/node_modules/thread-stream/test/get-context.js b/node_modules/thread-stream/test/get-context.js new file mode 100644 index 0000000..6bcdc33 --- /dev/null +++ 
b/node_modules/thread-stream/test/get-context.js @@ -0,0 +1,22 @@ +'use strict' + +const { Writable } = require('stream') +const parentPort = require('worker_threads').parentPort + +async function run (opts) { + return new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + if (parentPort) { + parentPort.postMessage({ + code: 'EVENT', + name: 'context', + args: opts.$context + }) + } + cb() + } + }) +} + +module.exports = run diff --git a/node_modules/thread-stream/test/helper.d.ts b/node_modules/thread-stream/test/helper.d.ts new file mode 100644 index 0000000..7c00f2f --- /dev/null +++ b/node_modules/thread-stream/test/helper.d.ts @@ -0,0 +1 @@ +export declare function file(): string diff --git a/node_modules/thread-stream/test/helper.js b/node_modules/thread-stream/test/helper.js new file mode 100644 index 0000000..b927947 --- /dev/null +++ b/node_modules/thread-stream/test/helper.js @@ -0,0 +1,35 @@ +'use strict' + +const { join } = require('path') +const { tmpdir } = require('os') +const { unlinkSync } = require('fs') +const t = require('tap') + +const files = [] +let count = 0 + +function file () { + const file = join(tmpdir(), `thread-stream-${process.pid}-${count++}`) + files.push(file) + return file +} + +process.on('beforeExit', () => { + t.comment('unlink files') + for (const file of files) { + try { + t.comment(`unlinking ${file}`) + unlinkSync(file) + } catch (e) { + console.log(e) + } + } + t.comment('unlink completed') +}) + +module.exports.file = file + +if (process.env.SKIP_PROCESS_EXIT_CHECK !== 'true') { + const why = require('why-is-node-running') + setInterval(why, 10000).unref() +} diff --git a/node_modules/thread-stream/test/indexes.test.js b/node_modules/thread-stream/test/indexes.test.js new file mode 100644 index 0000000..fab8e0a --- /dev/null +++ b/node_modules/thread-stream/test/indexes.test.js @@ -0,0 +1,11 @@ +'use strict' + +const { test } = require('tap') +const indexes = require('../lib/indexes') + +for (const index of Object.keys(indexes)) { + test(`${index} is lock free`, function (t) { + t.equal(Atomics.isLockFree(indexes[index]), true) + t.end() + }) +} diff --git a/node_modules/thread-stream/test/multibyte-chars.test.mjs b/node_modules/thread-stream/test/multibyte-chars.test.mjs new file mode 100644 index 0000000..dff32dd --- /dev/null +++ b/node_modules/thread-stream/test/multibyte-chars.test.mjs @@ -0,0 +1,74 @@ +import { test } from 'tap' +import { readFile } from 'fs' +import ThreadStream from '../index.js' +import { join } from 'desm' +import { file } from './helper.js' + +test('break up utf8 multibyte (sync)', (t) => { + t.plan(2) + const longString = '\u03A3'.repeat(16) + + const dest = file() + const stream = new ThreadStream({ + bufferSize: 15, // this must be odd + filename: join(import.meta.url, 'to-file.js'), + workerData: { dest }, + sync: true + }) + + stream.on('finish', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, longString) + }) + }) + + stream.write(longString) + stream.end() +}) + +test('break up utf8 multibyte (async)', (t) => { + t.plan(2) + const longString = '\u03A3'.repeat(16) + + const dest = file() + const stream = new ThreadStream({ + bufferSize: 15, // this must be odd + filename: join(import.meta.url, 'to-file.js'), + workerData: { dest }, + sync: false + }) + + stream.on('finish', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, longString) + }) + }) + + stream.write(longString) + stream.end() +}) + +test('break up utf8 multibyte several times
bigger than write buffer', (t) => { + t.plan(2) + const longString = '\u03A3'.repeat(32) + + const dest = file() + const stream = new ThreadStream({ + bufferSize: 15, // this must be odd + filename: join(import.meta.url, 'to-file.js'), + workerData: { dest }, + sync: false + }) + + stream.on('finish', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, longString) + }) + }) + + stream.write(longString) + stream.end() +}) diff --git a/node_modules/thread-stream/test/never-drain.test.js b/node_modules/thread-stream/test/never-drain.test.js new file mode 100644 index 0000000..f55a4cf --- /dev/null +++ b/node_modules/thread-stream/test/never-drain.test.js @@ -0,0 +1,57 @@ +const { test } = require('tap') +const ThreadStream = require('../index') +const { join } = require('path') + +function retryUntilTimeout (fn, timeout) { + const start = Date.now() + return new Promise((resolve, reject) => { + async function run () { + if (fn()) { + resolve() + return + } + + if (Date.now() - start >= timeout) { + reject(new Error('timeout')) + return + } + setTimeout(run, 10) + } + + run() + }) +} + +const isNode18 = process.version.indexOf('v18') === 0 + +test('emit warning when the worker gracefully exit without the stream ended', { skip: !isNode18 }, async function (t) { + const expectedWarning = 'ThreadStream: process exited before destination stream was drained. this may indicate that the destination stream try to write to a another missing stream' + const stream = new ThreadStream({ + filename: join(__dirname, 'to-next.js') + }) + stream.unref() + + let streamWarning + function saveWarning (e) { + if (e.message === expectedWarning) { + streamWarning = e + } + } + process.on('warning', saveWarning) + + const data = 'hello'.repeat(10) + for (let i = 0; i < 1000; i++) { + if (streamWarning?.message === expectedWarning) { + break + } + stream.write(data) + await new Promise((resolve) => { + setTimeout(resolve, 1) + }) + } + + process.off('warning', saveWarning) + t.equal(streamWarning?.message, expectedWarning) + + await retryUntilTimeout(() => stream.worker.exited === true, 3000) +}) diff --git a/node_modules/thread-stream/test/on-message.js b/node_modules/thread-stream/test/on-message.js new file mode 100644 index 0000000..4aaf09e --- /dev/null +++ b/node_modules/thread-stream/test/on-message.js @@ -0,0 +1,18 @@ +'use strict' + +const { parentPort } = require('worker_threads') +const { Writable } = require('stream') + +function run () { + parentPort.once('message', function ({ text, takeThisPortPlease }) { + takeThisPortPlease.postMessage(`received: ${text}`) + }) + return new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + cb() + } + }) +} + +module.exports = run diff --git a/node_modules/thread-stream/test/pkg/index.js b/node_modules/thread-stream/test/pkg/index.js new file mode 100644 index 0000000..4f40ac5 --- /dev/null +++ b/node_modules/thread-stream/test/pkg/index.js @@ -0,0 +1,37 @@ +'use strict' + +/** + * This file is packaged using pkg in order to test if worker.js works in that context + */ + +const { test } = require('tap') +const { join } = require('path') +const { file } = require('../helper') +const ThreadStream = require('../..') + +test('bundlers support with .js file', function (t) { + t.plan(1) + + globalThis.__bundlerPathsOverrides = { + 'thread-stream-worker': join(__dirname, '..', 'custom-worker.js') + } + + const dest = file() + + process.on('uncaughtException', (error) => { + console.log(error) + }) + + const stream = new ThreadStream({ 
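+ // to-file.js and custom-worker.js are bundled into the executable via the
+ // "assets" list in pkg.config.json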
+ filename: join(__dirname, '..', 'to-file.js'), + workerData: { dest }, + sync: true + }) + + stream.worker.removeAllListeners('message') + stream.worker.once('message', (message) => { + t.equal(message.code, 'CUSTOM-WORKER-CALLED') + }) + + stream.end() +}) diff --git a/node_modules/thread-stream/test/pkg/pkg.config.json b/node_modules/thread-stream/test/pkg/pkg.config.json new file mode 100644 index 0000000..38905e0 --- /dev/null +++ b/node_modules/thread-stream/test/pkg/pkg.config.json @@ -0,0 +1,15 @@ +{ + "pkg": { + "assets": [ + "../custom-worker.js", + "../to-file.js" + ], + "targets": [ + "node14", + "node16", + "node18", + "node20" + ], + "outputPath": "test/pkg" + } +} \ No newline at end of file diff --git a/node_modules/thread-stream/test/pkg/pkg.test.js b/node_modules/thread-stream/test/pkg/pkg.test.js new file mode 100644 index 0000000..c162225 --- /dev/null +++ b/node_modules/thread-stream/test/pkg/pkg.test.js @@ -0,0 +1,46 @@ +'use strict' + +const { test } = require('tap') +const config = require('./pkg.config.json') +const { promisify } = require('util') +const { unlink } = require('fs/promises') +const { join } = require('path') +const { platform } = require('process') +const exec = promisify(require('child_process').exec) + +test('worker test when packaged into executable using pkg', async (t) => { + const packageName = 'index' + + // package the app into several node versions, check config for more info + const filePath = `${join(__dirname, packageName)}.js` + const configPath = join(__dirname, 'pkg.config.json') + process.env.NODE_OPTIONS ||= '' + process.env.NODE_OPTIONS = '--no-warnings' + const { stderr } = await exec(`npx pkg ${filePath} --config ${configPath}`) + + // there should be no error when packaging + t.equal(stderr, '') + + // pkg outputs files in the following format by default: {filename}-{node version} + for (const target of config.pkg.targets) { + // execute the packaged test + let executablePath = `${join(config.pkg.outputPath, packageName)}-${target}` + + // when on windows, we need the .exe extension + if (platform === 'win32') { + executablePath = `${executablePath}.exe` + } else { + executablePath = `./${executablePath}` + } + + const { stderr } = await exec(executablePath) + + // check if there were no errors + t.equal(stderr, '') + + // clean up afterwards + await unlink(executablePath) + } + + t.end() +}) diff --git a/node_modules/thread-stream/test/port.js b/node_modules/thread-stream/test/port.js new file mode 100644 index 0000000..8c73b63 --- /dev/null +++ b/node_modules/thread-stream/test/port.js @@ -0,0 +1,16 @@ +'use strict' + +const { Writable } = require('stream') + +function run (opts) { + const { port } = opts + return new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + port.postMessage(chunk.toString()) + cb() + } + }) +} + +module.exports = run diff --git a/node_modules/thread-stream/test/post-message.test.js b/node_modules/thread-stream/test/post-message.test.js new file mode 100644 index 0000000..a266ca7 --- /dev/null +++ b/node_modules/thread-stream/test/post-message.test.js @@ -0,0 +1,24 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const { once } = require('events') +const { MessageChannel } = require('worker_threads') +const ThreadStream = require('..') + +test('message events emitted on the stream are posted to the worker', async function (t) { + t.plan(1) + + const { port1, port2 } = new MessageChannel() + const stream = new ThreadStream({ + filename: join(__dirname, 
'on-message.js'), + sync: false + }) + t.teardown(() => { + stream.end() + }) + + stream.emit('message', { text: 'hello', takeThisPortPlease: port1 }, [port1]) + const [confirmation] = await once(port2, 'message') + t.equal(confirmation, 'received: hello') +}) diff --git a/node_modules/thread-stream/test/string-limit-2.test.js b/node_modules/thread-stream/test/string-limit-2.test.js new file mode 100644 index 0000000..0931d02 --- /dev/null +++ b/node_modules/thread-stream/test/string-limit-2.test.js @@ -0,0 +1,41 @@ +'use strict' + +const t = require('tap') + +if (process.env.CI) { + t.skip('skip on CI') + process.exit(0) +} + +const { join } = require('path') +const { file } = require('./helper') +const { createReadStream } = require('fs') +const ThreadStream = require('..') +const buffer = require('buffer') + +const MAX_STRING = buffer.constants.MAX_STRING_LENGTH + +t.plan(1) + +const dest = file() +const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: false +}) + +stream.on('close', async () => { + t.comment('close emitted') + let buf + for await (const chunk of createReadStream(dest)) { + buf = chunk + } + t.equal('asd', buf.toString().slice(-3)) +}) + +stream.on('ready', () => { + t.comment('open emitted') + stream.write('a'.repeat(MAX_STRING - 2)) + stream.write('asd') + stream.end() +}) diff --git a/node_modules/thread-stream/test/string-limit.test.js b/node_modules/thread-stream/test/string-limit.test.js new file mode 100644 index 0000000..f771f8e --- /dev/null +++ b/node_modules/thread-stream/test/string-limit.test.js @@ -0,0 +1,42 @@ +'use strict' + +const t = require('tap') + +if (process.env.CI) { + t.skip('skip on CI') + process.exit(0) +} + +const { join } = require('path') +const { file } = require('./helper') +const { stat } = require('fs') +const ThreadStream = require('..') + +t.setTimeout(30000) + +const dest = file() +const stream = new ThreadStream({ + filename: join(__dirname, 'to-file.js'), + workerData: { dest }, + sync: false +}) + +let length = 0 + +stream.on('close', () => { + stat(dest, (err, f) => { + t.error(err) + t.equal(f.size, length) + t.end() + }) +}) + +const buf = Buffer.alloc(1024).fill('x').toString() // 1 KB + +// This writes 1 GB of data +for (let i = 0; i < 1024 * 1024; i++) { + length += buf.length + stream.write(buf) +} + +stream.end() diff --git a/node_modules/thread-stream/test/syntax-error.mjs b/node_modules/thread-stream/test/syntax-error.mjs new file mode 100644 index 0000000..28bc923 --- /dev/null +++ b/node_modules/thread-stream/test/syntax-error.mjs @@ -0,0 +1,2 @@ +// this is a syntax error +import diff --git a/node_modules/thread-stream/test/thread-management.test.js b/node_modules/thread-stream/test/thread-management.test.js new file mode 100644 index 0000000..dabdc84 --- /dev/null +++ b/node_modules/thread-stream/test/thread-management.test.js @@ -0,0 +1,121 @@ +'use strict' + +const { test } = require('tap') +const { fork } = require('child_process') +const { join } = require('path') +const { readFile } = require('fs').promises +const { file } = require('./helper') +const { once } = require('events') +const ThreadStream = require('..') + +test('exits with 0', async function (t) { + const dest = file() + const child = fork(join(__dirname, 'create-and-exit.js'), [dest]) + + const [code] = await once(child, 'exit') + t.equal(code, 0) + + const data = await readFile(dest, 'utf8') + t.equal(data, 'hello world\n') +}) + +test('emit error if thread exits', async function (t) { + const 
stream = new ThreadStream({ + filename: join(__dirname, 'exit.js'), + sync: true + }) + + stream.on('ready', () => { + stream.write('hello world\n') + }) + + let [err] = await once(stream, 'error') + t.equal(err.message, 'the worker thread exited') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') +}) + +test('emit error if thread have unhandledRejection', async function (t) { + const stream = new ThreadStream({ + filename: join(__dirname, 'unhandledRejection.js'), + sync: true + }) + + stream.on('ready', () => { + stream.write('hello world\n') + }) + + let [err] = await once(stream, 'error') + t.equal(err.message, 'kaboom') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') +}) + +test('emit error if worker stream emit error', async function (t) { + const stream = new ThreadStream({ + filename: join(__dirname, 'error.js'), + sync: true + }) + + stream.on('ready', () => { + stream.write('hello world\n') + }) + + let [err] = await once(stream, 'error') + t.equal(err.message, 'kaboom') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') +}) + +test('emit error if thread have uncaughtException', async function (t) { + const stream = new ThreadStream({ + filename: join(__dirname, 'uncaughtException.js'), + sync: true + }) + + stream.on('ready', () => { + stream.write('hello world\n') + }) + + let [err] = await once(stream, 'error') + t.equal(err.message, 'kaboom') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') + + stream.write('noop'); + [err] = await once(stream, 'error') + t.equal(err.message, 'the worker has exited') +}) + +test('close the work if out of scope on gc', { skip: !global.WeakRef }, async function (t) { + const dest = file() + const child = fork(join(__dirname, 'close-on-gc.js'), [dest], { + execArgv: ['--expose-gc'] + }) + + const [code] = await once(child, 'exit') + t.equal(code, 0) + + const data = await readFile(dest, 'utf8') + t.equal(data, 'hello world\n') +}) diff --git a/node_modules/thread-stream/test/to-file-on-destroy.js b/node_modules/thread-stream/test/to-file-on-destroy.js new file mode 100644 index 0000000..7bfc5a8 --- /dev/null +++ b/node_modules/thread-stream/test/to-file-on-destroy.js @@ -0,0 +1,23 @@ +'use strict' + +const fs = require('fs') +const { Writable } = require('stream') + +function run (opts) { + let data = '' + return new Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + data += chunk.toString() + cb() + }, + destroy (err, cb) { + // process._rawDebug('destroy called') + fs.writeFile(opts.dest, data, function (err2) { + cb(err2 || err) + }) + } + }) +} + +module.exports = run diff --git a/node_modules/thread-stream/test/to-file-on-final.js b/node_modules/thread-stream/test/to-file-on-final.js new file mode 100644 index 0000000..4cefdeb --- /dev/null +++ b/node_modules/thread-stream/test/to-file-on-final.js @@ -0,0 +1,24 @@ +'use strict' + +const fs = require('fs') +const { Writable } = require('stream') + +function run (opts) { + let data = '' + return new 
Writable({ + autoDestroy: true, + write (chunk, enc, cb) { + data += chunk.toString() + cb() + }, + final (cb) { + setTimeout(function () { + fs.writeFile(opts.dest, data, function (err) { + cb(err) + }) + }, 100) + } + }) +} + +module.exports = run diff --git a/node_modules/thread-stream/test/to-file.js b/node_modules/thread-stream/test/to-file.js new file mode 100644 index 0000000..9ceb636 --- /dev/null +++ b/node_modules/thread-stream/test/to-file.js @@ -0,0 +1,12 @@ +'use strict' + +const fs = require('fs') +const { once } = require('events') + +async function run (opts) { + const stream = fs.createWriteStream(opts.dest) + await once(stream, 'open') + return stream +} + +module.exports = run diff --git a/node_modules/thread-stream/test/to-file.mjs b/node_modules/thread-stream/test/to-file.mjs new file mode 100644 index 0000000..2fcfe2d --- /dev/null +++ b/node_modules/thread-stream/test/to-file.mjs @@ -0,0 +1,8 @@ +import { createWriteStream } from 'fs' +import { once } from 'events' + +export default async function run (opts) { + const stream = createWriteStream(opts.dest) + await once(stream, 'open') + return stream +} diff --git a/node_modules/thread-stream/test/to-next.js b/node_modules/thread-stream/test/to-next.js new file mode 100644 index 0000000..8148f45 --- /dev/null +++ b/node_modules/thread-stream/test/to-next.js @@ -0,0 +1,9 @@ +'use strict' + +const { PassThrough } = require('stream') + +async function run (opts) { + return new PassThrough({}) +} + +module.exports = run diff --git a/node_modules/thread-stream/test/transpiled.test.js b/node_modules/thread-stream/test/transpiled.test.js new file mode 100644 index 0000000..047486f --- /dev/null +++ b/node_modules/thread-stream/test/transpiled.test.js @@ -0,0 +1,30 @@ +'use strict' + +const { test } = require('tap') +const { join } = require('path') +const { file } = require('./helper') +const ThreadStream = require('..') + +function basic (esVersion) { + test(`transpiled-ts-to-${esVersion}`, function (t) { + t.plan(2) + + const dest = file() + const stream = new ThreadStream({ + filename: join(__dirname, 'ts', `to-file.${esVersion}.cjs`), + workerData: { dest }, + sync: true + }) + + // These are arbitrary checks; the important aspect of this test is to ensure + // that we can properly load the transpiled file into our worker thread.
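+ // (the to-file.<esVersion>.cjs fixtures are generated by test/ts/transpile.sh,
+ // wired up through the "transpile" script in package.json)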
+ t.same(stream.writableEnded, false) + stream.end() + t.same(stream.writableEnded, true) + }) +} + +basic('es5') +basic('es6') +basic('es2017') +basic('esnext') diff --git a/node_modules/thread-stream/test/ts-commonjs-default-export.zip b/node_modules/thread-stream/test/ts-commonjs-default-export.zip new file mode 100644 index 0000000..15cb09c Binary files /dev/null and b/node_modules/thread-stream/test/ts-commonjs-default-export.zip differ diff --git a/node_modules/thread-stream/test/ts.test.ts b/node_modules/thread-stream/test/ts.test.ts new file mode 100644 index 0000000..70113c5 --- /dev/null +++ b/node_modules/thread-stream/test/ts.test.ts @@ -0,0 +1,33 @@ +import { test } from 'tap' +import { readFile } from 'fs' +import ThreadStream from '../index.js' +import { join } from 'path' +import { file } from './helper.js' + + +test('typescript module', function (t) { + t.plan(5) + + const dest = file() + const stream = new ThreadStream({ + filename: join(__dirname, 'ts', 'to-file.ts'), + workerData: { dest }, + sync: true + }) + + stream.on('finish', () => { + readFile(dest, 'utf8', (err, data) => { + t.error(err) + t.equal(data, 'hello world\nsomething else\n') + }) + }) + + stream.on('close', () => { + t.pass('close emitted') + }) + + t.ok(stream.write('hello world\n')) + t.ok(stream.write('something else\n')) + + stream.end() +}) diff --git a/node_modules/thread-stream/test/ts/to-file.ts b/node_modules/thread-stream/test/ts/to-file.ts new file mode 100644 index 0000000..da132eb --- /dev/null +++ b/node_modules/thread-stream/test/ts/to-file.ts @@ -0,0 +1,10 @@ +import { type PathLike, type WriteStream, createWriteStream } from 'fs' +import { once } from 'events' + +export default async function run ( + opts: { dest: PathLike }, +): Promise<WriteStream> { + const stream = createWriteStream(opts.dest) + await once(stream, 'open') + return stream +} diff --git a/node_modules/thread-stream/test/ts/transpile.sh b/node_modules/thread-stream/test/ts/transpile.sh new file mode 100644 index 0000000..5f36dac --- /dev/null +++ b/node_modules/thread-stream/test/ts/transpile.sh @@ -0,0 +1,19 @@ +#!/bin/sh + +set -e + +cd ./test/ts; + +if (echo "${npm_config_user_agent}" | grep "yarn"); then + export RUNNER="yarn"; +else + export RUNNER="npx"; +fi + +test ./to-file.ts -ot ./to-file.es5.cjs || ("${RUNNER}" tsc --skipLibCheck --target es5 ./to-file.ts && mv ./to-file.js ./to-file.es5.cjs); +test ./to-file.ts -ot ./to-file.es6.mjs || ("${RUNNER}" tsc --skipLibCheck --target es6 ./to-file.ts && mv ./to-file.js ./to-file.es6.mjs); +test ./to-file.ts -ot ./to-file.es6.cjs || ("${RUNNER}" tsc --skipLibCheck --target es6 --module commonjs ./to-file.ts && mv ./to-file.js ./to-file.es6.cjs); +test ./to-file.ts -ot ./to-file.es2017.mjs || ("${RUNNER}" tsc --skipLibCheck --target es2017 ./to-file.ts && mv ./to-file.js ./to-file.es2017.mjs); +test ./to-file.ts -ot ./to-file.es2017.cjs || ("${RUNNER}" tsc --skipLibCheck --target es2017 --module commonjs ./to-file.ts && mv ./to-file.js ./to-file.es2017.cjs); +test ./to-file.ts -ot ./to-file.esnext.mjs || ("${RUNNER}" tsc --skipLibCheck --target esnext --module esnext ./to-file.ts && mv ./to-file.js ./to-file.esnext.mjs); +test ./to-file.ts -ot ./to-file.esnext.cjs || ("${RUNNER}" tsc --skipLibCheck --target esnext --module commonjs ./to-file.ts && mv ./to-file.js ./to-file.esnext.cjs); diff --git a/node_modules/thread-stream/test/uncaughtException.js b/node_modules/thread-stream/test/uncaughtException.js new file mode 100644 index 0000000..7f01e2f --- /dev/null +++
b/node_modules/thread-stream/test/uncaughtException.js @@ -0,0 +1,21 @@ +'use strict' + +const { Writable } = require('stream') + +// Nop console.error to avoid printing things out +console.error = () => {} + +setImmediate(function () { + throw new Error('kaboom') +}) + +async function run (opts) { + const stream = new Writable({ + write (chunk, enc, cb) { + cb() + } + }) + return stream +} + +module.exports = run diff --git a/node_modules/thread-stream/test/unhandledRejection.js b/node_modules/thread-stream/test/unhandledRejection.js new file mode 100644 index 0000000..d7cc6c2 --- /dev/null +++ b/node_modules/thread-stream/test/unhandledRejection.js @@ -0,0 +1,21 @@ +'use strict' + +const { Writable } = require('stream') + +// Nop console.error to avoid printing things out +console.error = () => {} + +setImmediate(function () { + Promise.reject(new Error('kaboom')) +}) + +async function run (opts) { + const stream = new Writable({ + write (chunk, enc, cb) { + cb() + } + }) + return stream +} + +module.exports = run diff --git a/node_modules/thread-stream/test/yarnrc.yml b/node_modules/thread-stream/test/yarnrc.yml new file mode 100644 index 0000000..80215c3 --- /dev/null +++ b/node_modules/thread-stream/test/yarnrc.yml @@ -0,0 +1,7 @@ +nodeLinker: pnp +pnpMode: loose +pnpEnableEsmLoader: false +packageExtensions: + debug@*: + dependencies: + supports-color: '*' diff --git a/node_modules/thread-stream/tsconfig.json b/node_modules/thread-stream/tsconfig.json new file mode 100644 index 0000000..35dd65f --- /dev/null +++ b/node_modules/thread-stream/tsconfig.json @@ -0,0 +1,8 @@ +{ + "compilerOptions": { + "esModuleInterop": true + }, + "files": [ + "index.d.ts" + ], +} \ No newline at end of file diff --git a/node_modules/wrappy/LICENSE b/node_modules/wrappy/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/wrappy/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/wrappy/README.md b/node_modules/wrappy/README.md new file mode 100644 index 0000000..98eab25 --- /dev/null +++ b/node_modules/wrappy/README.md @@ -0,0 +1,36 @@ +# wrappy + +Callback wrapping utility + +## USAGE + +```javascript +var wrappy = require("wrappy") + +// var wrapper = wrappy(wrapperFunction) + +// make sure a cb is called only once +// See also: http://npm.im/once for this specific use case +var once = wrappy(function (cb) { + var called = false + return function () { + if (called) return + called = true + return cb.apply(this, arguments) + } +}) + +function printBoo () { + console.log('boo') +} +// has some rando property +printBoo.iAmBooPrinter = true + +var onlyPrintOnce = once(printBoo) + +onlyPrintOnce() // prints 'boo' +onlyPrintOnce() // does nothing + +// random property is retained! 
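+var assert = require("assert")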
+assert.equal(onlyPrintOnce.iAmBooPrinter, true) +``` diff --git a/node_modules/wrappy/package.json b/node_modules/wrappy/package.json new file mode 100644 index 0000000..1307520 --- /dev/null +++ b/node_modules/wrappy/package.json @@ -0,0 +1,29 @@ +{ + "name": "wrappy", + "version": "1.0.2", + "description": "Callback wrapping utility", + "main": "wrappy.js", + "files": [ + "wrappy.js" + ], + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "tap": "^2.3.1" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/wrappy" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "homepage": "https://github.com/npm/wrappy" +} diff --git a/node_modules/wrappy/wrappy.js b/node_modules/wrappy/wrappy.js new file mode 100644 index 0000000..bb7e7d6 --- /dev/null +++ b/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/package-lock.json b/package-lock.json index 0c8ded1..eb3f595 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2,5 +2,286 @@ "name": "polymech-astro", "lockfileVersion": 3, "requires": true, - "packages": {} + "packages": { + "": { + "dependencies": { + "pino": "^10.1.0", + "pino-pretty": "^13.1.3" + } + }, + "node_modules/@pinojs/redact": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@pinojs/redact/-/redact-0.4.0.tgz", + "integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==", + "license": "MIT" + }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "license": "MIT" + }, + "node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": 
"sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/fast-copy": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-4.0.2.tgz", + "integrity": "sha512-ybA6PDXIXOXivLJK/z9e+Otk7ve13I4ckBvGO5I2RRmBU1gMHLVDJYEuJYhGwez7YNlYji2M2DvVU+a9mSFDlw==", + "license": "MIT" + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "license": "MIT" + }, + "node_modules/help-me": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz", + "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==", + "license": "MIT" + }, + "node_modules/joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/pino": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-10.1.0.tgz", + "integrity": "sha512-0zZC2ygfdqvqK8zJIr1e+wT1T/L+LF6qvqvbzEQ6tiMAoTqEVK9a1K3YRu8HEUvGEvNqZyPJTtb2sNIoTkB83w==", + "license": "MIT", + "dependencies": { + "@pinojs/redact": "^0.4.0", + "atomic-sleep": "^1.0.0", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^4.0.1", + "thread-stream": "^3.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz", + "integrity": "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-pretty": { + "version": "13.1.3", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.3.tgz", + "integrity": "sha512-ttXRkkOz6WWC95KeY9+xxWL6AtImwbyMHrL1mSwqwW9u+vLp/WIElvHvCSDg0xO/Dzrggz1zv3rN5ovTRVowKg==", + "license": "MIT", + "dependencies": { + 
"colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^4.0.0", + "fast-safe-stringify": "^2.1.1", + "help-me": "^5.0.0", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^3.0.0", + "pump": "^3.0.0", + "secure-json-parse": "^4.0.0", + "sonic-boom": "^4.0.1", + "strip-json-comments": "^5.0.2" + }, + "bin": { + "pino-pretty": "bin.js" + } + }, + "node_modules/pino-pretty/node_modules/pino-abstract-transport": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz", + "integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-std-serializers": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz", + "integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==", + "license": "MIT" + }, + "node_modules/process-warning": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", + "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "license": "MIT" + }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/secure-json-parse": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.1.0.tgz", + "integrity": "sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/sonic-boom": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", + "integrity": 
"sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/strip-json-comments": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/thread-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", + "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==", + "license": "MIT", + "dependencies": { + "real-require": "^0.2.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + } + } } diff --git a/package.json b/package.json new file mode 100644 index 0000000..2dabb83 --- /dev/null +++ b/package.json @@ -0,0 +1,6 @@ +{ + "dependencies": { + "pino": "^10.1.0", + "pino-pretty": "^13.1.3" + } +} diff --git a/packages/imagetools_3/astroViteConfigs.js b/packages/imagetools_3/astroViteConfigs.js index 613ad89..8b2d327 100644 --- a/packages/imagetools_3/astroViteConfigs.js +++ b/packages/imagetools_3/astroViteConfigs.js @@ -1,12 +1,12 @@ export default { - "environment": "build", + "environment": "dev", "isSsrBuild": false, "projectBase": "", - "publicDir": "C:\\Users\\zx\\Desktop\\polymech\\library.polymech\\public\\", - "rootDir": "C:\\Users\\zx\\Desktop\\polymech\\library.polymech\\", - "mode": "production", - "outDir": "C:\\Users\\zx\\Desktop\\polymech\\library.polymech\\dist\\", - "assetsDir": "_astro", + "publicDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\public\\", + "rootDir": "C:\\Users\\zx\\Desktop\\polymech\\site2\\", + "mode": "dev", + "outDir": "dist", + "assetsDir": "/_astro", "sourcemap": false, "assetFileNames": "/_astro/[name]@[width].[hash][extname]" } \ No newline at end of file diff --git a/packages/polymech/package.json b/packages/polymech/package.json index 4e95e90..8996696 100644 --- a/packages/polymech/package.json +++ b/packages/polymech/package.json @@ -49,6 +49,8 @@ "mdast-util-to-string": "^4.0.0", "node-xlsx": "^0.24.0", "p-map": "^7.0.3", + "pino": "^10.1.0", + "pino-pretty": "^13.1.3", "quicktype-core": "^23.2.6", "react-jsx-parser": "^2.4.0", "reading-time": "^1.5.0", @@ -61,4 +63,4 @@ "unist-util-visit": "^5.0.0", "yargs": "^18.0.0" } -} \ No newline at end of file +} diff --git a/packages/polymech/src/app/config-loader.ts b/packages/polymech/src/app/config-loader.ts index f645234..6a1f859 100644 --- a/packages/polymech/src/app/config-loader.ts +++ b/packages/polymech/src/app/config-loader.ts @@ -3,11 +3,13 @@ import * as fs from "fs"; import * as path from "path"; import yargs from 'yargs'; import { hideBin } from 'yargs/helpers'; - -import { substitute } from "@polymech/commons/variables"; 
+import { sync as read } from '@polymech/fs/read' +import { substitute, DEFAULT_VARS } from "@polymech/commons/variables"; import { appConfigSchema } from "./config.schema.js"; import type { AppConfig } from "./config.schema.js"; import { z } from "astro/zod"; +import { logger } from "./logger.js"; const I18N_SOURCE_LANGUAGE = 'en'; @@ -47,13 +49,16 @@ export function loadConfig( // 1. Load Library Config (Defaults) let rawLibraryContent: string; try { + logger.info(`Loading library config from ${config}`); rawLibraryContent = fs.readFileSync(config, 'utf-8'); } catch (error) { + logger.error({ err: error }, `Failed to read library config file at ${config}: ${error}`); throw new Error(`Failed to read library config file at ${config}: ${error}`); } const variables = { - LANG: locale + LANG: locale, + ...DEFAULT_VARS }; const substitutedLibraryContent = substitute(false, rawLibraryContent, variables); @@ -61,6 +66,7 @@ try { libraryConfig = JSON.parse(substitutedLibraryContent); } catch (error) { + logger.error({ err: error }, `Failed to parse library config JSON: ${error}`); throw new Error(`Failed to parse library config JSON: ${error}`); } // 2. Parse CLI Arguments @@ -77,30 +83,21 @@ if (fs.existsSync(userConfigPath)) { try { const rawUserContent = fs.readFileSync(userConfigPath, 'utf-8'); - const substitutedUserContent = substitute(false, rawUserContent, variables); + const substitutedUserContent = substitute(false, rawUserContent, variables, true); + userConfig = JSON.parse(substitutedUserContent); } catch (error) { - console.warn(`Failed to load or parse user config at ${userConfigPath}: ${error}`); + logger.error({ err: error }, `Failed to load or parse user config at ${userConfigPath}: ${error}`); } } else { - console.log('User config not found at', userConfigPath); + logger.warn(`User config not found at ${userConfigPath}`); } - - // 5. Merge: Library <- User <- CLI - // Note: yargs parses --config as part of argv, but also other flags like --core.logging_namespace - // We filter out specific known CLI-only flags if needed, but config schema validation will drop unknown keys anyway? - // Actually zod 'strip' is default in safeParse? No, usually it passes through unless strict(). - // We should probably rely on valid keys overwriting. - - // CLI args often come with standard keys like '$0', '_' which we might want to exclude if we blindly merge. - // However, deepMerge will add them. - // Ideally we would only merge keys that exist in the schema, but dynamic is fine for now. let mergedConfig = deepMerge(libraryConfig, userConfig); mergedConfig = deepMerge(mergedConfig, argv); // @todo 6. Validate // const result = schema.parse(mergedConfig); return mergedConfig; }
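The merge order implemented above is library defaults, then user config, then CLI arguments, with later sources winning key by key. A minimal sketch of that precedence under an assumed recursive-merge semantics; `deepMerge` below is a hypothetical stand-in for the module's own helper, and the config keys are illustrative:

```typescript
// Hypothetical stand-in for the module's deepMerge helper.
function deepMerge (a: any, b: any): any {
  if (a && b && typeof a === 'object' && typeof b === 'object' &&
      !Array.isArray(a) && !Array.isArray(b)) {
    const out: any = { ...a }
    for (const key of Object.keys(b)) out[key] = deepMerge(a[key], b[key])
    return out
  }
  return b === undefined ? a : b
}

const libraryConfig = { core: { locale: 'en', cache: true } } // library defaults
const userConfig = { core: { locale: 'de' } }                 // user config file
const argv = { core: { cache: false } }                       // CLI flags via yargs

// Library <- User <- CLI: later sources win per key.
let merged = deepMerge(libraryConfig, userConfig)
merged = deepMerge(merged, argv)
console.log(merged) // => { core: { locale: 'de', cache: false } }
```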
diff --git a/packages/polymech/src/app/logger.ts b/packages/polymech/src/app/logger.ts new file mode 100644 index 0000000..8194575 --- /dev/null +++ b/packages/polymech/src/app/logger.ts @@ -0,0 +1,31 @@ +import pino from 'pino'; +import path from 'path'; +import pretty from 'pino-pretty'; + +const fileTransport = pino.transport({ + target: 'pino/file', + options: { destination: path.join(process.cwd(), 'app.log') }, +}); + +const consoleTransport = pretty({ + colorize: true, + ignore: 'pid,hostname', +}); + +export const logger = pino( + { + level: process.env.PINO_LOG_LEVEL || 'info', + formatters: { + level: (label) => { + return { level: label.toUpperCase() }; + }, + }, + timestamp: pino.stdTimeFunctions.isoTime, + }, + pino.multistream([ + { stream: fileTransport, level: 'info' }, + { stream: consoleTransport, level: 'info' }, + ]) +); + +export default logger;
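Because logger.ts fans a single pino instance out to both a file transport and a prettified console stream, callers only ever import the one logger. A short usage sketch (the child-logger binding is illustrative, not something this diff adds):

```typescript
import { logger } from './logger.js'

// One structured line goes to app.log and, prettified, to the console.
logger.info('config loaded')

// Idiomatic pino error logging: bind the Error under `err` so pino's
// default error serializer captures the message and stack trace.
try {
  JSON.parse('{ not json')
} catch (error) {
  logger.error({ err: error }, 'failed to parse input')
}

// Child loggers stamp bound fields onto every line (hypothetical field).
const log = logger.child({ module: 'config-loader' })
log.warn('user config not found')
```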
diff --git a/packages/polymech/src/model/component.ts b/packages/polymech/src/model/component.ts index 9be9f1d..457bd83 100644 --- a/packages/polymech/src/model/component.ts +++ b/packages/polymech/src/model/component.ts @@ -8,13 +8,14 @@ import { sync as exists } from '@polymech/fs/exists' import { env } from '../base/index.js' import { gallery } from '@polymech/astro-base/base/media.js'; +import { resolve } from '@polymech/commons' import { get } from '@polymech/commons/component' import { PFilterValid } from '@polymech/commons/filter' - import { IAssemblyData } from '@polymech/cad' import { logger as log } from '../base/index.js' import { translate } from "../base/i18n.js" import { slugify } from '../base/strings.js' + import { loadConfig } from '../app/config-loader.js' import { filesEx, forward_slash, resolveConfig, template } from '@polymech/commons' @@ -25,10 +26,11 @@ import type { Loader, LoaderContext } from 'astro/loaders' import { PolymechInstance } from '../registry.js'; import { AppConfig } from "../app/config.schema.js" + const config = (): AppConfig => PolymechInstance.getConfig(); // Config Accessors -const PRODUCT_ROOT = () => config().products?.root || ''; +const PRODUCT_ROOT = () => path.resolve(resolve(config().products?.root || '')); const PRODUCT_GLOB = () => config().products?.glob || ''; const PRODUCT_DIR = (rel: string) => path.join(PRODUCT_ROOT(), rel); @@ -43,9 +45,8 @@ const CAD_DEFAULT_CONFIGURATION = () => config().cad?.default_configuration || ' // Product Branches const PRODUCT_BRANCHES = () => { - const enabled = config().products?.enabled; - const resolvedPath = enabled ? path.resolve(enabled) : null; - return (resolvedPath && exists(resolvedPath)) ? read(resolvedPath, 'json') : null; + const enabled = path.resolve(resolve(config().products?.enabled)); + return exists(enabled) ? read(enabled, 'json') : null; } const parseBoolean = (value: any) => value === '1' || value === true || String(value).toLowerCase() === 'true'; @@ -63,6 +64,9 @@ export interface IStoreItem extends DataEntry { data: IComponentConfigEx } const filterBranch = (items: { rel: string, config, path }[], branch: string = 'site-prod') => { + if (!items) { + return [] + } const branches = PRODUCT_BRANCHES(); if (!branches) { return items @@ -75,30 +79,12 @@ return items.filter((item) => branchItems.includes(item.rel)) } -export const items = (branch: string) => filterBranch(get(`${PRODUCT_ROOT()}/${PRODUCT_GLOB()}`, PRODUCT_ROOT(), PFilterValid.library_component), branch) - -const onComponent = async (item: IStoreItem, ctx: ILoaderContextEx) => { - /* - const onNode = async (data: INodeCallback, configuration: string) => { - if (!CAD_EXPORT_SUB_COMPONENTS || !data.target.endsWith('.json')) { - return - } - const modelPath = `${CAD_MODEL_FILE_PATH(data.target,configuration)}` - const model: IAssemblyData = read(modelPath, 'json') as IAssemblyData - if (!model) { - return - } - const configurations = Object.keys(model.Configurations).filter((c) => { - return c !== CAD_DEFAULT_CONFIGURATION && - c !== 'Global' && - model.Configurations[c].Hide !== '1' - }) - if (!configurations.length || - model.Configurations?.Global?.['Configurations'] !== '1') { - return - } - } - */ +export const items = (branch: string) => { + const root = PRODUCT_ROOT() + const glob = PRODUCT_GLOB() + const match = `${root}/${glob}` + const all = get(match, root, PFilterValid.marketplace_component) + return filterBranch(all, branch) } const cad = async (item: IStoreItem, ctx: ILoaderContextEx): Promise => {
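For readers following the component.ts changes: the file's `loader(branch)` factory returns an Astro content-layer `Loader`, whose `load` hook repopulates the store from `items(branch)`. A simplified sketch of that shape, reusing the module's own `items` helper; the loader name, import path, and everything beyond the `id`/`slug` wiring visible in the hunks above are assumptions:

```typescript
import type { Loader, LoaderContext } from 'astro/loaders'
import { items } from './component.js' // hypothetical path to the module above

export function componentLoader (branch: string): Loader {
  return {
    name: 'polymech-components', // hypothetical loader name
    async load ({ store, parseData }: LoaderContext): Promise<void> {
      // Rebuild the store from the branch-filtered product list.
      store.clear()
      for (const item of items(branch)) {
        const product: any = item.config
        const id = product.slug
        // Validate against the collection schema, then persist the entry.
        const data = await parseData({ id, data: product })
        store.set({ id, data })
      }
    }
  }
}
```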