This commit is contained in:
heiye111
2025-09-20 21:06:53 +08:00
commit c74f28caa7
2539 changed files with 365006 additions and 0 deletions

2
node_modules/pino/.eslintignore generated vendored Normal file
View File

@@ -0,0 +1,2 @@
pino.d.ts
test/types/pino.test-d.ts

8
node_modules/pino/.eslintrc generated vendored Normal file
View File

@@ -0,0 +1,8 @@
{
"extends": [
"standard"
],
"rules": {
"no-var": "off"
}
}

13
node_modules/pino/.github/dependabot.yml generated vendored Normal file
View File

@@ -0,0 +1,13 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10

61
node_modules/pino/.github/workflows/bench.yml generated vendored Normal file
View File

@@ -0,0 +1,61 @@
name: Benchmarks
on:
push:
branches:
- main
paths-ignore:
- 'docs/**'
- '*.md'
pull_request:
paths-ignore:
- 'docs/**'
- '*.md'
permissions:
contents: read
jobs:
benchmark_current:
name: benchmark current
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v5
with:
ref: ${{ github.base_ref }}
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Install Modules
run: npm i --ignore-scripts
- name: Run Benchmark
run: npm run bench | tee current.txt
- name: Upload Current Results
uses: actions/upload-artifact@v4
with:
name: current
path: current.txt
benchmark_branch:
name: benchmark branch
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v5
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Install Modules
run: npm i --ignore-scripts
- name: Run Benchmark
run: npm run bench | tee branch.txt
- name: Upload Branch Results
uses: actions/upload-artifact@v4
with:
name: branch
path: branch.txt

88
node_modules/pino/.github/workflows/ci.yml generated vendored Normal file
View File

@@ -0,0 +1,88 @@
name: CI
on:
push:
branches:
- main
- 'v*'
paths-ignore:
- 'docs/**'
- '*.md'
pull_request:
paths-ignore:
- 'docs/**'
- '*.md'
# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true
jobs:
dependency-review:
name: Dependency Review
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Check out repo
uses: actions/checkout@v5
with:
persist-credentials: false
- name: Dependency review
uses: actions/dependency-review-action@v4
test:
name: ${{ matrix.node-version }} ${{ matrix.os }}
runs-on: ${{ matrix.os }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
os: [macOS-latest, windows-latest, ubuntu-latest]
node-version: [18, 20, 22]
exclude:
- os: windows-latest
node-version: 22
steps:
- name: Check out repo
uses: actions/checkout@v5
with:
persist-credentials: false
- name: Setup Node ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Install dependencies
run: npm i --ignore-scripts
- name: Run tests
run: npm run test-ci
- name: Run smoke test
if: >
matrix.os != 'windows-latest' &&
matrix.node-version > 14
run: npm run test:smoke
automerge:
name: Automerge Dependabot PRs
if: >
github.event_name == 'pull_request' &&
github.event.pull_request.user.login == 'dependabot[bot]'
needs: test
permissions:
pull-requests: write
contents: write
runs-on: ubuntu-latest
steps:
- uses: fastify/github-action-merge-dependabot@v3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
exclude: 'sonic-boom,pino-std-serializers,quick-format-unescaped,fast-redact'

30
node_modules/pino/.github/workflows/lock-threads.yml generated vendored Normal file
View File

@@ -0,0 +1,30 @@
name: 'Lock Threads'
on:
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
permissions:
issues: write
pull-requests: write
concurrency:
group: lock
jobs:
action:
runs-on: ubuntu-latest
steps:
- uses: jsumners/lock-threads@b27edac0ac998d42b2815e122b6c24b32b568321
with:
log-output: true
issue-inactive-days: '30'
issue-comment: >
This issue has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new issue for related bugs.
pr-comment: >
This pull request has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new issue for related bugs.

View File

@@ -0,0 +1,43 @@
name: Publish release
on:
workflow_dispatch:
inputs:
version:
description: 'The version number to tag and release'
required: true
type: string
prerelease:
description: 'Release as pre-release'
required: false
type: boolean
default: false
jobs:
release-npm:
runs-on: ubuntu-latest
environment: main
permissions:
contents: write
id-token: write
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4
- uses: actions/setup-node@v4
with:
node-version: '22'
registry-url: 'https://registry.npmjs.org'
- run: npm install npm -g
- run: npm install
- name: Change version number and sync
run: |
node build/sync-version.js ${{ inputs.version }}
- name: GIT commit and push all changed files
run: |
git config --global user.name "mcollina"
git config --global user.email "hello@matteocollina.com"
git commit -n -a -m "Bumped v${{ inputs.version }}"
git push origin HEAD:${{ github.ref }}
- run: npm publish --access public --tag ${{ inputs.prerelease == true && 'next' || 'latest' }}
- name: 'Create release notes'
run: |
npx @matteo.collina/release-notes -a ${{ secrets.GITHUB_TOKEN }} -t v${{ inputs.version }} -r pino -o pinojs ${{ github.event.inputs.prerelease == 'true' && '-p' || '' }} -c ${{ github.ref }}

23
node_modules/pino/.github/workflows/target-main.yml generated vendored Normal file
View File

@@ -0,0 +1,23 @@
name: PR Target Check
on:
  pull_request_target:
    types: [opened]
permissions:
  pull-requests: write
jobs:
  comment:
    # GitHub Actions expressions only accept single-quoted string literals;
    # `"master"` (double quotes) is a workflow parse error.
    if: ${{ github.base_ref != 'master' }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '⚠️ This pull request does not target the master branch.'
            })

0
node_modules/pino/.nojekyll generated vendored Normal file
View File

1
node_modules/pino/.prettierignore generated vendored Normal file
View File

@@ -0,0 +1 @@
*

8
node_modules/pino/.taprc.yaml generated vendored Normal file
View File

@@ -0,0 +1,8 @@
coverage: true
timeout: 480
check-coverage: false
reporter: terse
files:
- 'test/**/*.test.js'

1
node_modules/pino/CNAME generated vendored Normal file
View File

@@ -0,0 +1 @@
getpino.io

30
node_modules/pino/CONTRIBUTING.md generated vendored Normal file
View File

@@ -0,0 +1,30 @@
# Pino is an OPEN Open Source Project
## What?
Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project.
## Rules
Before you start coding, please read [Contributing to projects with git](https://jrfom.com/posts/2017/03/08/a-primer-on-contributing-to-projects-with-git/).
Notice that as long as you don't have commit-access to the project, you have to fork the project and open PRs from the feature branches of the forked project.
There are a few basic ground-rules for contributors:
1. **No `--force` pushes** on `master` or modifying the Git history in any way after a PR has been merged.
1. **Non-master branches** ought to be used for ongoing work.
1. **Non-trivial changes** ought to be subject to an **internal pull-request** to solicit feedback from other contributors.
1. All pull-requests for new features **must** target the `master` branch. PRs to fix bugs in LTS releases are also allowed.
1. Contributors should attempt to adhere to the prevailing code-style.
1. 100% code coverage
## Releases
Declaring formal releases remains the prerogative of the project maintainer.
## Changes to this arrangement
This is an experiment and feedback is welcome! This document may also be subject to pull-requests or changes by contributors where you believe you have something valuable to add or change.
-----------------------------------------

21
node_modules/pino/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016-2025 Matteo Collina, David Mark Clements and the Pino contributors listed at <https://github.com/pinojs/pino#the-team> and in the README file.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

177
node_modules/pino/README.md generated vendored Normal file
View File

@@ -0,0 +1,177 @@
![banner](pino-banner.png)
# pino
[![npm version](https://img.shields.io/npm/v/pino)](https://www.npmjs.com/package/pino)
[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino/ci.yml)](https://github.com/pinojs/pino/actions)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
[Very low overhead](#low-overhead) JavaScript logger.
## Documentation
* [Benchmarks ⇗](/docs/benchmarks.md)
* [API ⇗](/docs/api.md)
* [Browser API ⇗](/docs/browser.md)
* [Redaction ⇗](/docs/redaction.md)
* [Child Loggers ⇗](/docs/child-loggers.md)
* [Transports ⇗](/docs/transports.md)
* [Diagnostics ⇗](/docs/diagnostics.md)
* [Web Frameworks ⇗](/docs/web.md)
* [Pretty Printing ⇗](/docs/pretty.md)
* [Asynchronous Logging ⇗](/docs/asynchronous.md)
* [Ecosystem ⇗](/docs/ecosystem.md)
* [Help ⇗](/docs/help.md)
* [Long Term Support Policy ⇗](/docs/lts.md)
## Runtimes
### Node.js
Pino is built to run on [Node.js](http://nodejs.org).
### Bare
Pino works on [Bare](https://github.com/holepunchto/bare) with the [`pino-bare`](https://github.com/pinojs/pino-bare) compatibility module.
### Pear
Pino works on [Pear](https://docs.pears.com), which is built on [Bare](https://github.com/holepunchto/bare), with the [`pino-bare`](https://github.com/pinojs/pino-bare) compatibility module.
## Install
Using NPM:
```
$ npm install pino
```
Using YARN:
```
$ yarn add pino
```
If you would like to install pino v6, refer to https://github.com/pinojs/pino/tree/v6.x.
## Usage
```js
const logger = require('pino')()
logger.info('hello world')
const child = logger.child({ a: 'property' })
child.info('hello child!')
```
This produces:
```
{"level":30,"time":1531171074631,"msg":"hello world","pid":657,"hostname":"Davids-MBP-3.fritz.box"}
{"level":30,"time":1531171082399,"msg":"hello child!","pid":657,"hostname":"Davids-MBP-3.fritz.box","a":"property"}
```
For using Pino with a web framework see:
* [Pino with Fastify](docs/web.md#fastify)
* [Pino with Express](docs/web.md#express)
* [Pino with Hapi](docs/web.md#hapi)
* [Pino with Restify](docs/web.md#restify)
* [Pino with Koa](docs/web.md#koa)
* [Pino with Node core `http`](docs/web.md#http)
* [Pino with Nest](docs/web.md#nest)
* [Pino with Hono](docs/web.md#hono)
<a name="essentials"></a>
## Essentials
### Development Formatting
The [`pino-pretty`](https://github.com/pinojs/pino-pretty) module can be used to
format logs during development:
![pretty demo](pretty-demo.png)
### Transports & Log Processing
Due to Node's single-threaded event-loop, it's highly recommended that sending,
alert triggering, reformatting, and all forms of log processing
are conducted in a separate process or thread.
In Pino terminology, we call all log processors "transports" and recommend that the
transports be run in a worker thread using our `pino.transport` API.
For more details see our [Transports⇗](docs/transports.md) document.
### Low overhead
Using minimum resources for logging is very important. Log messages
tend to get added over time and this can lead to a throttling effect
on applications, such as reduced requests per second.
In many cases, Pino is over 5x faster than alternatives.
See the [Benchmarks](docs/benchmarks.md) document for comparisons.
### Bundling support
Pino supports being bundled using tools like webpack or esbuild.
See [Bundling](docs/bundling.md) document for more information.
<a name="team"></a>
## The Team
### Matteo Collina
<https://github.com/mcollina>
<https://www.npmjs.com/~matteo.collina>
<https://twitter.com/matteocollina>
### David Mark Clements
<https://github.com/davidmarkclements>
<https://www.npmjs.com/~davidmarkclements>
<https://twitter.com/davidmarkclem>
### James Sumners
<https://github.com/jsumners>
<https://www.npmjs.com/~jsumners>
<https://twitter.com/jsumners79>
### Thomas Watson Steen
<https://github.com/watson>
<https://www.npmjs.com/~watson>
<https://twitter.com/wa7son>
## Contributing
Pino is an **OPEN Open Source Project**. This means that:
> Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project.
See the [CONTRIBUTING.md](https://github.com/pinojs/pino/blob/main/CONTRIBUTING.md) file for more details.
<a name="acknowledgments"></a>
## Acknowledgments
This project was kindly sponsored by [nearForm](https://nearform.com).
This project is kindly sponsored by [Platformatic](https://platformatic.dev).
Logo and identity designed by Cosmic Fox Design: https://www.behance.net/cosmicfox.
## License
Licensed under [MIT](./LICENSE).
[elasticsearch]: https://www.elastic.co/products/elasticsearch
[kibana]: https://www.elastic.co/products/kibana

68
node_modules/pino/SECURITY.md generated vendored Normal file
View File

@@ -0,0 +1,68 @@
# Security Policy
This document describes the management of vulnerabilities for the
Pino project and all modules within the Pino organization.
## Reporting vulnerabilities
Individuals who find potential vulnerabilities in Pino are invited
to report them via email at matteo.collina@gmail.com.
### Strict measures when reporting vulnerabilities
Avoid creating new "informative" reports. Only create a new
report for a potential vulnerability if you are absolutely sure it
should be tagged as an actual vulnerability. Be mindful of the maintainers' time.
## Handling vulnerability reports
When a potential vulnerability is reported, the following actions are taken:
### Triage
**Delay:** 5 business days
Within 5 business days, a member of the security team provides a first answer to the
individual who submitted the potential vulnerability. The possible responses
can be:
* Acceptance: what was reported is considered as a new vulnerability
* Rejection: what was reported is not considered as a new vulnerability
* Need more information: the security team needs more information in order to evaluate what was reported.
Triaging should include updating issue fields:
* Asset - set/create the module affected by the report
* Severity - TBD, currently left empty
### Correction follow-up
**Delay:** 90 days
When a vulnerability is confirmed, a member of the security team volunteers to follow
up on this report.
With the help of the individual who reported the vulnerability, they contact
the maintainers of the vulnerable package to make them aware of the
vulnerability. The maintainers can be invited as participants to the reported issue.
With the package maintainer, they define a release date for the publication
of the vulnerability. Ideally, this release date should not happen before
the package has been patched.
The report's vulnerable versions upper limit should be set to:
* `*` if there is no fixed version available by the time of publishing the report.
* the last vulnerable version. For example: `<=1.2.3` if a fix exists in `1.2.4`
### Publication
**Delay:** 90 days
Within 90 days after the triage date, the vulnerability must be made public.
**Severity**: Vulnerability severity is assessed using [CVSS v.3](https://www.first.org/cvss/user-guide).
If the package maintainer is actively developing a patch, an additional delay
can be added with the approval of the security team and the individual who
reported the vulnerability.
At this point, a CVE will be requested by the team.

95
node_modules/pino/benchmarks/basic.bench.js generated vendored Normal file
View File

@@ -0,0 +1,95 @@
'use strict'
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const loglevel = require('./utils/wrap-log-level')(dest)
const plogNodeStream = pino(dest)
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', minLength: 4096 }))
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
process.env.DEBUG = 'dlog'
const debug = require('debug')
const dlog = debug('dlog')
dlog.log = function (s) { dest.write(s) }
const max = 10
const blog = bunyan.createLogger({
name: 'myapp',
streams: [{
level: 'trace',
stream: dest
}]
})
require('bole').output({
level: 'info',
stream: dest
}).setFastTime(true)
const chill = winston.createLogger({
transports: [
new winston.transports.Stream({
stream: fs.createWriteStream('/dev/null')
})
]
})
const run = bench([
function benchBunyan (cb) {
for (var i = 0; i < max; i++) {
blog.info('hello world')
}
setImmediate(cb)
},
function benchWinston (cb) {
for (var i = 0; i < max; i++) {
chill.log('info', 'hello world')
}
setImmediate(cb)
},
function benchBole (cb) {
for (var i = 0; i < max; i++) {
bole.info('hello world')
}
setImmediate(cb)
},
function benchDebug (cb) {
for (var i = 0; i < max; i++) {
dlog('hello world')
}
setImmediate(cb)
},
function benchLogLevel (cb) {
for (var i = 0; i < max; i++) {
loglevel.info('hello world')
}
setImmediate(cb)
},
function benchPino (cb) {
for (var i = 0; i < max; i++) {
plogDest.info('hello world')
}
setImmediate(cb)
},
function benchPinoMinLength (cb) {
for (var i = 0; i < max; i++) {
plogMinLength.info('hello world')
}
setImmediate(cb)
},
function benchPinoNodeStream (cb) {
for (var i = 0; i < max; i++) {
plogNodeStream.info('hello world')
}
setImmediate(cb)
}
], 10000)
run(run)

52
node_modules/pino/benchmarks/child-child.bench.js generated vendored Normal file
View File

@@ -0,0 +1,52 @@
'use strict'
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest).child({ a: 'property' }).child({ sub: 'child' })
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' }).child({ sub: 'child' })
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
.child({ a: 'property' })
.child({ sub: 'child' })
const max = 10
const blog = bunyan.createLogger({
name: 'myapp',
streams: [{
level: 'trace',
stream: dest
}]
}).child({ a: 'property' }).child({ sub: 'child' })
const run = bench([
function benchBunyanChildChild (cb) {
for (var i = 0; i < max; i++) {
blog.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoChildChild (cb) {
for (var i = 0; i < max; i++) {
plogDest.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoMinLengthChildChild (cb) {
for (var i = 0; i < max; i++) {
plogMinLength.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoNodeStreamChildChild (cb) {
for (var i = 0; i < max; i++) {
plogNodeStream.info({ hello: 'world' })
}
setImmediate(cb)
}
], 10000)
run(run)

73
node_modules/pino/benchmarks/child-creation.bench.js generated vendored Normal file
View File

@@ -0,0 +1,73 @@
'use strict'
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
const plogDest = pino(pino.destination(('/dev/null')))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
const max = 10
const blog = bunyan.createLogger({
name: 'myapp',
streams: [{
level: 'trace',
stream: dest
}]
})
require('bole').output({
level: 'info',
stream: dest
}).setFastTime(true)
const run = bench([
function benchBunyanCreation (cb) {
const child = blog.child({ a: 'property' })
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchBoleCreation (cb) {
const child = bole('child')
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoCreation (cb) {
const child = plogDest.child({ a: 'property' })
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoMinLengthCreation (cb) {
const child = plogMinLength.child({ a: 'property' })
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoNodeStreamCreation (cb) {
const child = plogNodeStream.child({ a: 'property' })
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoCreationWithOption (cb) {
const child = plogDest.child({ a: 'property' }, { redact: [] })
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
}
], 10000)
run(run)

62
node_modules/pino/benchmarks/child.bench.js generated vendored Normal file
View File

@@ -0,0 +1,62 @@
'use strict'
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')('child')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest).child({ a: 'property' })
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' })
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
const max = 10
const blog = bunyan.createLogger({
name: 'myapp',
streams: [{
level: 'trace',
stream: dest
}]
}).child({ a: 'property' })
require('bole').output({
level: 'info',
stream: dest
}).setFastTime(true)
const run = bench([
function benchBunyanChild (cb) {
for (var i = 0; i < max; i++) {
blog.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchBoleChild (cb) {
for (var i = 0; i < max; i++) {
bole.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoChild (cb) {
for (var i = 0; i < max; i++) {
plogDest.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoMinLengthChild (cb) {
for (var i = 0; i < max; i++) {
plogMinLength.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoNodeStreamChild (cb) {
for (var i = 0; i < max; i++) {
plogNodeStream.info({ hello: 'world' })
}
setImmediate(cb)
}
], 10000)
run(run)

88
node_modules/pino/benchmarks/deep-object.bench.js generated vendored Normal file
View File

@@ -0,0 +1,88 @@
'use strict'
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
delete require.cache[require.resolve('../')]
const loglevel = require('./utils/wrap-log-level')(dest)
const deep = Object.assign({}, require('../package.json'), { level: 'info' })
const max = 10
const blog = bunyan.createLogger({
name: 'myapp',
streams: [{
level: 'trace',
stream: dest
}]
})
require('bole').output({
level: 'info',
stream: dest
}).setFastTime(true)
const chill = winston.createLogger({
transports: [
new winston.transports.Stream({
stream: fs.createWriteStream('/dev/null')
})
]
})
const run = bench([
function benchBunyanDeepObj (cb) {
for (var i = 0; i < max; i++) {
blog.info(deep)
}
setImmediate(cb)
},
function benchWinstonDeepObj (cb) {
for (var i = 0; i < max; i++) {
chill.log(deep)
}
setImmediate(cb)
},
function benchBoleDeepObj (cb) {
for (var i = 0; i < max; i++) {
bole.info(deep)
}
setImmediate(cb)
},
function benchLogLevelDeepObj (cb) {
for (var i = 0; i < max; i++) {
loglevel.info(deep)
}
setImmediate(cb)
},
function benchPinoDeepObj (cb) {
for (var i = 0; i < max; i++) {
plogDest.info(deep)
}
setImmediate(cb)
},
function benchPinoMinLengthDeepObj (cb) {
for (var i = 0; i < max; i++) {
plogMinLength.info(deep)
}
setImmediate(cb)
},
function benchPinoNodeStreamDeepObj (cb) {
for (var i = 0; i < max; i++) {
plogNodeStream.info(deep)
}
setImmediate(cb)
}
], 10000)
run(run)

50
node_modules/pino/benchmarks/formatters.bench.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
'use strict'
const formatters = {
level (label, number) {
return {
log: {
level: label
}
}
},
bindings (bindings) {
return {
process: {
pid: bindings.pid
},
host: {
name: bindings.hostname
}
}
},
log (obj) {
return { foo: 'bar', ...obj }
}
}
const bench = require('fastbench')
const pino = require('../')
delete require.cache[require.resolve('../')]
const pinoNoFormatters = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const pinoFormatters = require('../')({ formatters }, pino.destination('/dev/null'))
const max = 10
const run = bench([
function benchPinoNoFormatters (cb) {
for (var i = 0; i < max; i++) {
pinoNoFormatters.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoFormatters (cb) {
for (var i = 0; i < max; i++) {
pinoFormatters.info({ hello: 'world' })
}
setImmediate(cb)
}
], 10000)
run(run)

67
node_modules/pino/benchmarks/internal/custom-levels.js generated vendored Normal file
View File

@@ -0,0 +1,67 @@
'use strict'
const bench = require('fastbench')
const pino = require('../../')
const base = pino(pino.destination('/dev/null'))
const baseCl = pino({
customLevels: { foo: 31 }
}, pino.destination('/dev/null'))
const child = base.child({})
const childCl = base.child({
customLevels: { foo: 31 }
})
const childOfBaseCl = baseCl.child({})
const max = 100
const run = bench([
function benchPinoNoCustomLevel (cb) {
for (var i = 0; i < max; i++) {
base.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoCustomLevel (cb) {
for (var i = 0; i < max; i++) {
baseCl.foo({ hello: 'world' })
}
setImmediate(cb)
},
function benchChildNoCustomLevel (cb) {
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoChildCustomLevel (cb) {
for (var i = 0; i < max; i++) {
childCl.foo({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoChildInheritedCustomLevel (cb) {
for (var i = 0; i < max; i++) {
childOfBaseCl.foo({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoChildCreation (cb) {
const child = base.child({})
for (var i = 0; i < max; i++) {
child.info({ hello: 'world' })
}
setImmediate(cb)
},
function benchPinoChildCreationCustomLevel (cb) {
const child = base.child({
customLevels: { foo: 31 }
})
for (var i = 0; i < max; i++) {
child.foo({ hello: 'world' })
}
setImmediate(cb)
}
], 10000)
run(run)

View File

@@ -0,0 +1,76 @@
'use strict'
const bench = require('fastbench')
const pino = require('../../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plog = pino(dest)
delete require.cache[require.resolve('../../')]
const plogDest = require('../../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../../')]
const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false }))
const deep = require('../../package.json')
deep.deep = JSON.parse(JSON.stringify(deep))
deep.deep.deep = JSON.parse(JSON.stringify(deep))
const longStr = JSON.stringify(deep)
const max = 10
const run = bench([
function benchPinoLongString (cb) {
for (var i = 0; i < max; i++) {
plog.info(longStr)
}
setImmediate(cb)
},
function benchPinoDestLongString (cb) {
for (var i = 0; i < max; i++) {
plogDest.info(longStr)
}
setImmediate(cb)
},
function benchPinoAsyncLongString (cb) {
for (var i = 0; i < max; i++) {
plogAsync.info(longStr)
}
setImmediate(cb)
},
function benchPinoDeepObj (cb) {
for (var i = 0; i < max; i++) {
plog.info(deep)
}
setImmediate(cb)
},
function benchPinoDestDeepObj (cb) {
for (var i = 0; i < max; i++) {
plogDest.info(deep)
}
setImmediate(cb)
},
function benchPinoAsyncDeepObj (cb) {
for (var i = 0; i < max; i++) {
plogAsync.info(deep)
}
setImmediate(cb)
},
function benchPinoInterpolateDeep (cb) {
for (var i = 0; i < max; i++) {
plog.info('hello %j', deep)
}
setImmediate(cb)
},
function benchPinoDestInterpolateDeep (cb) {
for (var i = 0; i < max; i++) {
plogDest.info('hello %j', deep)
}
setImmediate(cb)
},
function benchPinoAsyncInterpolateDeep (cb) {
for (var i = 0; i < max; i++) {
plogAsync.info('hello %j', deep)
}
setImmediate(cb)
}
], 1000)
run(run)

View File

@@ -0,0 +1,182 @@
'use strict'

// Benchmark: raw pino logging throughput across three destinations — a plain
// Node write stream (plog), a sync pino.destination (plogDest) and an async
// pino.destination (plogAsync) — and across call shapes: plain string, object,
// child / grandchild loggers, multiple args and format interpolation.
const bench = require('fastbench')
const pino = require('../../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plog = pino(dest)
// Clear the module cache between instantiations so each logger comes from a
// fresh copy of the module and shares no module-level state with the others.
delete require.cache[require.resolve('../../')]
const plogDest = require('../../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../../')]
const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false }))
const plogChild = plog.child({ a: 'property' })
const plogDestChild = plogDest.child({ a: 'property' })
const plogAsyncChild = plogAsync.child({ a: 'property' })
const plogChildChild = plog.child({ a: 'property' }).child({ sub: 'child' })
const plogDestChildChild = plogDest.child({ a: 'property' }).child({ sub: 'child' })
const plogAsyncChildChild = plogAsync.child({ a: 'property' }).child({ sub: 'child' })
// Inner iterations per fastbench sample.
const max = 10
const run = bench([
  function benchPino (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoDest (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello world')
    }
    setImmediate(cb)
  },
  // NOTE(review): "Extreme" is presumably the legacy name for async mode —
  // kept as-is because the function name is the benchmark's printed label.
  function benchPinoExtreme (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoObj (cb) {
    for (var i = 0; i < max; i++) {
      plog.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestObj (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncObj (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChild (cb) {
    for (var i = 0; i < max; i++) {
      plogChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestChild (cb) {
    for (var i = 0; i < max; i++) {
      plogDestChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncChild (cb) {
    for (var i = 0; i < max; i++) {
      plogAsyncChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogDestChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogAsyncChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildCreation (cb) {
    const child = plog.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestChildCreation (cb) {
    const child = plogDest.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoMulti (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoDestMulti (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoAsyncMulti (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoDestInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  // Fixed: this entry was a duplicate of benchPinoDestInterpolate (same name,
  // same plogDest target). Every other call shape benchmarks the Pino/Dest/Async
  // trio, so the async interpolation variant was the intended third member.
  function benchPinoAsyncInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoDestInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoAsyncInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoDestInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  }
], 10000)
run(run)

View File

@@ -0,0 +1,75 @@
'use strict'

// Benchmark: cost of logging through child loggers of increasing nesting depth
// (1-4 levels), a second sibling child, and loggers intended to exercise the
// serializers path.
const bench = require('fastbench')
const pino = require('../../')
const base = pino(pino.destination('/dev/null'))
const child = base.child({})
const childChild = child.child({})
const childChildChild = childChild.child({})
const childChildChildChild = childChildChild.child({})
// Second direct child of the root, to compare against the first.
const child2 = base.child({})
// NOTE(review): despite the names, these loggers are created without any
// `serializers` option — confirm whether a serializers config was dropped here.
const baseSerializers = pino(pino.destination('/dev/null'))
const baseSerializersChild = baseSerializers.child({})
const baseSerializersChildSerializers = baseSerializers.child({})
// Inner iterations per fastbench sample.
const max = 100
const run = bench([
  function benchPinoBase (cb) {
    for (var i = 0; i < max; i++) {
      base.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChild (cb) {
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChild (cb) {
    for (var i = 0; i < max; i++) {
      childChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChildChild (cb) {
    for (var i = 0; i < max; i++) {
      childChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChildChildChild (cb) {
    for (var i = 0; i < max; i++) {
      childChildChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChild2 (cb) {
    for (var i = 0; i < max; i++) {
      child2.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoBaseSerializers (cb) {
    for (var i = 0; i < max; i++) {
      baseSerializers.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoBaseSerializersChild (cb) {
    for (var i = 0; i < max; i++) {
      baseSerializersChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoBaseSerializersChildSerializers (cb) {
    for (var i = 0; i < max; i++) {
      baseSerializersChildSerializers.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)
run(run)

86
node_modules/pino/benchmarks/internal/redact.bench.js generated vendored Normal file
View File

@@ -0,0 +1,86 @@
'use strict'

// Benchmark: overhead of the `redact` option (censoring path a.b.c), crossed
// with safe/unsafe stringification and sync/async destinations.
const bench = require('fastbench')
const pino = require('../../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plog = pino(dest)
// Each `delete require.cache` forces the next require to load a fresh module
// instance, so the logger variants do not share module-level state. The order
// of these statements is therefore significant.
delete require.cache[require.resolve('../../')]
const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false }))
delete require.cache[require.resolve('../../')]
const plogUnsafe = require('../../')({ safe: false }, dest)
delete require.cache[require.resolve('../../')]
const plogUnsafeAsync = require('../../')(
  { safe: false },
  pino.destination({ dest: '/dev/null', sync: false })
)
const plogRedact = pino({ redact: ['a.b.c'] }, dest)
delete require.cache[require.resolve('../../')]
const plogAsyncRedact = require('../../')(
  { redact: ['a.b.c'] },
  pino.destination({ dest: '/dev/null', sync: false })
)
delete require.cache[require.resolve('../../')]
const plogUnsafeRedact = require('../../')({ redact: ['a.b.c'], safe: false }, dest)
delete require.cache[require.resolve('../../')]
const plogUnsafeAsyncRedact = require('../../')(
  { redact: ['a.b.c'], safe: false },
  pino.destination({ dest: '/dev/null', sync: false })
)
// Inner iterations per fastbench sample.
const max = 10
// note that "redact me." is the same amount of bytes as the censor: "[Redacted]"
const run = bench([
  function benchPinoNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plog.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafe.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafeRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeAsyncNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafeAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeAsyncRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafeAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  }
], 10000)
run(run)

81
node_modules/pino/benchmarks/long-string.bench.js generated vendored Normal file
View File

@@ -0,0 +1,81 @@
'use strict'

// Benchmark: logging a ~2 KB random string, comparing bunyan, winston, bole
// and three pino destination setups (sync, buffered async, plain stream).
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
// Fresh module instances per logger variant — the cache deletes are
// order-significant.
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
const crypto = require('crypto')
// randomBytes(2000).toString() decodes as UTF-8, so the resulting string
// length varies; it is the same fixed payload for every logger under test.
const longStr = crypto.randomBytes(2000).toString()
// Inner iterations per fastbench sample.
const max = 10
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})
require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)
const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})
const run = bench([
  function benchBunyan (cb) {
    for (var i = 0; i < max; i++) {
      blog.info(longStr)
    }
    setImmediate(cb)
  },
  function benchWinston (cb) {
    for (var i = 0; i < max; i++) {
      chill.info(longStr)
    }
    setImmediate(cb)
  },
  function benchBole (cb) {
    for (var i = 0; i < max; i++) {
      bole.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPino (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPinoMinLength (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPinoNodeStream (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info(longStr)
    }
    setImmediate(cb)
  }
], 1000)
run(run)

193
node_modules/pino/benchmarks/multi-arg.bench.js generated vendored Normal file
View File

@@ -0,0 +1,193 @@
'use strict'

// Benchmark: printf-style interpolation across loggers (bunyan, winston, bole,
// pino in three destination setups) for four shapes: single %s, all of
// %s %j %d, extra trailing object, and a deeply nested %j payload.
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
// Fresh module instances per pino variant — cache deletes are order-significant.
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
delete require.cache[require.resolve('../')]
// Build a large nested object by self-embedding copies of package.json
// three levels deep; used as the %j payload.
const deep = require('../package.json')
deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep)))
deep.deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep)))
deep.deep.deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep)))
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})
require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)
const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})
// Inner iterations per fastbench sample.
const max = 10
const run = bench([
  function benchBunyanInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchWinstonInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchBoleInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchBunyanInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchWinstonInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchBoleInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchBunyanInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchWinstonInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchBoleInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchBunyanInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchWinstonInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchBoleInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %j', deep)
    }
    setImmediate(cb)
  }
], 10000)
run(run)

98
node_modules/pino/benchmarks/multistream.js generated vendored Normal file
View File

@@ -0,0 +1,98 @@
'use strict'

// Benchmark: pino.multistream fan-out to ten, four and one destination(s),
// compared against bunyan's multi-stream support. All streams point at
// /dev/null; some entries carry per-stream level filters.
const bench = require('fastbench')
const bunyan = require('bunyan')
const pino = require('../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const tenStreams = [
  { stream: dest },
  { stream: dest },
  { stream: dest },
  { stream: dest },
  { stream: dest },
  { level: 'debug', stream: dest },
  { level: 'debug', stream: dest },
  { level: 'trace', stream: dest },
  { level: 'warn', stream: dest },
  { level: 'fatal', stream: dest }
]
const pinomsTen = pino({ level: 'debug' }, pino.multistream(tenStreams))
const fourStreams = [
  { stream: dest },
  { stream: dest },
  { level: 'debug', stream: dest },
  { level: 'trace', stream: dest }
]
const pinomsFour = pino({ level: 'debug' }, pino.multistream(fourStreams))
const pinomsOne = pino({ level: 'info' }, pino.multistream(dest))
const blogOne = bunyan.createLogger({
  name: 'myapp',
  streams: [{ stream: dest }]
})
const blogTen = bunyan.createLogger({
  name: 'myapp',
  streams: tenStreams
})
const blogFour = bunyan.createLogger({
  name: 'myapp',
  streams: fourStreams
})
// Inner iterations per fastbench sample.
const max = 10
const run = bench([
  function benchBunyanTen (cb) {
    for (let i = 0; i < max; i++) {
      blogTen.info('hello world')
      blogTen.debug('hello world')
      blogTen.trace('hello world')
      blogTen.warn('hello world')
      blogTen.fatal('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoMSTen (cb) {
    for (let i = 0; i < max; i++) {
      pinomsTen.info('hello world')
      pinomsTen.debug('hello world')
      pinomsTen.trace('hello world')
      pinomsTen.warn('hello world')
      pinomsTen.fatal('hello world')
    }
    setImmediate(cb)
  },
  function benchBunyanFour (cb) {
    for (let i = 0; i < max; i++) {
      blogFour.info('hello world')
      blogFour.debug('hello world')
      blogFour.trace('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoMSFour (cb) {
    for (let i = 0; i < max; i++) {
      pinomsFour.info('hello world')
      pinomsFour.debug('hello world')
      pinomsFour.trace('hello world')
    }
    setImmediate(cb)
  },
  function benchBunyanOne (cb) {
    for (let i = 0; i < max; i++) {
      blogOne.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoMSOne (cb) {
    for (let i = 0; i < max; i++) {
      pinomsOne.info('hello world')
    }
    setImmediate(cb)
  }
], 10000)
// NOTE(review): called as run() here, not run(run) as in the other bench
// files, so this suite executes only once — confirm that is intentional.
run()

82
node_modules/pino/benchmarks/object.bench.js generated vendored Normal file
View File

@@ -0,0 +1,82 @@
'use strict'

// Benchmark: logging a small object ({ hello: 'world' }) across bunyan,
// winston, bole, a wrapped loglevel, and three pino destination setups.
const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
// loglevel wrapped with a timestamp prefix for a fairer comparison
// (see utils/wrap-log-level.js).
const loglevel = require('./utils/wrap-log-level')(dest)
const plogNodeStream = pino(dest)
// Fresh module instances per pino variant — cache deletes are order-significant.
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})
require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)
const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})
// Inner iterations per fastbench sample.
const max = 10
const run = bench([
  function benchBunyanObj (cb) {
    for (var i = 0; i < max; i++) {
      blog.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchWinstonObj (cb) {
    for (var i = 0; i < max; i++) {
      chill.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchBoleObj (cb) {
    for (var i = 0; i < max; i++) {
      bole.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchLogLevelObject (cb) {
    for (var i = 0; i < max; i++) {
      loglevel.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoObj (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthObj (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamObj (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)
run(run)

View File

@@ -0,0 +1,36 @@
'use strict'

// Generates the markdown "Benchmarks" document by shelling out to the
// runbench harness for each benchmark type and embedding its quiet (-q)
// output in fenced code blocks on stdout. Progress goes to stderr.
const { join } = require('node:path')
const { execSync } = require('node:child_process')
// Runs one benchmark type synchronously and returns its captured stdout.
const run = (type) => {
  process.stderr.write(`benchmarking ${type}\n`)
  return execSync(`node ${join(__dirname, 'runbench')} ${type} -q`)
}
// NOTE(review): the final multi-arg heading has no ${run(...)} block under
// it — confirm whether a ${run('multi-arg')} section was dropped here.
console.log(`
# Benchmarks
\`pino.info('hello world')\`:
\`\`\`
${run('basic')}
\`\`\`
\`pino.info({'hello': 'world'})\`:
\`\`\`
${run('object')}
\`\`\`
\`pino.info(aBigDeeplyNestedObject)\`:
\`\`\`
${run('deep-object')}
\`\`\`
\`pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})\`:
For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended
to include a timestamp and [bole](http://npm.im/bole) had
\`fastTime\` mode switched on.
`)

138
node_modules/pino/benchmarks/utils/runbench.js generated vendored Normal file
View File

@@ -0,0 +1,138 @@
'use strict'
const { type, platform, arch, release, cpus } = require('node:os')
const { resolve, join } = require('node:path')
const spawn = require('node:child_process').spawn
const pump = require('pump')
const split = require('split2')
const through = require('through2')
const steed = require('steed')
function usage () {
console.log(`
Pino Benchmarks
To run a benchmark, specify which to run:
・all run all benchmarks (takes a while)
・basic log a simple string
・object logging a basic object
・deep-object logging a large object
・multi-arg multiple log method arguments
・child child from a parent
・child-child child from a child
・child-creation child constructor
・formatters difference between with or without formatters
Example:
node runbench basic
`)
}
if (!process.argv[2]) {
usage()
process.exit()
}
const quiet = process.argv[3] === '-q'
const selectedBenchmark = process.argv[2].toLowerCase()
const benchmarkDir = resolve(__dirname, '..')
const benchmarks = {
basic: 'basic.bench.js',
object: 'object.bench.js',
'deep-object': 'deep-object.bench.js',
'multi-arg': 'multi-arg.bench.js',
'long-string': 'long-string.bench.js',
child: 'child.bench.js',
'child-child': 'child-child.bench.js',
'child-creation': 'child-creation.bench.js',
formatters: 'formatters.bench.js'
}
function runBenchmark (name, done) {
const benchmarkResults = {}
benchmarkResults[name] = {}
const processor = through(function (line, enc, cb) {
const [label, time] = ('' + line).split(': ')
const [target, iterations] = label.split('*')
const logger = target.replace('bench', '')
if (!benchmarkResults[name][logger]) benchmarkResults[name][logger] = []
benchmarkResults[name][logger].push({
time: time.replace('ms', ''),
iterations: iterations.replace(':', '')
})
cb()
})
if (quiet === false) console.log(`Running ${name.toUpperCase()} benchmark\n`)
const benchmark = spawn(
process.argv[0],
[join(benchmarkDir, benchmarks[name])]
)
if (quiet === false) {
benchmark.stdout.pipe(process.stdout)
}
pump(benchmark.stdout, split(), processor)
benchmark.on('exit', () => {
console.log()
if (done && typeof done === 'function') done(null, benchmarkResults)
})
}
// Totals the parsed `time` field (milliseconds, as a string) across all
// benchmark result entries.
function sum (arr) {
  return arr.reduce(
    (total, entry) => total + Number.parseFloat(entry.time),
    0
  )
}
// Prints the average time per logger for each benchmark in `results`
// (shape: { [benchName]: { [logger]: [{ time, iterations }, ...] } }),
// plus host/CPU info unless running in quiet mode.
function displayResults (results) {
  if (quiet === false) console.log('==========')
  const benchNames = Object.keys(results)
  for (var i = 0; i < benchNames.length; i += 1) {
    console.log(`${benchNames[i].toUpperCase()} benchmark averages`)
    const benchmark = results[benchNames[i]]
    const loggers = Object.keys(benchmark)
    for (var j = 0; j < loggers.length; j += 1) {
      const logger = benchmark[loggers[j]]
      // Mean of all recorded runs for this logger.
      const average = sum(logger) / logger.length
      console.log(`${loggers[j]} average: ${average.toFixed(3)}ms`)
    }
  }
  if (quiet === false) {
    console.log('==========')
    console.log(
      `System: ${type()}/${platform()} ${arch()} ${release()}`,
      `~ ${cpus()[0].model} (cores/threads: ${cpus().length})`
    )
  }
}
// Adapter so runBenchmark can be queued with its name bound via `this`.
function toBench (done) {
  runBenchmark(this.name, done)
}
// Build the task queue: either the single selected benchmark, or every
// benchmark in the map when 'all' was requested.
const benchQueue = []
if (selectedBenchmark !== 'all') {
  benchQueue.push(toBench.bind({ name: selectedBenchmark }))
} else {
  const keys = Object.keys(benchmarks)
  for (var i = 0; i < keys.length; i += 1) {
    benchQueue.push(toBench.bind({ name: keys[i] }))
  }
}
// Run benchmarks one at a time (each spawns a child process), then print
// the averages for each result set.
steed.series(benchQueue, function (err, results) {
  if (err) return console.error(err.message)
  results.forEach(displayResults)
})

55
node_modules/pino/benchmarks/utils/wrap-log-level.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
'use strict'

// Loads the `loglevel` library source into a sandboxed vm context wired to a
// given destination stream, and patches its method factory so every log line
// is prefixed with an ISO timestamp (for fairer benchmark comparisons).
const { readFileSync } = require('node:fs')
// Consistency fix: use `node:`-prefixed specifiers like the rest of this
// file (and the other benchmark utilities) already do.
const vm = require('node:vm')
const { join } = require('node:path')
const code = readFileSync(
  join(__dirname, '..', '..', 'node_modules', 'loglevel', 'lib', 'loglevel.js')
)
const { Console } = require('node:console')
// Builds a timestamping loglevel instance that writes to `dest` (used for
// both stdout and stderr of the sandboxed console).
function build (dest) {
  const sandbox = {
    module: {},
    console: new Console(dest, dest)
  }
  const context = vm.createContext(sandbox)
  const script = new vm.Script(code)
  script.runInContext(context)
  // loglevel attaches itself as `log` on the sandbox global.
  const loglevel = sandbox.log
  const originalFactory = loglevel.methodFactory
  loglevel.methodFactory = function (methodName, logLevel, loggerName) {
    const rawMethod = originalFactory(methodName, logLevel, loggerName)
    return function () {
      const time = new Date()
      let array
      if (typeof arguments[0] === 'string') {
        // String first-arg: prepend the timestamp directly onto the message.
        arguments[0] = '[' + time.toISOString() + '] ' + arguments[0]
        rawMethod.apply(null, arguments)
      } else {
        // Otherwise pass the timestamp as an extra leading argument.
        array = new Array(arguments.length + 1)
        array[0] = '[' + time.toISOString() + ']'
        for (var i = 0; i < arguments.length; i++) {
          array[i + 1] = arguments[i]
        }
        rawMethod.apply(null, array)
      }
    }
  }
  loglevel.setLevel(loglevel.levels.INFO)
  return loglevel
}
module.exports = build
// Ad-hoc smoke test when executed directly.
if (require.main === module) {
  const loglevel = build(process.stdout)
  loglevel.info('hello')
  loglevel.info({ hello: 'world' })
  loglevel.info('hello %j', { hello: 'world' })
}

6
node_modules/pino/bin.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
#!/usr/bin/env node
// The `pino` CLI no longer exists: tell the user to switch to pino-pretty
// and exit with a failure status.
const message = '`pino` cli has been removed. Use `pino-pretty` cli instead.\n' +
  '\nSee: https://github.com/pinojs/pino-pretty'
console.error(message)
process.exit(1)

505
node_modules/pino/browser.js generated vendored Normal file
View File

@@ -0,0 +1,505 @@
'use strict'
const format = require('quick-format-unescaped')
module.exports = pino
// Console of the detected global object, if any; individual methods may be
// missing on some platforms, hence the fallback handling elsewhere.
// (pfGlobalThisOrFallback is defined later in this file, outside this view.)
const _console = pfGlobalThisOrFallback().console || {}
// Browser stand-ins for the Node stdSerializers: http mappers are no-ops and
// the wrap* helpers are identity functions. `mock`/`asErrValue`/`passthrough`
// are defined later in this file.
const stdSerializers = {
  mapHttpRequest: mock,
  mapHttpResponse: mock,
  wrapRequestSerializer: passthrough,
  wrapResponseSerializer: passthrough,
  wrapErrorSerializer: passthrough,
  req: mock,
  res: mock,
  err: asErrValue,
  errWithCause: asErrValue
}
// Maps a level label to its numeric value on `logger`; 'silent' is special
// and maps to Infinity so no numeric level can ever reach it.
function levelToValue (level, logger) {
  if (level === 'silent') {
    return Infinity
  }
  return logger.levels.values[level]
}
// Symbol key under which each logger stores its per-level base write functions.
const baseLogFunctionSymbol = Symbol('pino.logFuncs')
// Symbol key for the { logger, parent } entry linking a logger into its
// parent/child hierarchy chain.
const hierarchySymbol = Symbol('pino.hierarchy')
// When the console lacks a method for a level, fall back to this console
// method instead (e.g. no console.fatal -> console.error).
const logFallbackMap = {
  error: 'log',
  fatal: 'error',
  warn: 'error',
  info: 'log',
  debug: 'log',
  trace: 'log'
}
// Links `childLogger` into the logger hierarchy beneath `parentLogger` by
// storing a { logger, parent } entry under the hierarchy symbol, so binding
// chains can later be rebuilt by walking parent links.
function appendChildLogger (parentLogger, childLogger) {
  childLogger[hierarchySymbol] = {
    logger: childLogger,
    parent: parentLogger[hierarchySymbol]
  }
}
// Records on `logger` the base write function for every level: prefer an
// explicit method on the write target `proto`, then the matching console
// method, then the console fallback for that level, then a no-op.
function setupBaseLogFunctions (logger, levels, proto) {
  const logFunctions = {}
  for (const level of levels) {
    if (proto[level]) {
      logFunctions[level] = proto[level]
    } else {
      logFunctions[level] = _console[level] || _console[logFallbackMap[level] || 'log'] || noop
    }
  }
  logger[baseLogFunctionSymbol] = logFunctions
}
// Resolves the browser `serialize` option into a list of keys to serialize:
// an array is kept minus the '!stdSerializers.err' opt-out marker, `true`
// means every key that has a serializer, and anything else disables
// serialization (returns false).
function shouldSerialize (serialize, serializers) {
  if (Array.isArray(serialize)) {
    return serialize.filter(function (key) {
      return key !== '!stdSerializers.err'
    })
  }
  if (serialize === true) {
    return Object.keys(serializers)
  }
  return false
}
// Browser pino factory: builds a console-backed (or custom-write-backed)
// logger honoring level filtering, optional serialization, custom levels,
// child loggers, and the `transmit` hook for shipping log events elsewhere.
function pino (opts) {
  opts = opts || {}
  opts.browser = opts.browser || {}
  const transmit = opts.browser.transmit
  if (transmit && typeof transmit.send !== 'function') { throw Error('pino: transmit option must have a send function') }
  // Write target: a custom browser.write, else the global console.
  const proto = opts.browser.write || _console
  // A custom write function always receives the object form.
  if (opts.browser.write) opts.browser.asObject = true
  const serializers = opts.serializers || {}
  // Either a list of keys to serialize, or false to disable.
  const serialize = shouldSerialize(opts.browser.serialize, serializers)
  let stdErrSerialize = opts.browser.serialize
  if (
    Array.isArray(opts.browser.serialize) &&
    opts.browser.serialize.indexOf('!stdSerializers.err') > -1
  ) stdErrSerialize = false
  const customLevels = Object.keys(opts.customLevels || {})
  const levels = ['error', 'fatal', 'warn', 'info', 'debug', 'trace'].concat(customLevels)
  // A single write function serves every level.
  if (typeof proto === 'function') {
    levels.forEach(function (level) {
      proto[level] = proto
    })
  }
  if (opts.enabled === false || opts.browser.disabled) opts.level = 'silent'
  const level = opts.level || 'info'
  const logger = Object.create(proto)
  if (!logger.log) logger.log = noop
  setupBaseLogFunctions(logger, levels, proto)
  // setup root hierarchy entry
  appendChildLogger({}, logger)
  Object.defineProperty(logger, 'levelVal', {
    get: getLevelVal
  })
  Object.defineProperty(logger, 'level', {
    get: getLevel,
    set: setLevel
  })
  // Shared per-factory options captured by child() and setLevel().
  const setOpts = {
    transmit,
    serialize,
    asObject: opts.browser.asObject,
    asObjectBindingsOnly: opts.browser.asObjectBindingsOnly,
    formatters: opts.browser.formatters,
    levels,
    timestamp: getTimeFunction(opts),
    messageKey: opts.messageKey || 'msg',
    onChild: opts.onChild || noop
  }
  logger.levels = getLevels(opts)
  logger.level = level
  logger.isLevelEnabled = function (level) {
    if (!this.levels.values[level]) {
      return false
    }
    return this.levels.values[level] >= this.levels.values[this.level]
  }
  // Stub out the Node EventEmitter/stream surface so browser consumers can
  // call these without errors.
  logger.setMaxListeners = logger.getMaxListeners =
    logger.emit = logger.addListener = logger.on =
    logger.prependListener = logger.once =
    logger.prependOnceListener = logger.removeListener =
    logger.removeAllListeners = logger.listeners =
    logger.listenerCount = logger.eventNames =
    logger.write = logger.flush = noop
  logger.serializers = serializers
  logger._serialize = serialize
  logger._stdErrSerialize = stdErrSerialize
  logger.child = function (...args) { return child.call(this, setOpts, ...args) }
  if (transmit) logger._logEvent = createLogEventShape()
  function getLevelVal () {
    return levelToValue(this.level, this)
  }
  function getLevel () {
    return this._level
  }
  // Setting the level (re)binds every level method to either noop or a
  // wrapped write function, honoring the new threshold.
  function setLevel (level) {
    if (level !== 'silent' && !this.levels.values[level]) {
      throw Error('unknown level ' + level)
    }
    this._level = level
    set(this, setOpts, logger, 'error') // <-- must stay first
    set(this, setOpts, logger, 'fatal')
    set(this, setOpts, logger, 'warn')
    set(this, setOpts, logger, 'info')
    set(this, setOpts, logger, 'debug')
    set(this, setOpts, logger, 'trace')
    customLevels.forEach((level) => {
      set(this, setOpts, logger, level)
    })
  }
  // Creates a child logger whose prototype chain points back at its parent,
  // merging serializers from bindings/childOptions when serialization is on.
  function child (setOpts, bindings, childOptions) {
    if (!bindings) {
      throw new Error('missing bindings for child Pino')
    }
    childOptions = childOptions || {}
    // Legacy location: serializers may arrive on the bindings object.
    if (serialize && bindings.serializers) {
      childOptions.serializers = bindings.serializers
    }
    const childOptionsSerializers = childOptions.serializers
    if (serialize && childOptionsSerializers) {
      var childSerializers = Object.assign({}, serializers, childOptionsSerializers)
      var childSerialize = opts.browser.serialize === true
        ? Object.keys(childSerializers)
        : serialize
      delete bindings.serializers
      applySerializers([bindings], childSerialize, childSerializers, this._stdErrSerialize)
    }
    function Child (parent) {
      this._childLevel = (parent._childLevel | 0) + 1
      // make sure bindings are available in the `set` function
      this.bindings = bindings
      if (childSerializers) {
        this.serializers = childSerializers
        this._serialize = childSerialize
      }
      if (transmit) {
        this._logEvent = createLogEventShape(
          [].concat(parent._logEvent.bindings, bindings)
        )
      }
    }
    Child.prototype = this
    const newLogger = new Child(this)
    // must happen before the level is assigned
    appendChildLogger(this, newLogger)
    newLogger.child = function (...args) { return child.call(this, setOpts, ...args) }
    // required to actually initialize the logger functions for any given child
    newLogger.level = childOptions.level || this.level // allow level to be set by childOptions
    setOpts.onChild(newLogger)
    return newLogger
  }
  return logger
}
// Merges any `customLevels` from `opts` on top of the default pino level
// tables, producing both directions of the mapping (label→value and
// value→label).
function getLevels (opts) {
  const customLevels = opts.customLevels || {}
  return {
    values: Object.assign({}, pino.levels.values, customLevels),
    labels: Object.assign({}, pino.levels.labels, invertObject(customLevels))
  }
}
// Swaps keys and values of a plain object: { info: 30 } -> { 30: 'info' }.
function invertObject (obj) {
  const inverted = {}
  for (const key of Object.keys(obj)) {
    inverted[obj[key]] = key
  }
  return inverted
}
// Default pino level tables: label -> numeric value and the inverse.
pino.levels = {
  values: {
    fatal: 60,
    error: 50,
    warn: 40,
    info: 30,
    debug: 20,
    trace: 10
  },
  labels: {
    10: 'trace',
    20: 'debug',
    30: 'info',
    40: 'warn',
    50: 'error',
    60: 'fatal'
  }
}
pino.stdSerializers = stdSerializers
// Time functions (nullTime/epochTime/unixTime/isoTime) are defined later in
// this file, outside this view.
pino.stdTimeFunctions = Object.assign({}, { nullTime, epochTime, unixTime, isoTime })
// Collects the `bindings` objects of `logger` and every ancestor by walking
// the hierarchy chain upwards, then reverses so the root's bindings come
// first (the order they should be prepended in).
function getBindingChain (logger) {
  const bindings = []
  if (logger.bindings) {
    bindings.push(logger.bindings)
  }
  let node = logger[hierarchySymbol]
  while (node.parent) {
    node = node.parent
    if (node.logger.bindings) {
      bindings.push(node.logger.bindings)
    }
  }
  return bindings.reverse()
}
// (Re)binds the method for `level` on `self`: noop when below the current
// threshold (unless transmit still needs it), otherwise the wrapped base
// write function with the logger's binding chain prepended.
function set (self, opts, rootLogger, level) {
  // override the current log functions with either `noop` or the base log function
  Object.defineProperty(self, level, {
    value: (levelToValue(self.level, rootLogger) > levelToValue(level, rootLogger)
      ? noop
      : rootLogger[baseLogFunctionSymbol][level]),
    writable: true,
    enumerable: true,
    configurable: true
  })
  if (self[level] === noop) {
    // Even a silenced method must still wrap when transmit wants this level.
    if (!opts.transmit) return
    const transmitLevel = opts.transmit.level || self.level
    const transmitValue = levelToValue(transmitLevel, rootLogger)
    const methodValue = levelToValue(level, rootLogger)
    if (methodValue < transmitValue) return
  }
  // make sure the log format is correct
  self[level] = createWrap(self, opts, rootLogger, level)
  // prepend bindings if it is not the root logger
  const bindings = getBindingChain(self)
  if (bindings.length === 0) {
    // early exit in case for rootLogger
    return
  }
  self[level] = prependBindingsInArguments(bindings, self[level])
}
// Wraps `logFunc` so every call receives the binding objects ahead of the
// caller's own arguments, preserving `this`.
function prependBindingsInArguments (bindings, logFunc) {
  return function (...args) {
    return logFunc.apply(this, bindings.concat(args))
  }
}
/**
 * Wrap the base write function for `level` so each call applies the
 * configured timestamp, serializers, object formatting and transmit hook
 * before delegating to the underlying console/write function.
 */
function createWrap (self, opts, rootLogger, level) {
  return (function (write) {
    return function LOG () {
      const ts = opts.timestamp()
      const args = new Array(arguments.length)
      // use the console object as receiver when this logger's prototype is
      // the console, so the bound console methods are invoked correctly
      const proto = (Object.getPrototypeOf && Object.getPrototypeOf(this) === _console) ? _console : this
      for (var i = 0; i < args.length; i++) args[i] = arguments[i]
      // track whether serializers ran here so `transmit` does not apply them twice
      var argsIsSerialized = false
      if (opts.serialize) {
        applySerializers(args, this._serialize, this.serializers, this._stdErrSerialize)
        argsIsSerialized = true
      }
      if (opts.asObject || opts.formatters) {
        write.call(proto, ...asObject(this, level, args, ts, opts))
      } else write.apply(proto, args)
      if (opts.transmit) {
        const transmitLevel = opts.transmit.level || self._level
        const transmitValue = levelToValue(transmitLevel, rootLogger)
        const methodValue = levelToValue(level, rootLogger)
        // skip transmit when this method's severity is below the transmit threshold
        if (methodValue < transmitValue) return
        transmit(this, {
          ts,
          methodLevel: level,
          methodValue,
          transmitLevel,
          transmitValue: rootLogger.levels.values[opts.transmit.level || self._level],
          send: opts.transmit.send,
          val: levelToValue(self._level, rootLogger)
        }, args, argsIsSerialized)
      }
    }
  })(self[baseLogFunctionSymbol][level])
}
/**
 * Build the argument list for a write call when `asObject` or `formatters`
 * is enabled: merges leading object arguments into one log object carrying
 * `time` and `level`, then applies the `level`/`log` formatters.
 *
 * Returns an array to spread into the write call: `[logObject]` in plain
 * `asObject` mode (message folded into the object under `opts.messageKey`),
 * or `[logObject, ...remainingArgs]` in `asObjectBindingsOnly` mode, where
 * the message and its format args are left for the console to render.
 */
function asObject (logger, level, args, ts, opts) {
  const {
    level: levelFormatter,
    log: logObjectFormatter = (obj) => obj
  } = opts.formatters || {}
  const argsCloned = args.slice()
  let msg = argsCloned[0]
  const logObject = {}
  // number of leading object arguments to merge: one per child level
  // in the hierarchy, with a minimum of 1
  let lvl = (logger._childLevel | 0) + 1
  if (lvl < 1) lvl = 1
  if (ts) {
    logObject.time = ts
  }
  if (levelFormatter) {
    const formattedLevel = levelFormatter(level, logger.levels.values[level])
    Object.assign(logObject, formattedLevel)
  } else {
    logObject.level = logger.levels.values[level]
  }
  if (opts.asObjectBindingsOnly) {
    // merge only the leading object arguments (the bindings); keep the
    // message and any remaining args unformatted
    if (msg !== null && typeof msg === 'object') {
      while (lvl-- && typeof argsCloned[0] === 'object') {
        Object.assign(logObject, argsCloned.shift())
      }
    }
    const formattedLogObject = logObjectFormatter(logObject)
    return [formattedLogObject, ...argsCloned]
  } else {
    // deliberate, catching objects, arrays
    if (msg !== null && typeof msg === 'object') {
      while (lvl-- && typeof argsCloned[0] === 'object') {
        Object.assign(logObject, argsCloned.shift())
      }
      // next argument (if any) becomes the printf-style message
      msg = argsCloned.length ? format(argsCloned.shift(), argsCloned) : undefined
    } else if (typeof msg === 'string') msg = format(argsCloned.shift(), argsCloned)
    if (msg !== undefined) logObject[opts.messageKey] = msg
    const formattedLogObject = logObjectFormatter(logObject)
    return [formattedLogObject]
  }
}
/**
 * Apply serializers to `args` in place.
 *
 * Errors are replaced with the standard error serializer output when
 * `stdErrSerialize` is enabled. For plain (non-array) objects, each key
 * listed in `serialize` that has a matching function in `serializers`
 * has its value serialized.
 */
function applySerializers (args, serialize, serializers, stdErrSerialize) {
  for (const i in args) {
    if (stdErrSerialize && args[i] instanceof Error) {
      args[i] = pino.stdSerializers.err(args[i])
    } else if (typeof args[i] === 'object' && !Array.isArray(args[i]) && serialize) {
      for (const k in args[i]) {
        if (serialize.indexOf(k) > -1 && k in serializers) {
          args[i][k] = serializers[k](args[i][k])
        }
      }
    }
  }
}
/**
 * Deliver a log event to the user-supplied `opts.send` function, then
 * reset the logger's `_logEvent` accumulator (keeping its bindings).
 *
 * Serializers are always applied to transmitted messages — even when
 * they were not applied to the console output — unless the caller
 * indicates via `argsIsSerialized` that they already ran.
 */
function transmit (logger, opts, args, argsIsSerialized = false) {
  const send = opts.send
  const ts = opts.ts
  const methodLevel = opts.methodLevel
  const methodValue = opts.methodValue
  const val = opts.val
  const bindings = logger._logEvent.bindings
  if (!argsIsSerialized) {
    applySerializers(
      args,
      logger._serialize || Object.keys(logger.serializers),
      logger.serializers,
      logger._stdErrSerialize === undefined ? true : logger._stdErrSerialize
    )
  }
  logger._logEvent.ts = ts
  // messages are the call arguments minus any binding objects
  logger._logEvent.messages = args.filter(function (arg) {
    // bindings can only be objects, so reference equality check via indexOf is fine
    return bindings.indexOf(arg) === -1
  })
  logger._logEvent.level.label = methodLevel
  logger._logEvent.level.value = methodValue
  send(methodLevel, logger._logEvent, val)
  // start a fresh event for the next call, carrying the bindings forward
  logger._logEvent = createLogEventShape(bindings)
}
/**
 * Create an empty logEvent skeleton, optionally seeded with an existing
 * `bindings` array (falls back to a fresh empty array).
 */
function createLogEventShape (bindings) {
  const initialBindings = bindings || []
  return {
    ts: 0,
    messages: [],
    bindings: initialBindings,
    level: { label: '', value: 0 }
  }
}
/**
 * Serialize an Error into a plain object carrying `type` (constructor
 * name), `msg` (message) and `stack`, plus any enumerable properties of
 * the error not already present on the result.
 */
function asErrValue (err) {
  const serialized = {
    type: err.constructor.name,
    msg: err.message,
    stack: err.stack
  }
  // copy extra enumerable properties (for-in also walks the prototype chain)
  for (const prop in err) {
    if (serialized[prop] === undefined) {
      serialized[prop] = err[prop]
    }
  }
  return serialized
}
/**
 * Resolve the timestamp generator from the options: a user-supplied
 * function wins, `timestamp: false` disables timestamps (nullTime),
 * and anything else falls back to epoch milliseconds (epochTime).
 */
function getTimeFunction (opts) {
  const ts = opts.timestamp
  if (typeof ts === 'function') return ts
  return ts === false ? nullTime : epochTime
}
// Small shared helpers.
// stand-in factory returning an empty object
function mock () { return {} }
// identity function, used where no transform applies
function passthrough (a) { return a }
// disabled log method
function noop () {}
// `timestamp: false` -> no time field on the log object
function nullTime () { return false }
// default timestamp: Unix epoch in milliseconds
function epochTime () { return Date.now() }
// Unix epoch in seconds, rounded
function unixTime () { return Math.round(Date.now() / 1000.0) }
function isoTime () { return new Date(Date.now()).toISOString() } // using Date.now() for testability
/* eslint-disable */
/* istanbul ignore next */
// Resolve the global object across environments. Prefers native
// `globalThis`; otherwise installs a temporary, self-deleting getter on
// Object.prototype (the globalThis polyfill trick) to capture the global,
// and finally falls back to `self` (workers), `window` (browsers), `this`,
// or an empty object.
function pfGlobalThisOrFallback () {
  // return `o` when it is defined (and truthy), else false
  function defd (o) { return typeof o !== 'undefined' && o }
  try {
    if (typeof globalThis !== 'undefined') return globalThis
    Object.defineProperty(Object.prototype, 'globalThis', {
      get: function () {
        // remove the getter on first access so Object.prototype is not
        // left permanently patched, then cache the captured global
        delete Object.prototype.globalThis
        return (this.globalThis = this)
      },
      configurable: true
    })
    return globalThis
  } catch (e) {
    return defd(self) || defd(window) || defd(this) || {}
  }
}
/* eslint-enable */
// Expose the logger factory both as the default export and as a named export.
module.exports.default = pino
module.exports.pino = pino

25
node_modules/pino/build/sync-version.js generated vendored Normal file
View File

@@ -0,0 +1,25 @@
'use strict'
// Build script: regenerates lib/meta.js (and, when a version argument is
// given, rewrites package.json) so the runtime version stays in sync.
// Usage: node build/sync-version.js [version]  (e.g. "v9.0.0" or "9.0.0")
const fs = require('node:fs')
const path = require('node:path')
let { version } = require('../package.json')
// optional version override from the command line
let passedVersion = process.argv[2]
if (passedVersion) {
  // normalize: strip whitespace and a leading "v"
  passedVersion = passedVersion.trim().replace(/^v/, '')
  if (version !== passedVersion) {
    console.log(`Syncing version from ${version} to ${passedVersion}`)
    version = passedVersion
    // rewrite package.json (resolved from the current working directory)
    // with the new version, pretty-printed with a trailing newline
    const packageJson = require('../package.json')
    packageJson.version = version
    fs.writeFileSync(path.resolve('./package.json'), JSON.stringify(packageJson, null, 2) + '\n', { encoding: 'utf-8' })
  }
}
// regenerate lib/meta.js so the library can report its own version
const metaContent = `'use strict'
module.exports = { version: '${version}' }
`
fs.writeFileSync(path.resolve('./lib/meta.js'), metaContent, { encoding: 'utf-8' })

1509
node_modules/pino/docs/api.md generated vendored Normal file

File diff suppressed because it is too large Load Diff

40
node_modules/pino/docs/asynchronous.md generated vendored Normal file
View File

@@ -0,0 +1,40 @@
# Asynchronous Logging
Asynchronous logging enables the minimum overhead of Pino.
Asynchronous logging works by buffering log messages and writing them in larger chunks.
```js
const pino = require('pino')
const logger = pino(pino.destination({
dest: './my-file', // omit for stdout
minLength: 4096, // Buffer before writing
sync: false // Asynchronous logging
}))
```
It's always possible to turn on synchronous logging by passing `sync: true`.
In this mode of operation, log messages are directly written to the
output stream as the messages are generated with a _blocking_ operation.
* See [`pino.destination`](/docs/api.md#pino-destination)
* `pino.destination` is implemented on [`sonic-boom` ⇗](https://github.com/mcollina/sonic-boom).
### AWS Lambda
Asynchronous logging is disabled by default on AWS Lambda or any other environment
that modifies `process.stdout`. If forcefully turned on, we recommend calling `dest.flushSync()` at the end
of each function execution to avoid losing data.
## Caveats
Asynchronous logging has a couple of important caveats:
* As opposed to the synchronous mode, there is not a one-to-one relationship between
calls to logging methods (e.g. `logger.info`) and writes to a log file
* There is a possibility of the most recently buffered log messages being lost
in case of a system failure, e.g. a power cut.
See also:
* [`pino.destination` API](/docs/api.md#pino-destination)
* [`destination` parameter](/docs/api.md#destination)

55
node_modules/pino/docs/benchmarks.md generated vendored Normal file
View File

@@ -0,0 +1,55 @@
# Benchmarks
`pino.info('hello world')`:
```
BASIC benchmark averages
Bunyan average: 377.434ms
Winston average: 270.249ms
Bole average: 172.690ms
Debug average: 220.527ms
LogLevel average: 222.802ms
Pino average: 114.801ms
PinoMinLength average: 70.968ms
PinoNodeStream average: 159.192ms
```
`pino.info({'hello': 'world'})`:
```
OBJECT benchmark averages
BunyanObj average: 410.379ms
WinstonObj average: 273.120ms
BoleObj average: 185.069ms
LogLevelObject average: 433.425ms
PinoObj average: 119.315ms
PinoMinLengthObj average: 76.968ms
PinoNodeStreamObj average: 164.268ms
```
`pino.info(aBigDeeplyNestedObject)`:
```
DEEP-OBJECT benchmark averages
BunyanDeepObj average: 1.839ms
WinstonDeepObj average: 5.604ms
BoleDeepObj average: 3.422ms
LogLevelDeepObj average: 11.716ms
PinoDeepObj average: 2.256ms
PinoMinLengthDeepObj average: 2.240ms
PinoNodeStreamDeepObj average: 2.595ms
```
`pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})`:
For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended
to include a timestamp and [bole](http://npm.im/bole) had
`fastTime` mode switched on.

242
node_modules/pino/docs/browser.md generated vendored Normal file
View File

@@ -0,0 +1,242 @@
# Browser API
Pino is compatible with [`browserify`](https://npm.im/browserify) for browser-side usage:
This can be useful with isomorphic/universal JavaScript code.
By default, in the browser,
`pino` uses corresponding [Log4j](https://en.wikipedia.org/wiki/Log4j) `console` methods (`console.error`, `console.warn`, `console.info`, `console.debug`, `console.trace`) and uses `console.error` for any `fatal` level logs.
## Options
Pino can be passed a `browser` object in the options object,
which can have the following properties:
### `asObject` (Boolean)
```js
const pino = require('pino')({browser: {asObject: true}})
```
The `asObject` option will create a pino-like log object instead of
passing all arguments to a console method, for instance:
```js
pino.info('hi') // creates and logs {msg: 'hi', level: 30, time: <ts>}
```
When `write` is set, `asObject` will always be `true`.
### `asObjectBindingsOnly` (Boolean)
```js
const pino = require('pino')({browser: {asObjectBindingsOnly: true}})
```
The `asObjectBindingsOnly` option is similar to `asObject` but will keep the message
and arguments unformatted. This allows to defer formatting the message to the
actual call to `console` methods, where browsers then have richer formatting in
their devtools than when pino will format the message to a string first.
```js
pino.info('hello %s', 'world') // creates and logs {level: 30, time: <ts>}, 'hello %s', 'world'
```
### `formatters` (Object)
An object containing functions for formatting the shape of the log lines. When provided, it enables the logger to produce a pino-like log object with customized formatting. Currently, it supports formatting for the `level` object only.
##### `level`
Changes the shape of the log level. The default shape is `{ level: number }`.
The function takes two arguments, the label of the level (e.g. `'info'`)
and the numeric value (e.g. `30`).
```js
const formatters = {
level (label, number) {
return { level: number }
}
}
```
### `write` (Function | Object)
Instead of passing log messages to `console.log` they can be passed to
a supplied function.
If `write` is set to a single function, all logging objects are passed
to this function.
```js
const pino = require('pino')({
browser: {
write: (o) => {
// do something with o
}
}
})
```
If `write` is an object, it can have methods that correspond to the
levels. When a message is logged at a given level, the corresponding
method is called. If a method isn't present, the logging falls back
to using the `console`.
```js
const pino = require('pino')({
browser: {
write: {
info: function (o) {
//process info log object
},
error: function (o) {
//process error log object
}
}
}
})
```
### `serialize`: (Boolean | Array)
The serializers provided to `pino` are ignored by default in the browser, including
the standard serializers provided with Pino. Since the default destination for log
messages is the console, values such as `Error` objects are enhanced for inspection,
which they otherwise wouldn't be if the Error serializer was enabled.
We can turn all serializers on,
```js
const pino = require('pino')({
browser: {
serialize: true
}
})
```
Or we can selectively enable them via an array:
```js
const pino = require('pino')({
serializers: {
custom: myCustomSerializer,
another: anotherSerializer
},
browser: {
serialize: ['custom']
}
})
// following will apply myCustomSerializer to the custom property,
// but will not apply anotherSerializer to another key
pino.info({custom: 'a', another: 'b'})
```
When `serialize` is `true` the standard error serializer is also enabled (see https://github.com/pinojs/pino/blob/master/docs/api.md#stdSerializers).
This is a global serializer, which will apply to any `Error` objects passed to the logger methods.
If `serialize` is an array the standard error serializer is also automatically enabled, it can
be explicitly disabled by including a string in the serialize array: `!stdSerializers.err`, like so:
```js
const pino = require('pino')({
serializers: {
custom: myCustomSerializer,
another: anotherSerializer
},
browser: {
serialize: ['!stdSerializers.err', 'custom'] //will not serialize Errors, will serialize `custom` keys
}
})
```
The `serialize` array also applies to any child logger serializers (see https://github.com/pinojs/pino/blob/master/docs/api.md#discussion-2
for how to set child-bound serializers).
Unlike server pino the serializers apply to every object passed to the logger method,
if the `asObject` option is `true`, this results in the serializers applying to the
first object (as in server pino).
For more info on serializers see https://github.com/pinojs/pino/blob/master/docs/api.md#mergingobject.
### `transmit` (Object)
An object with `send` and `level` properties.
The `transmit.level` property specifies the minimum level (inclusive) of when the `send` function
should be called; if not supplied, the `send` function will be called based on the main logging `level`
(set via `options.level`, defaulting to `info`).
The `transmit` object must have a `send` function which will be called after
writing the log message. The `send` function is passed the level of the log
message and a `logEvent` object.
The `logEvent` object is a data structure representing a log message, it represents
the arguments passed to a logger statement, the level
at which they were logged, and the hierarchy of child bindings.
The `logEvent` format is structured like so:
```js
{
ts = Number,
messages = Array,
bindings = Array,
level: { label = String, value = Number}
}
```
The `ts` property is a Unix epoch timestamp in milliseconds, the time is taken from the moment the
logger method is called.
The `messages` array is all arguments passed to logger method, (for instance `logger.info('a', 'b', 'c')`
would result in `messages` array `['a', 'b', 'c']`).
The `bindings` array represents each child logger (if any), and the relevant bindings.
For instance, given `logger.child({a: 1}).child({b: 2}).info({c: 3})`, the bindings array
would hold `[{a: 1}, {b: 2}]` and the `messages` array would be `[{c: 3}]`. The `bindings`
are ordered according to their position in the child logger hierarchy, with the lowest index
being the top of the hierarchy.
By default, serializers are not applied to log output in the browser, but they will *always* be
applied to `messages` and `bindings` in the `logEvent` object. This allows us to ensure a consistent
format for all values between server and client.
The `level` holds the label (for instance `info`), and the corresponding numerical value
(for instance `30`). This could be important in cases where client-side level values and
labels differ from server-side.
The point of the `send` function is to remotely record log messages:
```js
const pino = require('pino')({
browser: {
transmit: {
level: 'warn',
send: function (level, logEvent) {
if (level === 'warn') {
// maybe send the logEvent to a separate endpoint
// or maybe analyze the messages further before sending
}
// we could also use the `logEvent.level.value` property to determine
// numerical value
if (logEvent.level.value >= 50) { // covers error and fatal
// send the logEvent somewhere
}
}
}
}
})
```
### `disabled` (Boolean)
```js
const pino = require('pino')({browser: {disabled: true}})
```
The `disabled` option will disable logging in browser if set
to `true`, by default it is set to `false`.

40
node_modules/pino/docs/bundling.md generated vendored Normal file
View File

@@ -0,0 +1,40 @@
# Bundling
Due to its internal architecture based on Worker Threads, it is not possible to bundle Pino *without* generating additional files.
In particular, a bundler must ensure that the following files are also bundled separately:
* `lib/worker.js` from the `thread-stream` dependency
* `file.js`
* `lib/worker.js`
* Any transport used by the user (like `pino-pretty`)
Once the files above have been generated, the bundler must also add information about the files above by injecting a code that sets `__bundlerPathsOverrides` in the `globalThis` object.
The variable is an object whose keys are an identifier for the files and the values are the paths of files relative to the currently bundled files.
Example:
```javascript
// Inject this using your bundle plugin
globalThis.__bundlerPathsOverrides = {
  'thread-stream-worker': pinoWebpackAbsolutePath('./thread-stream-worker.js'),
'pino/file': pinoWebpackAbsolutePath('./pino-file.js'),
'pino-worker': pinoWebpackAbsolutePath('./pino-worker.js'),
'pino-pretty': pinoWebpackAbsolutePath('./pino-pretty.js'),
};
```
Note that `pino/file`, `pino-worker` and `thread-stream-worker` are required identifiers. Other identifiers are possible based on the user configuration.
## Webpack Plugin
If you are a Webpack user, you can achieve this with [pino-webpack-plugin](https://github.com/pinojs/pino-webpack-plugin) without manual configuration of `__bundlerPathsOverrides`; however, you still need to configure it manually if you are using other bundlers.
## Esbuild Plugin
[esbuild-plugin-pino](https://github.com/davipon/esbuild-plugin-pino) is the esbuild plugin to generate extra pino files for bundling.
## Bun Plugin
[bun-plugin-pino](https://github.com/vktrl/bun-plugin-pino) is the Bun plugin to generate extra pino files for bundling.

95
node_modules/pino/docs/child-loggers.md generated vendored Normal file
View File

@@ -0,0 +1,95 @@
# Child loggers
Let's assume we want to have `"module":"foo"` added to every log within a
module `foo.js`.
To accomplish this, simply use a child logger:
```js
'use strict'
// imports a pino logger instance of `require('pino')()`
const parentLogger = require('./lib/logger')
const log = parentLogger.child({module: 'foo'})
function doSomething () {
log.info('doSomething invoked')
}
module.exports = {
doSomething
}
```
## Cost of child logging
Child logger creation is fast:
```
benchBunyanCreation*10000: 564.514ms
benchBoleCreation*10000: 283.276ms
benchPinoCreation*10000: 258.745ms
benchPinoExtremeCreation*10000: 150.506ms
```
Logging through a child logger has little performance penalty:
```
benchBunyanChild*10000: 556.275ms
benchBoleChild*10000: 288.124ms
benchPinoChild*10000: 231.695ms
benchPinoExtremeChild*10000: 122.117ms
```
Logging via the child logger of a child logger also has negligible overhead:
```
benchBunyanChildChild*10000: 559.082ms
benchPinoChildChild*10000: 229.264ms
benchPinoExtremeChildChild*10000: 127.753ms
```
## Duplicate keys caveat
Naming conflicts can arise between child loggers and
children of child loggers.
This isn't as bad as it sounds, even if the same keys between
parent and child loggers are used, Pino resolves the conflict in the sanest way.
For example, consider the following:
```js
const pino = require('pino')
pino(pino.destination('./my-log'))
.child({a: 'property'})
.child({a: 'prop'})
.info('howdy')
```
```sh
$ cat my-log
{"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":1459534114473,"a":"property","a":"prop"}
```
Notice how there are two keys named `a` in the JSON output. The sub-child's properties
appear after the parent child properties.
At some point, the logs will most likely be processed (for instance with a [transport](transports.md)),
and this generally involves parsing. `JSON.parse` will return an object where the conflicting
namespace holds the final value assigned to it:
```sh
$ cat my-log | node -e "process.stdin.once('data', (line) => console.log(JSON.stringify(JSON.parse(line))))"
{"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":"2016-04-01T18:08:34.473Z","a":"prop"}
```
Ultimately the conflict is resolved by taking the last value, which aligns with Bunyan's child logging
behavior.
There may be cases where this edge case becomes problematic if a JSON parser with alternative behavior
is used to process the logs. It's recommended to be conscious of namespace conflicts with child loggers,
in light of an expected log processing approach.
One of Pino's performance tricks is to avoid building objects and stringifying
them, so we're building strings instead. This is why duplicate keys between
parents and children will end up in the log output.

16
node_modules/pino/docs/diagnostics.md generated vendored Normal file
View File

@@ -0,0 +1,16 @@
# Diagnostics
Pino provides [tracing channel](tc) events that allow insight into the
internal workings of the library. The currently supported events are:
+ `tracing:pino_asJson:start`: emitted when the final serialization process
of logs is started. The emitted event payload has the following fields:
- `instance`: the Pino instance associated with the function
- `arguments`: the arguments passed to the function
+ `tracing:pino_asJson:end`: emitted at the end of the final serialization
process. The emitted event payload has the following fields:
- `instance`: the Pino instance associated with the function
- `arguments`: the arguments passed to the function
- `result`: the finalized, newline delimited, log line as a string
[tc]: https://nodejs.org/docs/latest/api/diagnostics_channel.html#tracingchannel-channels

85
node_modules/pino/docs/ecosystem.md generated vendored Normal file
View File

@@ -0,0 +1,85 @@
# Pino Ecosystem
This is a list of ecosystem modules that integrate with `pino`.
Modules listed under [Core](#core) are maintained by the Pino team. Modules
listed under [Community](#community) are maintained by independent community
members.
Please send a PR to add new modules!
<a id="core"></a>
## Core
### Frameworks
+ [`express-pino-logger`](https://github.com/pinojs/express-pino-logger): use
Pino to log requests within [express](https://expressjs.com/).
+ [`koa-pino-logger`](https://github.com/pinojs/koa-pino-logger): use Pino to
log requests within [Koa](https://koajs.com/).
+ [`restify-pino-logger`](https://github.com/pinojs/restify-pino-logger): use
Pino to log requests within [restify](http://restify.com/).
+ [`rill-pino-logger`](https://github.com/pinojs/rill-pino-logger): use Pino as
the logger for the [Rill framework](https://rill.site/).
### Utilities
+ [`pino-arborsculpture`](https://github.com/pinojs/pino-arborsculpture): change
log levels at runtime.
+ [`pino-caller`](https://github.com/pinojs/pino-caller): add callsite to the log line.
+ [`pino-clf`](https://github.com/pinojs/pino-clf): reformat Pino logs into
Common Log Format.
+ [`pino-console`](https://github.com/pinojs/pino-console): adapter for the [WHATWG Console](https://console.spec.whatwg.org/) spec.
+ [`pino-debug`](https://github.com/pinojs/pino-debug): use Pino to interpret
[`debug`](https://npm.im/debug) logs.
+ [`pino-elasticsearch`](https://github.com/pinojs/pino-elasticsearch): send
Pino logs to an Elasticsearch instance.
+ [`pino-eventhub`](https://github.com/pinojs/pino-eventhub): send Pino logs
to an [Event Hub](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-what-is-event-hubs).
+ [`pino-filter`](https://github.com/pinojs/pino-filter): filter Pino logs in
the same fashion as the [`debug`](https://npm.im/debug) module.
+ [`pino-gelf`](https://github.com/pinojs/pino-gelf): reformat Pino logs into
GELF format for Graylog.
+ [`pino-hapi`](https://github.com/pinojs/hapi-pino): use Pino as the logger
for [Hapi](https://hapijs.com/).
+ [`pino-http`](https://github.com/pinojs/pino-http): easily use Pino to log
requests with the core `http` module.
+ [`pino-http-print`](https://github.com/pinojs/pino-http-print): reformat Pino
logs into traditional [HTTPD](https://httpd.apache.org/) style request logs.
+ [`pino-mongodb`](https://github.com/pinojs/pino-mongodb): store Pino logs
in a MongoDB database.
+ [`pino-multi-stream`](https://github.com/pinojs/pino-multi-stream): send
logs to multiple destination streams (slow!).
+ [`pino-noir`](https://github.com/pinojs/pino-noir): redact sensitive information
in logs.
+ [`pino-pretty`](https://github.com/pinojs/pino-pretty): basic prettifier to
make log lines human-readable.
+ [`pino-socket`](https://github.com/pinojs/pino-socket): send logs to TCP or UDP
destinations.
+ [`pino-std-serializers`](https://github.com/pinojs/pino-std-serializers): the
core object serializers used within Pino.
+ [`pino-syslog`](https://github.com/pinojs/pino-syslog): reformat Pino logs
to standard syslog format.
+ [`pino-tee`](https://github.com/pinojs/pino-tee): pipe Pino logs into files
based upon log levels.
+ [`pino-test`](https://github.com/pinojs/pino-test): a set of utilities for
verifying logs generated by the Pino logger.
+ [`pino-toke`](https://github.com/pinojs/pino-toke): reformat Pino logs
according to a given format string.
<a id="community"></a>
## Community
+ [`@google-cloud/pino-logging-gcp-config`](https://www.npmjs.com/package/@google-cloud/pino-logging-gcp-config): Config helper and formatter to output [Google Cloud Platform Structured Logging](https://cloud.google.com/logging/docs/structured-logging)
+ [`@newrelic/pino-enricher`](https://github.com/newrelic/newrelic-node-log-extensions/blob/main/packages/pino-log-enricher): a log customization to add New Relic context to use [Logs In Context](https://docs.newrelic.com/docs/logs/logs-context/logs-in-context/)
+ [`cloud-pine`](https://github.com/metcoder95/cloud-pine): transport that provides abstraction and compatibility with [`@google-cloud/logging`](https://www.npmjs.com/package/@google-cloud/logging).
+ [`cls-proxify`](https://github.com/keenondrums/cls-proxify): integration of pino and [CLS](https://github.com/jeff-lewis/cls-hooked). Useful for creating dynamically configured child loggers (e.g. with added trace ID) for each request.
+ [`crawlee-pino`](https://github.com/imyelo/crawlee-pino): use Pino to log within Crawlee
+ [`pino-colada`](https://github.com/lrlna/pino-colada): cute ndjson formatter for pino.
+ [`pino-dev`](https://github.com/dnjstrom/pino-dev): simple prettifier for pino with built-in support for common ecosystem packages.
+ [`pino-fluentd`](https://github.com/davidedantonio/pino-fluentd): send Pino logs to Elasticsearch,
MongoDB, and many [others](https://www.fluentd.org/dataoutputs) via Fluentd.
+ [`pino-lambda`](https://github.com/FormidableLabs/pino-lambda): log transport for cloudwatch support inside aws-lambda
+ [`pino-pretty-min`](https://github.com/unjello/pino-pretty-min): a minimal
prettifier inspired by the [logrus](https://github.com/sirupsen/logrus) logger.
+ [`pino-rotating-file`](https://github.com/homeaway/pino-rotating-file): a hapi-pino log transport for splitting logs into separate, automatically rotating files.
+ [`pino-tiny`](https://github.com/holmok/pino-tiny): a tiny (and extensible?) little log formatter for pino.

345
node_modules/pino/docs/help.md generated vendored Normal file
View File

@@ -0,0 +1,345 @@
# Help
* [Log rotation](#rotate)
* [Reopening log files](#reopening)
* [Saving to multiple files](#multiple)
* [Log filtering](#filter-logs)
* [Transports and systemd](#transport-systemd)
* [Log to different streams](#multi-stream)
* [Duplicate keys](#dupe-keys)
* [Log levels as labels instead of numbers](#level-string)
* [Pino with `debug`](#debug)
* [Unicode and Windows terminal](#windows)
* [Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels](#stackdriver)
* [Using Grafana Loki to evaluate pino logs in a kubernetes cluster](#grafana-loki)
* [Avoid Message Conflict](#avoid-message-conflict)
* [Best performance for logging to `stdout`](#best-performance-for-stdout)
* [Testing](#testing)
<a id="rotate"></a>
## Log rotation
Use a separate tool for log rotation:
We recommend [logrotate](https://github.com/logrotate/logrotate).
Consider we output our logs to `/var/log/myapp.log` like so:
```
$ node server.js > /var/log/myapp.log
```
We would rotate our log files with logrotate, by adding the following to `/etc/logrotate.d/myapp`:
```
/var/log/myapp.log {
su root
daily
rotate 7
delaycompress
compress
notifempty
missingok
copytruncate
}
```
The `copytruncate` configuration has a very slight possibility of lost log lines due
to a gap between copying and truncating - the truncate may occur after additional lines
have been written. To perform log rotation without `copytruncate`, see the [Reopening log files](#reopening)
help.
<a id="reopening"></a>
## Reopening log files
In cases where a log rotation tool doesn't offer copy-truncate capabilities,
or where using them is deemed inappropriate, `pino.destination`
can reopen file paths after a file has been moved away.
One way to use this is to set up a `SIGUSR2` or `SIGHUP` signal handler that
reopens the log file destination, making sure to write the process PID out
somewhere so the log rotation tool knows where to send the signal.
```js
// write the process pid to a well known location for later
const fs = require('node:fs')
fs.writeFileSync('/var/run/myapp.pid', process.pid)
const dest = pino.destination('/log/file')
const logger = require('pino')(dest)
process.on('SIGHUP', () => dest.reopen())
```
The log rotation tool can then be configured to send this signal to the process
after a log rotation event has occurred.
Given a similar scenario as in the [Log rotation](#rotate) section a basic
`logrotate` config that aligns with this strategy would look similar to the following:
```
/var/log/myapp.log {
su root
daily
rotate 7
delaycompress
compress
notifempty
missingok
postrotate
kill -HUP `cat /var/run/myapp.pid`
endscript
}
```
<a id="multiple"></a>
## Saving to multiple files
See [`pino.multistream`](/docs/api.md#pino-multistream).
<a id="filter-logs"></a>
## Log Filtering
The Pino philosophy advocates common, preexisting, system utilities.
Some recommendations in line with this philosophy are:
1. Use [`grep`](https://linux.die.net/man/1/grep):
```sh
$ # View all "INFO" level logs
$ node app.js | grep '"level":30'
```
1. Use [`jq`](https://stedolan.github.io/jq/):
```sh
$ # View all "ERROR" level logs
$ node app.js | jq 'select(.level == 50)'
```
<a id="transport-systemd"></a>
## Transports and systemd
`systemd` makes it complicated to use pipes in services. One method for overcoming
this challenge is to use a subshell:
```
ExecStart=/bin/sh -c '/path/to/node app.js | pino-transport'
```
<a id="multi-stream"></a>
## Log to different streams
Pino's default log destination is the singular destination of `stdout`. While
not recommended for performance reasons, multiple destinations can be targeted
by using [`pino.multistream`](/docs/api.md#pino-multistream).
In this example, we use `stderr` for `error` level logs and `stdout` as default
for all other levels (e.g. `debug`, `info`, and `warn`).
```js
const pino = require('pino')
var streams = [
{level: 'debug', stream: process.stdout},
{level: 'error', stream: process.stderr},
{level: 'fatal', stream: process.stderr}
]
const logger = pino({
name: 'my-app',
level: 'debug', // must be the lowest level of all streams
}, pino.multistream(streams))
```
<a id="dupe-keys"></a>
## How Pino handles duplicate keys
Duplicate keys are possible when a child logger logs an object with a key that
collides with a key in the child logger's bindings.
See the [child logger duplicate keys caveat](/docs/child-loggers.md#duplicate-keys-caveat)
for information on how this is handled.
<a id="level-string"></a>
## Log levels as labels instead of numbers
Pino log lines are meant to be parsable. Thus, Pino's default mode of operation
is to print the level value instead of the string name.
However, you can use the [`formatters`](/docs/api.md#formatters-object) option
with a [`level`](/docs/api.md#level) function to print the string name instead of the level value:
```js
const pino = require('pino')
const log = pino({
formatters: {
level: (label) => {
return {
level: label
}
}
}
})
log.info('message')
// {"level":"info","time":1661632832200,"pid":18188,"hostname":"foo","msg":"message"}
```
Although it works, we recommend using one of these options instead if you are able:
1. If the only change desired is the name then a transport can be used. One such
transport is [`pino-text-level-transport`](https://npm.im/pino-text-level-transport).
1. Use a prettifier like [`pino-pretty`](https://npm.im/pino-pretty) to make
the logs human friendly.
<a id="debug"></a>
## Pino with `debug`
The popular [`debug`](https://npm.im/debug) is used in many modules across the ecosystem.
The [`pino-debug`](https://github.com/pinojs/pino-debug) module
can capture calls to `debug` loggers and run them
through `pino` instead. This results in a 10x (20x in asynchronous mode)
performance improvement - even though `pino-debug` is logging additional
data and wrapping it in JSON.
To quickly enable this install [`pino-debug`](https://github.com/pinojs/pino-debug)
and preload it with the `-r` flag, enabling any `debug` logs with the
`DEBUG` environment variable:
```sh
$ npm i pino-debug
$ DEBUG=* node -r pino-debug app.js
```
[`pino-debug`](https://github.com/pinojs/pino-debug) also offers fine-grain control to map specific `debug`
namespaces to `pino` log levels. See [`pino-debug`](https://github.com/pinojs/pino-debug)
for more.
<a id="windows"></a>
## Unicode and Windows terminal
Pino uses [sonic-boom](https://github.com/mcollina/sonic-boom) to speed
up logging. Internally, it uses [`fs.write`](https://nodejs.org/dist/latest-v10.x/docs/api/fs.html#fs_fs_write_fd_string_position_encoding_callback) to write log lines directly to a file
descriptor. On Windows, Unicode output is not handled properly in the
terminal (both `cmd.exe` and PowerShell), and as such the output could
be visualized incorrectly if the log lines include utf8 characters. It
is possible to configure the terminal to visualize those characters
correctly with the use of [`chcp`](https://ss64.com/nt/chcp.html) by
executing in the terminal `chcp 65001`. This is a known limitation of
Node.js.
<a id="stackdriver"></a>
## Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels
Google Cloud Logging uses `severity` levels instead of log levels. As a result, all logs may show as INFO
level logs while completely ignoring the level set in the pino log. Google Cloud Logging also prefers that
log data is present inside a `message` key instead of the default `msg` key that Pino uses. Use a technique
similar to the one below to retain log levels in Google Cloud Logging
```js
const pino = require('pino')
// https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity
const PinoLevelToSeverityLookup = {
trace: 'DEBUG',
debug: 'DEBUG',
info: 'INFO',
warn: 'WARNING',
error: 'ERROR',
fatal: 'CRITICAL',
};
const defaultPinoConf = {
messageKey: 'message',
formatters: {
level(label, number) {
return {
severity: PinoLevelToSeverityLookup[label] || PinoLevelToSeverityLookup['info'],
level: number,
}
}
},
}
module.exports = function createLogger(options) {
return pino(Object.assign({}, options, defaultPinoConf))
}
```
A library that configures Pino for
[Google Cloud Structured Logging](https://cloud.google.com/logging/docs/structured-logging)
is available at:
[@google-cloud/pino-logging-gcp-config](https://www.npmjs.com/package/@google-cloud/pino-logging-gcp-config)
This library has the following features:
+ Converts Pino log levels to Google Cloud Logging log levels, as above
+ Uses `message` instead of `msg` for the message key, as above
+ Adds a millisecond-granularity timestamp in the
[structure](https://cloud.google.com/logging/docs/agent/logging/configuration#timestamp-processing)
recognised by Google Cloud Logging eg: \
`"timestamp":{"seconds":1445470140,"nanos":123000000}`
+ Adds a sequential
[`insertId`](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#FIELDS.insert_id)
to ensure log messages with identical timestamps are ordered correctly.
+ Logs including an `Error` object have the
[`stack_trace`](https://cloud.google.com/error-reporting/docs/formatting-error-messages#log-error)
property set so that the error is forwarded to Google Cloud Error Reporting.
+ Includes a
[`ServiceContext`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/ServiceContext)
object in the logs for Google Cloud Error Reporting, auto detected from the
environment if not specified
+ Maps the OpenTelemetry properties `span_id`, `trace_id`, and `trace_flags`
to the equivalent Google Cloud Logging fields.
<a id="grafana-loki"></a>
## Using Grafana Loki to evaluate pino logs in a kubernetes cluster
To get pino logs into Grafana Loki there are two options:
1. **Push:** Use [pino-loki](https://github.com/Julien-R44/pino-loki) to send logs directly to Loki.
1. **Pull:** Configure Grafana Promtail to read and properly parse the logs before sending them to Loki.
Similar to Google Cloud logging, this involves remapping the log levels. See this [article](https://medium.com/@janpaepke/structured-logging-in-the-grafana-monitoring-stack-8aff0a5af2f5) for details.
<a id="avoid-message-conflict"></a>
## Avoid Message Conflict
As described in the [`message` documentation](/docs/api.md#message), when a log
is written like `log.info({ msg: 'a message' }, 'another message')` then the
final output JSON will have `"msg":"another message"` and the `'a message'`
string will be lost. To overcome this, the [`logMethod` hook](/docs/api.md#logmethod)
can be used:
```js
'use strict'
const log = require('pino')({
level: 'debug',
hooks: {
logMethod (inputArgs, method) {
if (inputArgs.length === 2 && inputArgs[0].msg) {
inputArgs[0].originalMsg = inputArgs[0].msg
}
return method.apply(this, inputArgs)
}
}
})
log.info('no original message')
log.info({ msg: 'mapped to originalMsg' }, 'a message')
// {"level":30,"time":1596313323106,"pid":63739,"hostname":"foo","msg":"no original message"}
// {"level":30,"time":1596313323107,"pid":63739,"hostname":"foo","msg":"a message","originalMsg":"mapped to originalMsg"}
```
<a id="best-performance-for-stdout"></a>
## Best performance for logging to `stdout`
The best performance for logging directly to stdout is _usually_ achieved by using the
default configuration:
```js
const log = require('pino')();
```
You should only have to configure custom transports or other settings
if you have broader logging requirements.
<a id="testing"></a>
## Testing
See [`pino-test`](https://github.com/pinojs/pino-test).

64
node_modules/pino/docs/lts.md generated vendored Normal file
View File

@@ -0,0 +1,64 @@
## Long Term Support
Pino's Long Term Support (LTS) is provided according to the schedule laid
out in this document:
1. Major releases, "X" release of [semantic versioning][semver] X.Y.Z release
versions, are supported for a minimum period of six months from their release
date. The release date of any specific version can be found at
[https://github.com/pinojs/pino/releases](https://github.com/pinojs/pino/releases).
1. Major releases will receive security updates for an additional six months
from the release of the next major release. After this period
we will still review and release security fixes as long as they are
provided by the community and they do not violate other constraints,
e.g. minimum supported Node.js version.
1. Major releases will be tested and verified against all Node.js
release lines that are supported by the
[Node.js LTS policy](https://github.com/nodejs/Release) within the
LTS period of that given Pino release line. This implies that only
the latest Node.js release of a given line is supported.
A "month" is defined as 30 consecutive days.
> ## Security Releases and Semver
>
> As a consequence of providing long-term support for major releases, there
> are occasions where we need to release breaking changes as a _minor_
> version release. Such changes will _always_ be noted in the
> [release notes](https://github.com/pinojs/pino/releases).
>
> To avoid automatically receiving breaking security updates it is possible to use
> the tilde (`~`) range qualifier. For example, to get patches for the 6.1
> release, and avoid automatically updating to the 6.1 release, specify
> the dependency as `"pino": "~6.1.x"`. This will leave your application vulnerable,
> so please use with caution.
[semver]: https://semver.org/
<a name="lts-schedule"></a>
### Schedule
| Version | Release Date | End Of LTS Date | Node.js |
| :------ | :----------- | :-------------- | :------------------- |
| 9.x | 2024-04-26 | TBD | 18, 20, 22 |
| 8.x | 2022-06-01 | 2024-10-26 | 14, 16, 18, 20 |
| 7.x | 2021-10-14 | 2023-06-01 | 12, 14, 16 |
| 6.x | 2020-03-07 | 2022-04-14 | 10, 12, 14, 16 |
<a name="supported-os"></a>
### CI tested operating systems
Pino uses GitHub Actions for CI testing, please refer to
[GitHub's documentation regarding workflow runners](https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources)
for further details on what the latest virtual environment is in relation to
the YAML workflow labels below:
| OS | YAML Workflow Label | Node.js |
|---------|------------------------|--------------|
| Linux | `ubuntu-latest` | 18, 20, 22 |
| Windows | `windows-latest` | 18, 20, 22 |
| MacOS | `macos-latest` | 18, 20, 22 |

35
node_modules/pino/docs/pretty.md generated vendored Normal file
View File

@@ -0,0 +1,35 @@
# Pretty Printing
By default, Pino log lines are newline delimited JSON (NDJSON). This is perfect
for production usage and long-term storage. It's not so great for development
environments. Thus, Pino logs can be prettified by using a Pino prettifier
module like [`pino-pretty`][pp]:
1. Install a prettifier module as a separate dependency, e.g. `npm install pino-pretty`.
2. Instantiate the logger with the `transport.target` option set to `'pino-pretty'`:
```js
const pino = require('pino')
const logger = pino({
transport: {
target: 'pino-pretty'
},
})
logger.info('hi')
```
3. The transport option can also have an options object containing `pino-pretty` options:
```js
const pino = require('pino')
const logger = pino({
transport: {
target: 'pino-pretty',
options: {
colorize: true
}
}
})
logger.info('hi')
```
[pp]: https://github.com/pinojs/pino-pretty

135
node_modules/pino/docs/redaction.md generated vendored Normal file
View File

@@ -0,0 +1,135 @@
# Redaction
> Redaction is not supported in the browser [#670](https://github.com/pinojs/pino/issues/670)
To redact sensitive information, supply paths to keys that hold sensitive data
using the `redact` option. Note that paths that contain hyphens need to use
brackets to access the hyphenated property:
```js
const logger = require('.')({
redact: ['key', 'path.to.key', 'stuff.thats[*].secret', 'path["with-hyphen"]']
})
logger.info({
key: 'will be redacted',
path: {
to: {key: 'sensitive', another: 'thing'}
},
stuff: {
thats: [
{secret: 'will be redacted', logme: 'will be logged'},
{secret: 'as will this', logme: 'as will this'}
]
}
})
```
This will output:
```JSON
{"level":30,"time":1527777350011,"pid":3186,"hostname":"Davids-MacBook-Pro-3.local","key":"[Redacted]","path":{"to":{"key":"[Redacted]","another":"thing"}},"stuff":{"thats":[{"secret":"[Redacted]","logme":"will be logged"},{"secret":"[Redacted]","logme":"as will this"}]}}
```
The `redact` option can take an array (as shown in the above example) or
an object. This allows control over *how* information is redacted.
For instance, setting the censor:
```js
const logger = require('.')({
redact: {
paths: ['key', 'path.to.key', 'stuff.thats[*].secret'],
censor: '**GDPR COMPLIANT**'
}
})
logger.info({
key: 'will be redacted',
path: {
to: {key: 'sensitive', another: 'thing'}
},
stuff: {
thats: [
{secret: 'will be redacted', logme: 'will be logged'},
{secret: 'as will this', logme: 'as will this'}
]
}
})
```
This will output:
```JSON
{"level":30,"time":1527778563934,"pid":3847,"hostname":"Davids-MacBook-Pro-3.local","key":"**GDPR COMPLIANT**","path":{"to":{"key":"**GDPR COMPLIANT**","another":"thing"}},"stuff":{"thats":[{"secret":"**GDPR COMPLIANT**","logme":"will be logged"},{"secret":"**GDPR COMPLIANT**","logme":"as will this"}]}}
```
The `redact.remove` option also allows for the key and value to be removed from output:
```js
const logger = require('.')({
redact: {
paths: ['key', 'path.to.key', 'stuff.thats[*].secret'],
remove: true
}
})
logger.info({
key: 'will be redacted',
path: {
to: {key: 'sensitive', another: 'thing'}
},
stuff: {
thats: [
{secret: 'will be redacted', logme: 'will be logged'},
{secret: 'as will this', logme: 'as will this'}
]
}
})
```
This will output
```JSON
{"level":30,"time":1527782356751,"pid":5758,"hostname":"Davids-MacBook-Pro-3.local","path":{"to":{"another":"thing"}},"stuff":{"thats":[{"logme":"will be logged"},{"logme":"as will this"}]}}
```
See [pino options in API](/docs/api.md#redact-array-object) for `redact` API details.
<a name="paths"></a>
## Path Syntax
The syntax for paths supplied to the `redact` option conforms to the syntax in path lookups
in standard ECMAScript, with the following additions:
* paths may start with bracket notation
* paths may contain the asterisk `*` to denote a wildcard
* paths are **case sensitive**
By way of example, the following are all valid paths:
* `a.b.c`
* `a["b-c"].d`
* `["a-b"].c`
* `a.b.*`
* `a[*].b`
## Overhead
Pino's redaction functionality is built on top of [`fast-redact`](https://github.com/davidmarkclements/fast-redact)
which adds about 2% overhead to `JSON.stringify` when using paths without wildcards.
When used with pino logger with a single redacted path, any overhead is within noise -
a way to deterministically measure its effect has not been found. This is because it is not a bottleneck.
However, wildcard redaction does carry a non-trivial cost relative to explicitly declaring the keys
(50% in a case where four keys are redacted across two objects). See
the [`fast-redact` benchmarks](https://github.com/davidmarkclements/fast-redact#benchmarks) for details.
## Safety
The `redact` option is intended as an initialization time configuration option.
Path strings must not originate from user input.
The `fast-redact` module uses a VM context to syntax check the paths, user input
should never be combined with such an approach. See the [`fast-redact` Caveat](https://github.com/davidmarkclements/fast-redact#caveat)
and the [`fast-redact` Approach](https://github.com/davidmarkclements/fast-redact#approach) for in-depth information.

1263
node_modules/pino/docs/transports.md generated vendored Normal file

File diff suppressed because it is too large Load Diff

309
node_modules/pino/docs/web.md generated vendored Normal file
View File

@@ -0,0 +1,309 @@
# Web Frameworks
Since HTTP logging is a primary use case, Pino has first-class support for the Node.js
web framework ecosystem.
- [Web Frameworks](#web-frameworks)
- [Pino with Fastify](#pino-with-fastify)
- [Pino with Express](#pino-with-express)
- [Pino with Hapi](#pino-with-hapi)
- [Pino with Restify](#pino-with-restify)
- [Pino with Koa](#pino-with-koa)
- [Pino with Node core `http`](#pino-with-node-core-http)
- [Pino with Nest](#pino-with-nest)
- [Pino with H3](#pino-with-h3)
- [Pino with Hono](#pino-with-hono)
<a id="fastify"></a>
## Pino with Fastify
The Fastify web framework comes bundled with Pino by default, simply set Fastify's
`logger` option to `true` and use `request.log` or `reply.log` for log messages that correspond
to each request:
```js
const fastify = require('fastify')({
logger: true
})
fastify.get('/', async (request, reply) => {
request.log.info('something')
return { hello: 'world' }
})
fastify.listen({ port: 3000 }, (err) => {
if (err) {
fastify.log.error(err)
process.exit(1)
}
})
```
The `logger` option can also be set to an object, which will be passed through directly
as the [`pino` options object](/docs/api.md#options-object).
See the [fastify documentation](https://www.fastify.io/docs/latest/Reference/Logging/) for more information.
<a id="express"></a>
## Pino with Express
```sh
npm install pino-http
```
```js
const app = require('express')()
const pino = require('pino-http')()
app.use(pino)
app.get('/', function (req, res) {
req.log.info('something')
res.send('hello world')
})
app.listen(3000)
```
See the [pino-http README](https://npm.im/pino-http) for more info.
<a id="hapi"></a>
## Pino with Hapi
```sh
npm install hapi-pino
```
```js
'use strict'
const Hapi = require('@hapi/hapi')
const Pino = require('hapi-pino');
async function start () {
// Create a server with a host and port
const server = Hapi.server({
host: 'localhost',
port: 3000
})
// Add the route
server.route({
method: 'GET',
path: '/',
handler: async function (request, h) {
// request.log is HAPI's standard way of logging
request.log(['a', 'b'], 'Request into hello world')
// a pino instance can also be used, which will be faster
request.logger.info('In handler %s', request.path)
return 'hello world'
}
})
await server.register(Pino)
// also as a decorated API
server.logger.info('another way for accessing it')
// and through Hapi standard logging system
server.log(['subsystem'], 'third way for accessing it')
await server.start()
return server
}
start().catch((err) => {
console.log(err)
process.exit(1)
})
```
See the [hapi-pino README](https://npm.im/hapi-pino) for more info.
<a id="restify"></a>
## Pino with Restify
```sh
npm install restify-pino-logger
```
```js
const server = require('restify').createServer({name: 'server'})
const pino = require('restify-pino-logger')()
server.use(pino)
server.get('/', function (req, res) {
req.log.info('something')
res.send('hello world')
})
server.listen(3000)
```
See the [restify-pino-logger README](https://npm.im/restify-pino-logger) for more info.
<a id="koa"></a>
## Pino with Koa
```sh
npm install koa-pino-logger
```
```js
const Koa = require('koa')
const app = new Koa()
const pino = require('koa-pino-logger')()
app.use(pino)
app.use((ctx) => {
ctx.log.info('something else')
ctx.body = 'hello world'
})
app.listen(3000)
```
See the [koa-pino-logger README](https://github.com/pinojs/koa-pino-logger) for more info.
<a id="http"></a>
## Pino with Node core `http`
```sh
npm install pino-http
```
```js
const http = require('http')
const server = http.createServer(handle)
const logger = require('pino-http')()
function handle (req, res) {
logger(req, res)
req.log.info('something else')
res.end('hello world')
}
server.listen(3000)
```
See the [pino-http README](https://npm.im/pino-http) for more info.
<a id="nest"></a>
## Pino with Nest
```sh
npm install nestjs-pino
```
```ts
import { NestFactory } from '@nestjs/core'
import { Controller, Get, Module } from '@nestjs/common'
import { LoggerModule, Logger } from 'nestjs-pino'
@Controller()
export class AppController {
constructor(private readonly logger: Logger) {}
@Get()
getHello() {
this.logger.log('something')
return `Hello world`
}
}
@Module({
controllers: [AppController],
imports: [LoggerModule.forRoot()]
})
class MyModule {}
async function bootstrap() {
const app = await NestFactory.create(MyModule)
await app.listen(3000)
}
bootstrap()
```
See the [nestjs-pino README](https://npm.im/nestjs-pino) for more info.
<a id="h3"></a>
## Pino with H3
```sh
npm install pino-http h3
```
Save as `server.mjs`:
```js
import { createApp, createRouter, eventHandler, fromNodeMiddleware } from "h3";
import pino from 'pino-http'
export const app = createApp();
const router = createRouter();
app.use(router);
app.use(fromNodeMiddleware(pino()))
app.use(eventHandler((event) => {
event.node.req.log.info('something')
return 'hello world'
}))
router.get(
"/",
eventHandler((event) => {
return { path: event.path, message: "Hello World!" };
}),
);
```
Execute `npx --yes listhen -w --open ./server.mjs`.
See the [pino-http README](https://npm.im/pino-http) for more info.
<a id="hono"></a>
## Pino with Hono
```sh
npm install pino pino-http hono
```
```js
import { serve } from '@hono/node-server';
import { Hono } from 'hono';
import { requestId } from 'hono/request-id';
import { pinoHttp } from 'pino-http';
const app = new Hono();
app.use(requestId());
app.use(async (c, next) => {
// pass hono's request-id to pino-http
c.env.incoming.id = c.var.requestId;
// map express style middleware to hono
await new Promise((resolve) => pinoHttp()(c.env.incoming, c.env.outgoing, () => resolve()));
c.set('logger', c.env.incoming.log);
await next();
});
app.get('/', (c) => {
c.var.logger.info('something');
return c.text('Hello Node.js!');
});
serve(app);
```
See the [pino-http README](https://npm.im/pino-http) for more info.

26
node_modules/pino/docsify/sidebar.md generated vendored Normal file
View File

@@ -0,0 +1,26 @@
* [Readme](/)
* [API](/docs/api.md)
* [Browser API](/docs/browser.md)
* [Redaction](/docs/redaction.md)
* [Child Loggers](/docs/child-loggers.md)
* [Transports](/docs/transports.md)
* [Web Frameworks](/docs/web.md)
* [Pretty Printing](/docs/pretty.md)
* [Asynchronous Logging](/docs/asynchronous.md)
* [Ecosystem](/docs/ecosystem.md)
* [Benchmarks](/docs/benchmarks.md)
* [Long Term Support](/docs/lts.md)
* [Help](/docs/help.md)
* [Log rotation](/docs/help.md#rotate)
* [Reopening log files](/docs/help.md#reopening)
* [Saving to multiple files](/docs/help.md#multiple)
* [Log filtering](/docs/help.md#filter-logs)
* [Transports and systemd](/docs/help.md#transport-systemd)
* [Duplicate keys](/docs/help.md#dupe-keys)
* [Log levels as labels instead of numbers](/docs/help.md#level-string)
* [Pino with `debug`](/docs/help.md#debug)
* [Unicode and Windows terminal](/docs/help.md#windows)
* [Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels](/docs/help.md#stackdriver)
* [Avoid Message Conflict](/docs/help.md#avoid-message-conflict)
* [Best performance for logging to `stdout`](/docs/help.md#best-performance-for-stdout)
* [Testing](/docs/help.md#testing)

43
node_modules/pino/examples/basic.js generated vendored Normal file
View File

@@ -0,0 +1,43 @@
'use strict'

// Pino's primary usage writes ndjson to `stdout`:
const pino = require('..')()

// However, if "human readable" output is desired,
// `pino-pretty` can be provided as the destination
// stream by uncommenting the following line in place
// of the previous declaration:
// const pino = require('..')(require('pino-pretty')())

// Plain messages and printf-style interpolation.
pino.info('hello world')
pino.error('this is at error level')
pino.info('the answer is %d', 42)

// A leading object argument is merged into the emitted log line.
pino.info({ obj: 42 }, 'hello world')
pino.info({ obj: 42, b: 2 }, 'hello world')
pino.info({ nested: { obj: 42 } }, 'nested')

// Logging works from deferred callbacks too.
setImmediate(() => {
  pino.info('after setImmediate')
})
pino.error(new Error('an error'))

// Child loggers add their own bindings on top of the parent's.
const child = pino.child({ a: 'property' })
child.info('hello child!')
const childsChild = child.child({ another: 'property' })
childsChild.info('hello baby..')

// debug is below the default threshold, so it is suppressed until the
// level is lowered to 'trace' on the next line.
pino.debug('this should be mute')
pino.level = 'trace'
pino.debug('this is a debug statement')
pino.child({ another: 'property' }).debug('this is a debug statement via child')
pino.trace('this is a trace statement')
pino.debug('this is a "debug" statement with "')

// Errors (and null) can be passed directly at any level.
pino.info(new Error('kaboom'))
pino.info(null)
pino.info(new Error('kaboom'), 'with', 'a', 'message')

68
node_modules/pino/examples/transport.js generated vendored Normal file
View File

@@ -0,0 +1,68 @@
'use strict'

const pino = require('..')
const { tmpdir } = require('node:os')
const { join } = require('node:path')

// Temporary file that receives the warn-and-above target's output.
const file = join(tmpdir(), `pino-${process.pid}-example`)

// A single transport with multiple targets, each with its own minimum level.
const transport = pino.transport({
  targets: [{
    level: 'warn',
    target: 'pino/file',
    options: {
      destination: file
    }
  /*
  }, {
    level: 'info',
    target: 'pino-elasticsearch',
    options: {
      node: 'http://localhost:9200'
    }
  */
  }, {
    level: 'info',
    target: 'pino-pretty'
  }]
})
const logger = pino(transport)

logger.info({
  file
}, 'logging destination')

logger.info('hello world')
logger.error('this is at error level')
logger.info('the answer is %d', 42)
logger.info({ obj: 42 }, 'hello world')
logger.info({ obj: 42, b: 2 }, 'hello world')
logger.info({ nested: { obj: 42 } }, 'nested')
logger.warn('WARNING!')

// Logging works from deferred callbacks too.
setImmediate(() => {
  logger.info('after setImmediate')
})
logger.error(new Error('an error'))

// Child loggers log through the same transport as the parent.
const child = logger.child({ a: 'property' })
child.info('hello child!')
const childsChild = child.child({ another: 'property' })
childsChild.info('hello baby..')

// debug is suppressed until the level is lowered to 'trace'.
logger.debug('this should be mute')
logger.level = 'trace'
logger.debug('this is a debug statement')
logger.child({ another: 'property' }).debug('this is a debug statement via child')
logger.trace('this is a trace statement')
logger.debug('this is a "debug" statement with "')
logger.info(new Error('kaboom'))
logger.info(null)
logger.info(new Error('kaboom'), 'with', 'a', 'message')

BIN
node_modules/pino/favicon-16x16.png generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 970 B

BIN
node_modules/pino/favicon-32x32.png generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

BIN
node_modules/pino/favicon.ico generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

12
node_modules/pino/file.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
'use strict'
const pino = require('./pino')
const { once } = require('node:events')
module.exports = async function (opts = {}) {
const destOpts = Object.assign({}, opts, { dest: opts.destination || 1, sync: false })
delete destOpts.destination
const destination = pino.destination(destOpts)
await once(destination, 'ready')
return destination
}

42
node_modules/pino/inc-version.sh generated vendored Normal file
View File

@@ -0,0 +1,42 @@
#!/bin/bash
# Interactively bump the package version: rewrites the version in
# package.json and the matching version string embedded in lib/meta.js.
set -e

# Prefer tools installed in node_modules (e.g. the semver CLI).
PATH=./node_modules/.bin:${PATH}

CURRENT_VERSION=$(jq -r .version package.json)

# Map the (case-insensitive) first argument to a semver increment level;
# anything unrecognized falls back to a patch bump.
case ${1} in
  Major | MAJOR | major)
    LEVEL=major
    ;;
  Minor | MINOR | minor)
    LEVEL=minor
    ;;
  Patch | PATCH | patch)
    LEVEL=patch
    ;;
  *)
    LEVEL=patch
    ;;
esac

NEW_VERSION=$(semver -i ${LEVEL} ${CURRENT_VERSION})

# Show the planned bump and wait for confirmation before touching files.
echo "${CURRENT_VERSION} => ${NEW_VERSION}"
read -n 1 -s -r -p "Press any key to continue (ctrl+c to abort)..."
echo ""

echo "Patching package.json..."
cat package.json | \
  jq --arg vers "${NEW_VERSION}" '.version = $vers' | \
  tee package.json 1>/dev/null

echo "Patching lib/meta.js ..."
# The ${VAR//\./\\.} expansions escape dots so sed matches them literally.
SED_SCRIPT=$(printf 's/%s/%s/' ${CURRENT_VERSION//\./\\.} ${NEW_VERSION//\./\\.})
cat ./lib/meta.js | \
  sed -e ${SED_SCRIPT} | \
  tee ./lib/meta.js 1>/dev/null

echo "Done."

55
node_modules/pino/index.html generated vendored Normal file
View File

@@ -0,0 +1,55 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Pino - Super fast, all natural JSON logger for Node.js</title>
<meta name="description" content="Super fast, all natural JSON logger for Node.js">
<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<link rel="stylesheet" href="//unpkg.com/docsify-themeable/dist/css/theme-simple.css">
<style>
:root {
--base-font-size: 16px;
--theme-color: rgb(104, 118, 52);
--link-color: rgb(104, 118, 52);
--link-color--hover: rgb(137, 152, 100);
--sidebar-name-margin: 0;
--sidebar-name-padding: 0;
--code-font-size: .9em;
}
.sidebar > h1 {
margin-bottom: -.75em;
margin-top: .75em;
}
.sidebar > h1 img {
height: 4em;
}
.markdown-section a code {
color: var(--link-color)!important;
}
.markdown-section code:not([class*="lang-"]):not([class*="language-"]) {
white-space: unset
}
</style>
<link rel="icon" type="image/png" sizes="32x32" href="favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="favicon-16x16.png">
</head>
<body>
<div id="app"></div>
</body>
<script>
window.$docsify = {
name: 'pino',
logo: './pino-tree.png',
loadSidebar: 'docsify/sidebar.md',
repo: 'https://github.com/pinojs/pino',
auto2top: true,
ga: 'UA-103155139-1'
}
</script>
<script src="//unpkg.com/docsify/lib/docsify.min.js"></script>
<script src="//unpkg.com/docsify/lib/plugins/search.min.js"></script>
<script src="//unpkg.com/docsify/lib/plugins/ga.min.js"></script>
<!-- To enable syntax highlighting on TypeScript codes: -->
<script src="//cdn.jsdelivr.net/npm/prismjs@1/components/prism-typescript.min.js"></script>
</html>

30
node_modules/pino/lib/caller.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
'use strict'

// Replacement for Error.prepareStackTrace that hands back the raw
// structured CallSite array instead of a formatted string.
function passthroughPrepareStackTrace (_, stack) {
  return stack
}

/**
 * Collect the file names of the frames above the caller of this function,
 * using the V8 structured stack trace API (Error.prepareStackTrace).
 *
 * @returns {Array|undefined} file names for each frame (individual entries
 *   may be null), or undefined when the runtime does not provide a
 *   structured stack array.
 */
function getCallers () {
  const originalPrepare = Error.prepareStackTrace
  Error.prepareStackTrace = passthroughPrepareStackTrace
  const stack = new Error().stack
  Error.prepareStackTrace = originalPrepare

  if (!Array.isArray(stack)) {
    return undefined
  }

  // Skip the two innermost frames: getCallers itself and its direct caller.
  return stack
    .slice(2)
    .filter((frame) => frame)
    .map((frame) => frame.getFileName())
}

module.exports = getCallers

28
node_modules/pino/lib/constants.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
/**
 * Represents default log level values
 *
 * These numeric values are what end up in the emitted JSON `level` field;
 * larger numbers correspond to more severe levels.
 *
 * @enum {number}
 */
const DEFAULT_LEVELS = {
  trace: 10,
  debug: 20,
  info: 30,
  warn: 40,
  error: 50,
  fatal: 60
}

/**
 * Represents sort order direction: `ascending` or `descending`
 *
 * Used to configure how level values are compared against the active
 * threshold (see `compareLevel` in lib/levels.js).
 *
 * @enum {string}
 */
const SORTING_ORDER = {
  ASC: 'ASC',
  DESC: 'DESC'
}

module.exports = {
  DEFAULT_LEVELS,
  SORTING_ORDER
}

8
node_modules/pino/lib/deprecations.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
'use strict'

// Shared process-warning emitter used for pino's deprecation warnings.
// New deprecations are registered with warning.create, as in the
// commented-out template below.
const warning = require('process-warning')()
module.exports = warning

// const warnName = 'PinoWarning'
// warning.create(warnName, 'PINODEP010', 'A new deprecation')

241
node_modules/pino/lib/levels.js generated vendored Normal file
View File

@@ -0,0 +1,241 @@
'use strict'
/* eslint no-prototype-builtins: 0 */
const {
lsCacheSym,
levelValSym,
useOnlyCustomLevelsSym,
streamSym,
formattersSym,
hooksSym,
levelCompSym
} = require('./symbols')
const { noop, genLog } = require('./tools')
const { DEFAULT_LEVELS, SORTING_ORDER } = require('./constants')
// Factories for the six standard level methods, keyed by level name.
// Each factory receives the logMethod hook and returns the log method
// that gets bound onto the logger instance by setLevel.
const levelMethods = {
  // fatal logs like the other levels but then synchronously flushes the
  // destination stream when it supports flushSync — presumably so a fatal
  // line is not lost if the process exits immediately afterwards.
  fatal: (hook) => {
    const logFatal = genLog(DEFAULT_LEVELS.fatal, hook)
    return function (...args) {
      const stream = this[streamSym]
      logFatal.call(this, ...args)
      if (typeof stream.flushSync === 'function') {
        try {
          stream.flushSync()
        } catch (e) {
          // Flush errors are deliberately swallowed; see
          // https://github.com/pinojs/pino/pull/740#discussion_r346788313
        }
      }
    }
  },
  error: (hook) => genLog(DEFAULT_LEVELS.error, hook),
  warn: (hook) => genLog(DEFAULT_LEVELS.warn, hook),
  info: (hook) => genLog(DEFAULT_LEVELS.info, hook),
  debug: (hook) => genLog(DEFAULT_LEVELS.debug, hook),
  trace: (hook) => genLog(DEFAULT_LEVELS.trace, hook)
}
// Inverse of DEFAULT_LEVELS: numeric level value -> level name.
const nums = Object.fromEntries(
  Object.entries(DEFAULT_LEVELS).map(([name, value]) => [value, name])
)

// Default partial log-line prefixes keyed by level value, e.g. '{"level":30'
// (the closing brace is appended later when the line is completed).
const initialLsCache = Object.fromEntries(
  Object.keys(nums).map((value) => [value, '{"level":' + Number(value)])
)
// Rebuild the per-level log-line prefix cache for an instance, running the
// configured level formatter over every known level. Each cache entry is a
// partial JSON string with the trailing brace stripped (cf. initialLsCache).
// Stores the cache on the instance under lsCacheSym and returns the instance.
function genLsCache (instance) {
  const formatLevel = instance[formattersSym].level
  const { labels } = instance.levels
  const cache = {}
  // Note: labels maps numeric value -> name, so `num` here is the level value.
  for (const num in labels) {
    cache[num] = JSON.stringify(formatLevel(labels[num], Number(num))).slice(0, -1)
  }
  instance[lsCacheSym] = cache
  return instance
}
// The six built-in level names.
const STANDARD_LEVEL_NAMES = new Set(['fatal', 'error', 'warn', 'info', 'debug', 'trace'])

// Report whether `level` is one of pino's built-in level names. Always
// false when the logger was configured with useOnlyCustomLevels, so custom
// definitions take precedence even for the standard names.
function isStandardLevel (level, useOnlyCustomLevels) {
  if (useOnlyCustomLevels) {
    return false
  }
  return STANDARD_LEVEL_NAMES.has(level)
}
// Setter backing the `level` property. Accepts a level label or numeric
// value, records the new threshold on the instance, rebinds every level
// method (disabled levels become noop), and emits 'level-change'.
function setLevel (level) {
  const { labels, values } = this.levels
  if (typeof level === 'number') {
    // Translate a numeric level to its label before validating by name.
    // NOTE(review): message lacks a space between "value" and the number.
    if (labels[level] === undefined) throw Error('unknown level value' + level)
    level = labels[level]
  }
  if (values[level] === undefined) throw Error('unknown level ' + level)
  const preLevelVal = this[levelValSym]
  const levelVal = this[levelValSym] = values[level]
  const useOnlyCustomLevelsVal = this[useOnlyCustomLevelsSym]
  const levelComparison = this[levelCompSym]
  const hook = this[hooksSym].logMethod
  // Rebind each level method: levels that fail the comparison against the
  // new threshold get noop; standard levels use the cached factories
  // (fatal additionally flushes); custom levels are generated via genLog.
  for (const key in values) {
    if (levelComparison(values[key], levelVal) === false) {
      this[key] = noop
      continue
    }
    this[key] = isStandardLevel(key, useOnlyCustomLevelsVal) ? levelMethods[key](hook) : genLog(values[key], hook)
  }
  // Listeners receive (newLabel, newValue, prevLabel, prevValue, instance).
  this.emit(
    'level-change',
    level,
    levelVal,
    labels[preLevelVal],
    preLevelVal,
    this
  )
}
// Getter behind the `level` accessor: returns the current level label, or
// '' when the levels table is unavailable. (The `level` parameter is unused.)
function getLevel (level) {
  const { levels, levelVal } = this
  // protection against potential loss of Pino scope from serializers (edge case with circular refs - https://github.com/pinojs/pino/issues/833)
  return (levels && levels.labels) ? levels.labels[levelVal] : ''
}
// True when `logLevel` (a label) is known and would be emitted at this
// instance's current threshold, per the configured level comparison.
function isLevelEnabled (logLevel) {
  const { values } = this.levels
  const logLevelVal = values[logLevel]
  return logLevelVal !== undefined && this[levelCompSym](logLevelVal, this[levelValSym])
}
/**
 * Default level comparison: decide whether a record at level `current`
 * passes the threshold `expected`, honouring the sorting direction.
 *
 * @param {SORTING_ORDER} direction comparison direction "ASC" or "DESC"
 * @param {number} current numeric level of the record being logged
 * @param {number} expected numeric threshold to compare with
 * @returns {boolean}
 */
function compareLevel (direction, current, expected) {
  return direction === SORTING_ORDER.DESC
    ? current <= expected
    : current >= expected
}
/**
 * Create a level comparison function based on `levelComparison`.
 * A string ("ASC"/"DESC") selects the default comparator bound to that
 * direction; a function is returned unchanged as a custom comparator.
 *
 * @param {SORTING_ORDER | Function} levelComparison sort direction or custom comparison function
 * @returns {Function}
 */
function genLevelComparison (levelComparison) {
  if (typeof levelComparison === 'string') {
    return compareLevel.bind(null, levelComparison)
  }
  return levelComparison
}
// Build the { labels, values } lookup tables for a logger:
// labels maps number -> name (with Infinity -> 'silent'),
// values maps name -> number (with 'silent' -> Infinity).
// Custom levels are merged over the defaults unless useOnlyCustomLevels.
function mappings (customLevels = null, useOnlyCustomLevels = false) {
  // customNums: inverse map (number -> name) of the custom levels
  const customNums = customLevels
    /* eslint-disable */
    ? Object.keys(customLevels).reduce((o, k) => {
        o[customLevels[k]] = k
        return o
      }, {})
    : null
    /* eslint-enable */

  const labels = Object.assign(
    Object.create(Object.prototype, { Infinity: { value: 'silent' } }),
    useOnlyCustomLevels ? null : nums,
    customNums
  )
  const values = Object.assign(
    Object.create(Object.prototype, { silent: { value: Infinity } }),
    useOnlyCustomLevels ? null : DEFAULT_LEVELS,
    customLevels
  )
  return { labels, values }
}
// Validate that the configured default level exists in the final level
// table (defaults + customLevels, or customLevels only). Accepts either a
// numeric value or a level name; throws when it is not present.
function assertDefaultLevelFound (defaultLevel, customLevels, useOnlyCustomLevels) {
  if (typeof defaultLevel === 'number') {
    // collect every acceptable numeric value, including Infinity ('silent')
    const values = [].concat(
      Object.keys(customLevels || {}).map(key => customLevels[key]),
      useOnlyCustomLevels ? [] : Object.keys(nums).map(level => +level),
      Infinity
    )
    if (!values.includes(defaultLevel)) {
      throw Error(`default level:${defaultLevel} must be included in custom levels`)
    }
    return
  }

  // NOTE(review): despite its name, this holds a name -> value map (it is
  // used only for a `defaultLevel in labels` key-existence check)
  const labels = Object.assign(
    Object.create(Object.prototype, { silent: { value: Infinity } }),
    useOnlyCustomLevels ? null : DEFAULT_LEVELS,
    customLevels
  )
  if (!(defaultLevel in labels)) {
    throw Error(`default level:${defaultLevel} must be included in custom levels`)
  }
}
/**
 * Ensure no custom level reuses an existing level name or numeric value.
 *
 * @param {{labels: Object, values: Object}} levels current level tables
 * @param {Object} customLevels proposed name -> number additions
 * @throws {Error} on a name or value collision
 */
function assertNoLevelCollisions (levels, customLevels) {
  const { labels, values } = levels
  for (const name in customLevels) {
    if (name in values) {
      throw Error('levels cannot be overridden')
    }
    if (customLevels[name] in labels) {
      throw Error('pre-existing level values cannot be used for new levels')
    }
  }
}
/**
 * Validates that `levelComparison` is a supported comparator: either a
 * custom function or one of the SORTING_ORDER strings ("ASC"/"DESC").
 *
 * @throws Error
 * @param {SORTING_ORDER | Function} levelComparison - value to validate
 * @returns {void}
 */
function assertLevelComparison (levelComparison) {
  if (typeof levelComparison === 'function') {
    return
  }

  if (typeof levelComparison === 'string' && Object.values(SORTING_ORDER).includes(levelComparison)) {
    return
  }

  throw new Error('Levels comparison should be one of "ASC", "DESC" or "function" type')
}
module.exports = {
initialLsCache,
genLsCache,
levelMethods,
getLevel,
setLevel,
isLevelEnabled,
mappings,
assertNoLevelCollisions,
assertDefaultLevelFound,
genLevelComparison,
assertLevelComparison
}

3
node_modules/pino/lib/meta.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
'use strict'
module.exports = { version: '9.10.0' }

203
node_modules/pino/lib/multistream.js generated vendored Normal file
View File

@@ -0,0 +1,203 @@
'use strict'
const metadata = Symbol.for('pino.metadata')
const { DEFAULT_LEVELS } = require('./constants')
const DEFAULT_INFO_LEVEL = DEFAULT_LEVELS.info
// Create a pino-compatible destination that fans each log line out to
// multiple streams, each with its own minimum level. Streams are kept
// sorted ascending by level so write() can stop early.
function multistream (streamsArray, opts) {
  streamsArray = streamsArray || []
  opts = opts || { dedupe: false }

  // level name -> number table; custom levels may be layered on top
  const streamLevels = Object.create(DEFAULT_LEVELS)
  streamLevels.silent = Infinity
  if (opts.levels && typeof opts.levels === 'object') {
    Object.keys(opts.levels).forEach(i => {
      streamLevels[i] = opts.levels[i]
    })
  }

  const res = {
    write,
    add,
    remove,
    emit,
    flushSync,
    end,
    minLevel: 0,
    lastId: 0, // monotonically increasing id assigned to each added stream
    streams: [],
    clone,
    [metadata]: true, // ask pino to attach lastLevel/lastMsg/... before write
    streamLevels
  }
  if (Array.isArray(streamsArray)) {
    streamsArray.forEach(add, res)
  } else {
    add.call(res, streamsArray)
  }

  // clean this object up
  // or it will stay allocated forever
  // as it is closed on the following closures
  streamsArray = null

  return res

  // we can exit early because the streams are ordered by level
  function write (data) {
    let dest
    const level = this.lastLevel
    const { streams } = this
    // for handling situation when several streams has the same level
    let recordedLevel = 0
    let stream

    // if dedupe set to true we send logs to the stream with the highest level
    // therefore, we have to change sorting order
    for (let i = initLoopVar(streams.length, opts.dedupe); checkLoopVar(i, streams.length, opts.dedupe); i = adjustLoopVar(i, opts.dedupe)) {
      dest = streams[i]
      if (dest.level <= level) {
        if (recordedLevel !== 0 && recordedLevel !== dest.level) {
          break
        }
        stream = dest.stream
        if (stream[metadata]) {
          // propagate pino's per-line metadata to the downstream destination
          const { lastTime, lastMsg, lastObj, lastLogger } = this
          stream.lastLevel = level
          stream.lastTime = lastTime
          stream.lastMsg = lastMsg
          stream.lastObj = lastObj
          stream.lastLogger = lastLogger
        }
        stream.write(data)
        if (opts.dedupe) {
          recordedLevel = dest.level
        }
      } else if (!opts.dedupe) {
        break
      }
    }
  }

  // Re-emit events (e.g. 'error') to every stream that supports emit().
  function emit (...args) {
    for (const { stream } of this.streams) {
      if (typeof stream.emit === 'function') {
        stream.emit(...args)
      }
    }
  }

  // Synchronously flush every stream that supports flushSync().
  function flushSync () {
    for (const { stream } of this.streams) {
      if (typeof stream.flushSync === 'function') {
        stream.flushSync()
      }
    }
  }

  // Register a StreamEntry ({ stream, level }) or a bare DestinationStream.
  function add (dest) {
    if (!dest) {
      return res
    }

    // Check that dest implements either StreamEntry or DestinationStream
    const isStream = typeof dest.write === 'function' || dest.stream
    const stream_ = dest.write ? dest : dest.stream
    // This is necessary to provide a meaningful error message, otherwise it throws somewhere inside write()
    if (!isStream) {
      throw Error('stream object needs to implement either StreamEntry or DestinationStream interface')
    }

    const { streams, streamLevels } = this

    // resolve the entry's numeric level: explicit levelVal wins, then a
    // named or numeric level, then the default 'info' value
    let level
    if (typeof dest.levelVal === 'number') {
      level = dest.levelVal
    } else if (typeof dest.level === 'string') {
      level = streamLevels[dest.level]
    } else if (typeof dest.level === 'number') {
      level = dest.level
    } else {
      level = DEFAULT_INFO_LEVEL
    }

    const dest_ = {
      stream: stream_,
      level,
      levelVal: undefined,
      id: ++res.lastId
    }

    streams.unshift(dest_)
    streams.sort(compareByLevel)

    this.minLevel = streams[0].level

    return res
  }

  // Remove a previously added stream by the id assigned in add().
  function remove (id) {
    const { streams } = this
    const index = streams.findIndex(s => s.id === id)
    if (index >= 0) {
      streams.splice(index, 1)
      streams.sort(compareByLevel)
      this.minLevel = streams.length > 0 ? streams[0].level : -1
    }
    return res
  }

  // Flush (when possible) and end every stream.
  function end () {
    for (const { stream } of this.streams) {
      if (typeof stream.flushSync === 'function') {
        stream.flushSync()
      }
      stream.end()
    }
  }

  // Shallow clone with every entry forced to `level`; shares the
  // underlying stream objects with the original.
  function clone (level) {
    const streams = new Array(this.streams.length)

    for (let i = 0; i < streams.length; i++) {
      streams[i] = {
        level,
        stream: this.streams[i].stream
      }
    }

    return {
      write,
      add,
      remove,
      minLevel: level,
      streams,
      clone,
      emit,
      flushSync,
      [metadata]: true
    }
  }
}
// Ascending sort comparator ordering stream entries by numeric level.
function compareByLevel (left, right) {
  return left.level - right.level
}
// Starting index for the fan-out loop: last entry when deduping (iterate
// from the highest level down), first entry otherwise.
function initLoopVar (length, dedupe) {
  if (dedupe) {
    return length - 1
  }
  return 0
}
// Step the fan-out loop index: backwards when deduping, forwards otherwise.
function adjustLoopVar (index, dedupe) {
  if (dedupe) {
    return index - 1
  }
  return index + 1
}
// Loop-continuation test matching initLoopVar/adjustLoopVar's direction.
function checkLoopVar (index, length, dedupe) {
  if (dedupe) {
    return index >= 0
  }
  return index < length
}
module.exports = multistream

238
node_modules/pino/lib/proto.js generated vendored Normal file
View File

@@ -0,0 +1,238 @@
'use strict'
/* eslint no-prototype-builtins: 0 */
const { EventEmitter } = require('node:events')
const {
lsCacheSym,
levelValSym,
setLevelSym,
getLevelSym,
chindingsSym,
parsedChindingsSym,
mixinSym,
asJsonSym,
writeSym,
mixinMergeStrategySym,
timeSym,
timeSliceIndexSym,
streamSym,
serializersSym,
formattersSym,
errorKeySym,
messageKeySym,
useOnlyCustomLevelsSym,
needsMetadataGsym,
redactFmtSym,
stringifySym,
formatOptsSym,
stringifiersSym,
msgPrefixSym,
hooksSym
} = require('./symbols')
const {
getLevel,
setLevel,
isLevelEnabled,
mappings,
initialLsCache,
genLsCache,
assertNoLevelCollisions
} = require('./levels')
const {
asChindings,
asJson,
buildFormatters,
stringify
} = require('./tools')
const {
version
} = require('./meta')
const redaction = require('./redaction')
// note: use of class is satirical
// https://github.com/pinojs/pino/pull/433#pullrequestreview-127703127
const constructor = class Pino {}
// Shared prototype for all logger instances, backed by EventEmitter.
// Level get/set and the write/serialize entry points are wired through
// symbols so user bindings cannot shadow them.
const prototype = {
  constructor,
  child,
  bindings,
  setBindings,
  flush,
  isLevelEnabled,
  version,
  get level () { return this[getLevelSym]() },
  set level (lvl) { this[setLevelSym](lvl) },
  get levelVal () { return this[levelValSym] },
  set levelVal (n) { throw Error('levelVal is read-only') },
  get msgPrefix () { return this[msgPrefixSym] },
  get [Symbol.toStringTag] () { return 'Pino' },
  [lsCacheSym]: initialLsCache,
  [writeSym]: write,
  [asJsonSym]: asJson,
  [getLevelSym]: getLevel,
  [setLevelSym]: setLevel
}

Object.setPrototypeOf(prototype, EventEmitter.prototype)

// exporting and consuming the prototype object using factory pattern fixes scoping issues with getters when serializing
module.exports = function () {
  return Object.create(prototype)
}
// Identity bindings formatter used when the child does not supply one.
const resetChildingsFormatter = bindings => bindings

// Create a child logger inheriting from this instance via the prototype
// chain. Child-specific serializers, formatters, custom levels, redact
// rules, msgPrefix and level may be supplied through `options`.
function child (bindings, options) {
  if (!bindings) {
    throw Error('missing bindings for child Pino')
  }
  options = options || {} // default options to empty object
  const serializers = this[serializersSym]
  const formatters = this[formattersSym]
  const instance = Object.create(this)

  if (options.hasOwnProperty('serializers') === true) {
    // merge parent serializers (string and symbol keys) with the child's;
    // child entries win on conflict
    instance[serializersSym] = Object.create(null)

    for (const k in serializers) {
      instance[serializersSym][k] = serializers[k]
    }
    const parentSymbols = Object.getOwnPropertySymbols(serializers)
    /* eslint no-var: off */
    for (var i = 0; i < parentSymbols.length; i++) {
      const ks = parentSymbols[i]
      instance[serializersSym][ks] = serializers[ks]
    }

    for (const bk in options.serializers) {
      instance[serializersSym][bk] = options.serializers[bk]
    }
    const bindingsSymbols = Object.getOwnPropertySymbols(options.serializers)
    for (var bi = 0; bi < bindingsSymbols.length; bi++) {
      const bks = bindingsSymbols[bi]
      instance[serializersSym][bks] = options.serializers[bks]
    }
  } else instance[serializersSym] = serializers
  if (options.hasOwnProperty('formatters')) {
    const { level, bindings: chindings, log } = options.formatters
    instance[formattersSym] = buildFormatters(
      level || formatters.level,
      chindings || resetChildingsFormatter,
      log || formatters.log
    )
  } else {
    instance[formattersSym] = buildFormatters(
      formatters.level,
      resetChildingsFormatter,
      formatters.log
    )
  }
  if (options.hasOwnProperty('customLevels') === true) {
    assertNoLevelCollisions(this.levels, options.customLevels)
    instance.levels = mappings(options.customLevels, instance[useOnlyCustomLevelsSym])
    genLsCache(instance)
  }

  // redact must place before asChindings and only replace if exist
  if ((typeof options.redact === 'object' && options.redact !== null) || Array.isArray(options.redact)) {
    instance.redact = options.redact // replace redact directly
    const stringifiers = redaction(instance.redact, stringify)
    const formatOpts = { stringify: stringifiers[redactFmtSym] }
    instance[stringifySym] = stringify
    instance[stringifiersSym] = stringifiers
    instance[formatOptsSym] = formatOpts
  }

  if (typeof options.msgPrefix === 'string') {
    // child prefixes accumulate after the parent's prefix
    instance[msgPrefixSym] = (this[msgPrefixSym] || '') + options.msgPrefix
  }

  instance[chindingsSym] = asChindings(instance, bindings)
  if ((options.level !== undefined && options.level !== this.level) || options.hasOwnProperty('customLevels')) {
    const childLevel = options.level || this.level
    instance[setLevelSym](childLevel)
  }
  this.onChild(instance)
  return instance
}
/**
 * Return this logger's bindings as an object, minus the automatic
 * `pid` and `hostname` entries.
 *
 * @returns {Object}
 */
function bindings () {
  // chindings is a leading-comma JSON fragment, e.g. ',"pid":7068,"hostname":"myMac"'
  const chindings = this[chindingsSym]
  const parsed = JSON.parse('{' + chindings.slice(1) + '}')
  delete parsed.pid
  delete parsed.hostname
  return parsed
}
// Replace this logger's bindings with `newBindings` (re-serialized via
// asChindings) and drop the stale parsed-chindings cache.
function setBindings (newBindings) {
  const chindings = asChindings(this, newBindings)
  this[chindingsSym] = chindings
  delete this[parsedChindingsSym]
}
/**
 * Default strategy for creating `mergeObject` from arguments and the result from `mixin()`.
 * Fields from `mergeObject` have higher priority in this strategy.
 * Note: mutates and returns `mixinObject`.
 *
 * @param {Object} mergeObject The object a user has supplied to the logging function.
 * @param {Object} mixinObject The result of the `mixin` method.
 * @return {Object}
 */
function defaultMixinMergeStrategy (mergeObject, mixinObject) {
  return Object.assign(mixinObject, mergeObject)
}
// Core write path shared by every level method: normalizes the merging
// object (Error handling, mixin merge), serializes it to a JSON line via
// asJson and pushes the line to the destination stream.
function write (_obj, msg, num) {
  const t = this[timeSym]()
  const mixin = this[mixinSym]
  const errorKey = this[errorKeySym]
  const messageKey = this[messageKeySym]
  const mixinMergeStrategy = this[mixinMergeStrategySym] || defaultMixinMergeStrategy
  let obj
  const streamWriteHook = this[hooksSym].streamWrite

  if (_obj === undefined || _obj === null) {
    obj = {}
  } else if (_obj instanceof Error) {
    // a bare Error becomes { [errorKey]: err }; its message doubles as msg
    obj = { [errorKey]: _obj }
    if (msg === undefined) {
      msg = _obj.message
    }
  } else {
    obj = _obj
    if (msg === undefined && _obj[messageKey] === undefined && _obj[errorKey]) {
      msg = _obj[errorKey].message
    }
  }

  if (mixin) {
    obj = mixinMergeStrategy(obj, mixin(obj, num, this))
  }

  const s = this[asJsonSym](obj, msg, num, t)

  const stream = this[streamSym]
  if (stream[needsMetadataGsym] === true) {
    // destinations that opt in (Symbol.for('pino.metadata')) receive the
    // raw pieces of the last log line alongside the serialized string
    stream.lastLevel = num
    stream.lastObj = obj
    stream.lastMsg = msg
    stream.lastTime = t.slice(this[timeSliceIndexSym])
    stream.lastLogger = this // for child loggers
  }
  stream.write(streamWriteHook ? streamWriteHook(s) : s)
}
// Placeholder callback for flush() when none is supplied.
function noop () {}

// Flush the destination stream if it supports flush(); invokes `cb`
// (when provided) once flushing completes, or immediately otherwise.
function flush (cb) {
  if (cb != null && typeof cb !== 'function') {
    throw Error('callback must be a function')
  }
  const stream = this[streamSym]
  if (typeof stream.flush === 'function') {
    stream.flush(cb || noop)
  } else if (cb) cb()
}

118
node_modules/pino/lib/redaction.js generated vendored Normal file
View File

@@ -0,0 +1,118 @@
'use strict'
const fastRedact = require('fast-redact')
const { redactFmtSym, wildcardFirstSym } = require('./symbols')
const { rx, validator } = fastRedact
const validate = validator({
ERR_PATHS_MUST_BE_STRINGS: () => 'pino redacted paths must be strings',
ERR_INVALID_PATH: (s) => `pino redact paths array contains an invalid path (${s})`
})
const CENSOR = '[Redacted]'
const strict = false // TODO should this be configurable?
// Build per-top-level-key redactor functions from the user's redact
// options. Returns an object keyed by top-level path segment (plus the
// wildcard symbol) whose values censor the nested paths, with a
// whole-object fast-redact serializer stored under redactFmtSym.
function redaction (opts, serialize) {
  const { paths, censor } = handle(opts)

  const shape = paths.reduce((o, str) => {
    rx.lastIndex = 0
    const first = rx.exec(str)
    const next = rx.exec(str)

    // ns is the top-level path segment, brackets + quoting removed.
    let ns = first[1] !== undefined
      ? first[1].replace(/^(?:"|'|`)(.*)(?:"|'|`)$/, '$1')
      : first[0]

    if (ns === '*') {
      ns = wildcardFirstSym
    }

    // top level key:
    if (next === null) {
      o[ns] = null
      return o
    }

    // path with at least two segments:
    // if ns is already redacted at the top level, ignore lower level redactions
    if (o[ns] === null) {
      return o
    }

    const { index } = next
    const nextPath = `${str.substr(index, str.length - 1)}`

    o[ns] = o[ns] || []

    // shape is a mix of paths beginning with literal values and wildcard
    // paths [ "a.b.c", "*.b.z" ] should reduce to a shape of
    // { "a": [ "b.c", "b.z" ], *: [ "b.z" ] }
    // note: "b.z" is in both "a" and * arrays because "a" matches the wildcard.
    // (* entry has wildcardFirstSym as key)
    if (ns !== wildcardFirstSym && o[ns].length === 0) {
      // first time ns's get all '*' redactions so far
      o[ns].push(...(o[wildcardFirstSym] || []))
    }

    if (ns === wildcardFirstSym) {
      // new * path gets added to all previously registered literal ns's.
      Object.keys(o).forEach(function (k) {
        if (o[k]) {
          o[k].push(nextPath)
        }
      })
    }

    o[ns].push(nextPath)
    return o
  }, {})

  // the redactor assigned to the format symbol key
  // provides top level redaction for instances where
  // an object is interpolated into the msg string
  const result = {
    [redactFmtSym]: fastRedact({ paths, censor, serialize, strict })
  }

  const topCensor = (...args) => {
    return typeof censor === 'function' ? serialize(censor(...args)) : serialize(censor)
  }

  return [...Object.keys(shape), ...Object.getOwnPropertySymbols(shape)].reduce((o, k) => {
    // top level key:
    if (shape[k] === null) {
      o[k] = (value) => topCensor(value, [k])
    } else {
      // wrap a function censor so it receives the full path including the
      // top-level segment k
      const wrappedCensor = typeof censor === 'function'
        ? (value, path) => {
          return censor(value, [k, ...path])
        }
        : censor
      o[k] = fastRedact({
        paths: shape[k],
        censor: wrappedCensor,
        serialize,
        strict
      })
    }
    return o
  }, result)
}
// Normalize the user-facing redact option: a bare array becomes
// { paths, censor: CENSOR }; `remove: true` maps to an undefined censor
// so fast-redact deletes the keys entirely. Validates paths either way.
function handle (opts) {
  if (Array.isArray(opts)) {
    opts = { paths: opts, censor: CENSOR }
    validate(opts)
    return opts
  }
  let { paths, censor = CENSOR, remove } = opts
  if (Array.isArray(paths) === false) { throw Error('pino redact must contain an array of strings') }
  if (remove === true) censor = undefined
  validate({ paths, censor })

  return { paths, censor }
}
module.exports = redaction

74
node_modules/pino/lib/symbols.js generated vendored Normal file
View File

@@ -0,0 +1,74 @@
'use strict'
// --- private symbols: internal logger state, scoped to this pino version ---
// level machinery
const setLevelSym = Symbol('pino.setLevel')
const getLevelSym = Symbol('pino.getLevel')
const levelValSym = Symbol('pino.levelVal')
const levelCompSym = Symbol('pino.levelComp')
const useLevelLabelsSym = Symbol('pino.useLevelLabels')
const useOnlyCustomLevelsSym = Symbol('pino.useOnlyCustomLevels')
const mixinSym = Symbol('pino.mixin')

// cached fragments and child bindings
const lsCacheSym = Symbol('pino.lsCache')
const chindingsSym = Symbol('pino.chindings')

// serialization entry points
const asJsonSym = Symbol('pino.asJson')
const writeSym = Symbol('pino.write')
const redactFmtSym = Symbol('pino.redactFmt')

// configuration and output state
const timeSym = Symbol('pino.time')
const timeSliceIndexSym = Symbol('pino.timeSliceIndex')
const streamSym = Symbol('pino.stream')
const stringifySym = Symbol('pino.stringify')
const stringifySafeSym = Symbol('pino.stringifySafe')
const stringifiersSym = Symbol('pino.stringifiers')
const endSym = Symbol('pino.end')
const formatOptsSym = Symbol('pino.formatOpts')
const messageKeySym = Symbol('pino.messageKey')
const errorKeySym = Symbol('pino.errorKey')
const nestedKeySym = Symbol('pino.nestedKey')
const nestedKeyStrSym = Symbol('pino.nestedKeyStr')
const mixinMergeStrategySym = Symbol('pino.mixinMergeStrategy')
const msgPrefixSym = Symbol('pino.msgPrefix')

const wildcardFirstSym = Symbol('pino.wildcardFirst')

// public symbols, no need to use the same pino
// version for these
const serializersSym = Symbol.for('pino.serializers')
const formattersSym = Symbol.for('pino.formatters')
const hooksSym = Symbol.for('pino.hooks')
const needsMetadataGsym = Symbol.for('pino.metadata')
module.exports = {
setLevelSym,
getLevelSym,
levelValSym,
levelCompSym,
useLevelLabelsSym,
mixinSym,
lsCacheSym,
chindingsSym,
asJsonSym,
writeSym,
serializersSym,
redactFmtSym,
timeSym,
timeSliceIndexSym,
streamSym,
stringifySym,
stringifySafeSym,
stringifiersSym,
endSym,
formatOptsSym,
messageKeySym,
errorKeySym,
nestedKeySym,
wildcardFirstSym,
needsMetadataGsym,
useOnlyCustomLevelsSym,
formattersSym,
hooksSym,
nestedKeyStrSym,
mixinMergeStrategySym,
msgPrefixSym
}

11
node_modules/pino/lib/time.js generated vendored Normal file
View File

@@ -0,0 +1,11 @@
'use strict'
// Timestamp generators: each returns a pre-serialized JSON fragment
// (',"time":...') concatenated directly into the log line, or '' to omit
// the timestamp altogether.
const nullTime = () => ''
const epochTime = () => `,"time":${Date.now()}`
const unixTime = () => `,"time":${Math.round(Date.now() / 1000.0)}`
const isoTime = () => `,"time":"${new Date(Date.now()).toISOString()}"` // using Date.now() for testability

module.exports = { nullTime, epochTime, unixTime, isoTime }

424
node_modules/pino/lib/tools.js generated vendored Normal file
View File

@@ -0,0 +1,424 @@
'use strict'
/* eslint no-prototype-builtins: 0 */
const diagChan = require('node:diagnostics_channel')
const format = require('quick-format-unescaped')
const { mapHttpRequest, mapHttpResponse } = require('pino-std-serializers')
const SonicBoom = require('sonic-boom')
const onExit = require('on-exit-leak-free')
const {
lsCacheSym,
chindingsSym,
writeSym,
serializersSym,
formatOptsSym,
endSym,
stringifiersSym,
stringifySym,
stringifySafeSym,
wildcardFirstSym,
nestedKeySym,
formattersSym,
messageKeySym,
errorKeySym,
nestedKeyStrSym,
msgPrefixSym
} = require('./symbols')
const { isMainThread } = require('worker_threads')
const transport = require('./transport')
const asJsonChan = diagChan.tracingChannel('pino_asJson')
// Shared no-op used wherever a callback or disabled method is required.
function noop () {
}
// Build the logging function for a numeric `level`. When a logMethod
// `hook` is provided, the returned function delegates to the hook with the
// raw arguments, the real LOG function and the level.
function genLog (level, hook) {
  if (!hook) return LOG

  return function hookWrappedLog (...args) {
    hook.call(this, args, LOG, level)
  }

  function LOG (o, ...n) {
    if (typeof o === 'object') {
      let msg = o
      if (o !== null) {
        // map http req/res-shaped objects through the std serializer helpers
        if (o.method && o.headers && o.socket) {
          o = mapHttpRequest(o)
        } else if (typeof o.setHeader === 'function') {
          o = mapHttpResponse(o)
        }
      }
      let formatParams
      if (msg === null && n.length === 0) {
        formatParams = [null]
      } else {
        msg = n.shift()
        formatParams = n
      }
      // We do not use a coercive check for `msg` as it is
      // measurably slower than the explicit checks.
      if (typeof this[msgPrefixSym] === 'string' && msg !== undefined && msg !== null) {
        msg = this[msgPrefixSym] + msg
      }
      this[writeSym](o, format(msg, formatParams, this[formatOptsSym]), level)
    } else {
      let msg = o === undefined ? n.shift() : o
      // We do not use a coercive check for `msg` as it is
      // measurably slower than the explicit checks.
      if (typeof this[msgPrefixSym] === 'string' && msg !== undefined && msg !== null) {
        msg = this[msgPrefixSym] + msg
      }
      this[writeSym](null, format(msg, n, this[formatOptsSym]), level)
    }
  }
}
// Fast JSON string escaping for short strings. Only '"' (34) and '\' (92)
// need a backslash; any control char below 32 bails out to JSON.stringify,
// as do strings longer than 100 chars.
function asString (str) {
  const len = str.length
  if (len > 100) {
    return JSON.stringify(str)
  }
  let out = ''
  let copiedUpTo = 0
  let escaped = false
  let code = 255
  // stop as soon as a control character is seen (code < 32)
  for (let i = 0; i < len && code >= 32; i++) {
    code = str.charCodeAt(i)
    if (code === 34 || code === 92) {
      out += str.slice(copiedUpTo, i) + '\\'
      copiedUpTo = i
      escaped = true
    }
  }
  if (escaped) {
    out += str.slice(copiedUpTo)
  } else {
    out = str
  }
  // a control character forces the full (slow) JSON escape path
  return code < 32 ? JSON.stringify(str) : '"' + out + '"'
}
/**
 * `asJson` wraps `_asJson` in order to facilitate generating diagnostics.
 * When the 'pino_asJson' tracing channel has no subscribers this is a
 * direct call; otherwise the work is traced through the channel.
 *
 * @param {object} obj The merging object passed to the log method.
 * @param {string} msg The log message passed to the log method.
 * @param {number} num The log level number.
 * @param {number} time The log time in milliseconds.
 *
 * @returns {string}
 */
function asJson (obj, msg, num, time) {
  if (asJsonChan.hasSubscribers === false) {
    return _asJson.call(this, obj, msg, num, time)
  }
  // `arguments` here is asJson's own arguments object
  const store = { instance: this, arguments }
  return asJsonChan.traceSync(_asJson, store, this, obj, msg, num, time)
}
/**
 * `_asJson` parses all collected data and generates the finalized newline
 * delimited JSON string.
 *
 * @param {object} obj The merging object passed to the log method.
 * @param {string} msg The log message passed to the log method.
 * @param {number} num The log level number.
 * @param {number} time The log time in milliseconds.
 *
 * @returns {string} The finalized log string terminated with a newline.
 * @private
 */
function _asJson (obj, msg, num, time) {
  const stringify = this[stringifySym]
  const stringifySafe = this[stringifySafeSym]
  const stringifiers = this[stringifiersSym]
  const end = this[endSym]
  const chindings = this[chindingsSym]
  const serializers = this[serializersSym]
  const formatters = this[formattersSym]
  const messageKey = this[messageKeySym]
  const errorKey = this[errorKeySym]
  // start from the cached '{"level":N' prefix plus the time fragment
  let data = this[lsCacheSym][num] + time

  // we need the child bindings added to the output first so instance logged
  // objects can take precedence when JSON.parse-ing the resulting log line
  data = data + chindings

  let value
  if (formatters.log) {
    obj = formatters.log(obj)
  }
  const wildcardStringifier = stringifiers[wildcardFirstSym]
  let propStr = ''
  for (const key in obj) {
    value = obj[key]
    if (Object.prototype.hasOwnProperty.call(obj, key) && value !== undefined) {
      if (serializers[key]) {
        value = serializers[key](value)
      } else if (key === errorKey && serializers.err) {
        value = serializers.err(value)
      }

      const stringifier = stringifiers[key] || wildcardStringifier

      switch (typeof value) {
        case 'undefined':
        case 'function':
          continue
        case 'number':
          /* eslint no-fallthrough: "off" */
          // non-finite numbers are not valid JSON; emit null instead
          if (Number.isFinite(value) === false) {
            value = null
          }
        // this case explicitly falls through to the next one
        case 'boolean':
          if (stringifier) value = stringifier(value)
          break
        case 'string':
          value = (stringifier || asString)(value)
          break
        default:
          value = (stringifier || stringify)(value, stringifySafe)
      }
      if (value === undefined) continue
      const strKey = asString(key)
      propStr += ',' + strKey + ':' + value
    }
  }

  let msgStr = ''
  if (msg !== undefined) {
    value = serializers[messageKey] ? serializers[messageKey](msg) : msg
    const stringifier = stringifiers[messageKey] || wildcardStringifier

    switch (typeof value) {
      case 'function':
        break
      case 'number':
        /* eslint no-fallthrough: "off" */
        if (Number.isFinite(value) === false) {
          value = null
        }
      // this case explicitly falls through to the next one
      case 'boolean':
        if (stringifier) value = stringifier(value)
        msgStr = ',"' + messageKey + '":' + value
        break
      case 'string':
        value = (stringifier || asString)(value)
        msgStr = ',"' + messageKey + '":' + value
        break
      default:
        value = (stringifier || stringify)(value, stringifySafe)
        msgStr = ',"' + messageKey + '":' + value
    }
  }

  if (this[nestedKeySym] && propStr) {
    // place all the obj properties under the specified key
    // the nested key is already formatted from the constructor
    return data + this[nestedKeyStrSym] + propStr.slice(1) + '}' + msgStr + end
  } else {
    return data + propStr + msgStr + end
  }
}
// Serialize `bindings` into the chindings string (a leading-comma JSON
// fragment) appended to the instance's existing chindings. Reserved option
// keys are skipped; serializers/stringifiers are applied per key.
function asChindings (instance, bindings) {
  let value
  let data = instance[chindingsSym]
  const stringify = instance[stringifySym]
  const stringifySafe = instance[stringifySafeSym]
  const stringifiers = instance[stringifiersSym]
  const wildcardStringifier = stringifiers[wildcardFirstSym]
  const serializers = instance[serializersSym]
  const formatter = instance[formattersSym].bindings
  bindings = formatter(bindings)

  for (const key in bindings) {
    value = bindings[key]
    const valid = key !== 'level' &&
      key !== 'serializers' &&
      key !== 'formatters' &&
      key !== 'customLevels' &&
      bindings.hasOwnProperty(key) &&
      value !== undefined
    if (valid === true) {
      value = serializers[key] ? serializers[key](value) : value
      value = (stringifiers[key] || wildcardStringifier || stringify)(value, stringifySafe)
      if (value === undefined) continue
      data += ',"' + key + '":' + value
    }
  }
  return data
}
// True when the instance has an own `write` overriding its prototype's —
// i.e. something (e.g. a test or shim) has replaced stream.write.
function hasBeenTampered (stream) {
  const prototypeWrite = stream.constructor.prototype.write
  return stream.write !== prototypeWrite
}
// Create a SonicBoom destination that neutralizes itself on EPIPE and,
// for async (sync: false) streams on the main thread, registers an
// on-exit flush so buffered lines are not lost.
function buildSafeSonicBoom (opts) {
  const stream = new SonicBoom(opts)
  stream.on('error', filterBrokenPipe)
  // If we are sync: false, we must flush on exit
  if (!opts.sync && isMainThread) {
    onExit.register(stream, autoEnd)

    stream.on('close', function () {
      onExit.unregister(stream)
    })
  }
  return stream

  function filterBrokenPipe (err) {
    // Impossible to replicate across all operating systems
    /* istanbul ignore next */
    if (err.code === 'EPIPE') {
      // If we get EPIPE, we should stop logging here
      // however we have no control to the consumer of
      // SonicBoom, so we just overwrite the write method
      stream.write = noop
      stream.end = noop
      stream.flushSync = noop
      stream.destroy = noop
      return
    }
    stream.removeListener('error', filterBrokenPipe)
    stream.emit('error', err)
  }
}
// on-exit-leak-free hook: drain and close the stream at process exit.
// 'beforeExit' still has a live event loop, so flush asynchronously and
// end on drain; any other exit event flushes synchronously.
function autoEnd (stream, eventName) {
  // This check is needed only on some platforms
  /* istanbul ignore next */
  if (stream.destroyed) {
    return
  }

  if (eventName === 'beforeExit') {
    // We still have an event loop, let's use it
    stream.flush()
    stream.on('drain', function () {
      stream.end()
    })
  } else {
    // For some reason istanbul is not detecting this, but it's there
    /* istanbul ignore next */
    // We do not have an event loop, so flush synchronously
    stream.flushSync()
  }
}
// Build the argument normalizer used by the pino() constructor: resolves
// the (opts, stream) pair from the various supported call signatures
// (string destination, bare stream, transport options) and merges defaults.
function createArgsNormalizer (defaultOptions) {
  return function normalizeArgs (instance, caller, opts = {}, stream) {
    // support stream as a string
    if (typeof opts === 'string') {
      stream = buildSafeSonicBoom({ dest: opts })
      opts = {}
    } else if (typeof stream === 'string') {
      if (opts && opts.transport) {
        throw Error('only one of option.transport or stream can be specified')
      }
      stream = buildSafeSonicBoom({ dest: stream })
    } else if (opts instanceof SonicBoom || opts.writable || opts._writableState) {
      stream = opts
      opts = {}
    } else if (opts.transport) {
      if (opts.transport instanceof SonicBoom || opts.transport.writable || opts.transport._writableState) {
        throw Error('option.transport do not allow stream, please pass to option directly. e.g. pino(transport)')
      }
      if (opts.transport.targets && opts.transport.targets.length && opts.formatters && typeof opts.formatters.level === 'function') {
        throw Error('option.transport.targets do not allow custom level formatters')
      }

      let customLevels
      if (opts.customLevels) {
        customLevels = opts.useOnlyCustomLevels ? opts.customLevels : Object.assign({}, opts.levels, opts.customLevels)
      }
      stream = transport({ caller, ...opts.transport, levels: customLevels })
    }
    opts = Object.assign({}, defaultOptions, opts)
    opts.serializers = Object.assign({}, defaultOptions.serializers, opts.serializers)
    opts.formatters = Object.assign({}, defaultOptions.formatters, opts.formatters)

    if (opts.prettyPrint) {
      throw new Error('prettyPrint option is no longer supported, see the pino-pretty package (https://github.com/pinojs/pino-pretty)')
    }

    const { enabled, onChild } = opts
    if (enabled === false) opts.level = 'silent'
    if (!onChild) opts.onChild = noop
    if (!stream) {
      if (!hasBeenTampered(process.stdout)) {
        // If process.stdout.fd is undefined, it means that we are running
        // in a worker thread. Let's assume we are logging to file descriptor 1.
        stream = buildSafeSonicBoom({ fd: process.stdout.fd || 1 })
      } else {
        stream = process.stdout
      }
    }
    return { opts, stream }
  }
}
// JSON.stringify with a safe fallback: on failure (e.g. circular refs),
// use the supplied (or instance-bound) safe stringifier; if that also
// fails, emit a fixed placeholder string.
function stringify (obj, stringifySafeFn) {
  try {
    return JSON.stringify(obj)
  } catch (_) {
    try {
      const stringify = stringifySafeFn || this[stringifySafeSym]
      return stringify(obj)
    } catch (_) {
      return '"[unable to serialize, circular reference is too complex to analyze]"'
    }
  }
}
/**
 * Bundle the three formatter functions into the object stored on the
 * logger under the formatters symbol.
 *
 * @param {Function} level formats the level output
 * @param {Function} bindings formats child bindings
 * @param {Function} log formats the log object
 * @returns {{level: Function, bindings: Function, log: Function}}
 */
function buildFormatters (level, bindings, log) {
  return { level, bindings, log }
}
/**
 * Convert a string integer file descriptor to a proper native integer
 * file descriptor.
 *
 * @param {string} destination The file descriptor string to attempt to convert.
 *
 * @returns {Number}
 */
function normalizeDestFileDescriptor (destination) {
  if (typeof destination === 'string') {
    const fd = Number(destination)
    if (Number.isFinite(fd)) {
      return fd
    }
  }
  // destination could be undefined if we are in a worker
  if (destination === undefined) {
    // This is stdout in UNIX systems
    return 1
  }
  return destination
}
module.exports = {
noop,
buildSafeSonicBoom,
asChindings,
asJson,
genLog,
createArgsNormalizer,
stringify,
buildFormatters,
normalizeDestFileDescriptor
}

56
node_modules/pino/lib/transport-stream.js generated vendored Normal file
View File

@@ -0,0 +1,56 @@
'use strict'
const { realImport, realRequire } = require('real-require')
module.exports = loadTransportStreamBuilder
/**
 * Loads & returns a function to build transport streams.
 * Tries a dynamic import of the file:// URL first; falls back to require
 * for TypeScript targets, bare module names and pkg-bundled binaries.
 *
 * @param {string} target
 * @returns {Promise<function(object): Promise<import('node:stream').Writable>>}
 * @throws {Error} In case the target module does not export a function
 */
async function loadTransportStreamBuilder (target) {
  let fn
  try {
    const toLoad = target.startsWith('file://') ? target : 'file://' + target

    if (toLoad.endsWith('.ts') || toLoad.endsWith('.cts')) {
      // TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ).
      if (process[Symbol.for('ts-node.register.instance')]) {
        realRequire('ts-node/register')
      } else if (process.env && process.env.TS_NODE_DEV) {
        realRequire('ts-node-dev')
      }
      // TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees.
      fn = realRequire(decodeURIComponent(target))
    } else {
      fn = (await realImport(toLoad))
    }
  } catch (error) {
    // See this PR for details: https://github.com/pinojs/thread-stream/pull/34
    if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND')) {
      fn = realRequire(target)
    } else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') {
      // When bundled with pkg, an undefined error is thrown when called with realImport
      // When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport
      // More info at: https://github.com/pinojs/thread-stream/issues/143
      try {
        fn = realRequire(decodeURIComponent(target))
      } catch {
        throw error
      }
    } else {
      throw error
    }
  }

  // Depending on how the default export is performed, and on how the code is
  // transpiled, we may find cases of two nested "default" objects.
  // See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
  if (typeof fn === 'object') fn = fn.default
  if (typeof fn === 'object') fn = fn.default
  if (typeof fn !== 'function') throw Error('exported worker is not a function')

  return fn
}

167
node_modules/pino/lib/transport.js generated vendored Normal file
View File

@@ -0,0 +1,167 @@
'use strict'
const { createRequire } = require('module')
const getCallers = require('./caller')
const { join, isAbsolute, sep } = require('node:path')
const sleep = require('atomic-sleep')
const onExit = require('on-exit-leak-free')
const ThreadStream = require('thread-stream')
// Attach process-exit hooks to a transport stream: end it on process exit,
// flush it before exit, and remove both hooks once the stream closes.
// This is leak free, it does not leave event handlers.
function setupOnExit (stream) {
  onExit.register(stream, autoEnd)
  onExit.registerBeforeExit(stream, flush)
  stream.on('close', () => {
    onExit.unregister(stream)
  })
}
/**
 * Spawns the transport worker thread and returns the ThreadStream wrapping it.
 * A temporary process 'exit' handler flushes data buffered before the worker
 * is ready; once the stream emits 'ready', that handler is replaced by the
 * leak-free hooks installed via setupOnExit.
 *
 * NB: the local `onExit` function below shadows the `onExit` module imported
 * at the top of this file, within this function's scope.
 *
 * @param {string} filename worker entry module to run in the thread
 * @param {object} workerData data forwarded to the worker
 * @param {object} workerOpts worker options; `autoEnd: false` disables the
 *   automatic end-on-exit behavior
 * @param {boolean} sync whether writes are synchronous
 * @returns {ThreadStream}
 */
function buildStream (filename, workerData, workerOpts, sync) {
  const stream = new ThreadStream({
    filename,
    workerData,
    workerOpts,
    sync
  })
  stream.on('ready', onReady)
  stream.on('close', function () {
    process.removeListener('exit', onExit)
  })
  process.on('exit', onExit)
  function onReady () {
    // Worker is up: the early-exit safety net is no longer needed and the
    // stream must not keep the event loop alive on its own.
    process.removeListener('exit', onExit)
    stream.unref()
    if (workerOpts.autoEnd !== false) {
      setupOnExit(stream)
    }
  }
  function onExit () {
    /* istanbul ignore next */
    if (stream.closed) {
      return
    }
    stream.flushSync()
    // Apparently there is a very sporadic race condition
    // that in certain OS would prevent the messages to be flushed
    // because the thread might not have been created still.
    // Unfortunately we need to sleep(100) in this case.
    sleep(100)
    stream.end()
  }
  return stream
}
// Exit hook: ref the stream so the process stays alive long enough to flush
// pending data synchronously, end it, then unref once it has fully closed.
function autoEnd (stream) {
  stream.ref()
  stream.flushSync()
  stream.end()
  stream.once('close', () => stream.unref())
}
// Before-exit hook: synchronously drain any buffered log data so nothing is
// lost when the process terminates.
function flush (threadStream) {
  threadStream.flushSync()
}
/**
 * Creates a ThreadStream that runs the configured transport(s) in a worker
 * thread. Exactly one of `target`, `targets` or `pipeline` should be given;
 * passing both `target` and `targets` throws. With `targets` or `pipeline`,
 * the bundled worker.js performs the fan-out inside the thread.
 *
 * @param {object} fullOptions transport configuration (target/targets/
 *   pipeline, levels, dedupe, worker options, caller list, sync flag)
 * @returns {ThreadStream} the stream pino writes log lines into
 * @throws {Error} when both `target` and `targets` are specified, or a
 *   transport target cannot be resolved
 */
function transport (fullOptions) {
  const { pipeline, targets, levels, dedupe, worker = {}, caller = getCallers(), sync = false } = fullOptions
  const options = {
    ...fullOptions.options
  }
  // Backwards compatibility
  const callers = typeof caller === 'string' ? [caller] : caller
  // This will be eventually modified by bundlers
  const bundlerOverrides = '__bundlerPathsOverrides' in globalThis ? globalThis.__bundlerPathsOverrides : {}
  let target = fullOptions.target
  if (target && targets) {
    throw new Error('only one of target or targets can be specified')
  }
  if (targets) {
    // Multiple destinations: delegate the fan-out to the bundled worker.js,
    // passing the resolved targets/pipelines through its options.
    target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js')
    options.targets = targets.filter(dest => dest.target).map((dest) => {
      return {
        ...dest,
        target: fixTarget(dest.target)
      }
    })
    options.pipelines = targets.filter(dest => dest.pipeline).map((dest) => {
      return dest.pipeline.map((t) => {
        return {
          ...t,
          level: dest.level, // duplicate the pipeline `level` property defined in the upper level
          target: fixTarget(t.target)
        }
      })
    })
  } else if (pipeline) {
    // A single pipeline also goes through worker.js.
    target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js')
    options.pipelines = [pipeline.map((dest) => {
      return {
        ...dest,
        target: fixTarget(dest.target)
      }
    })]
  }
  if (levels) {
    options.levels = levels
  }
  if (dedupe) {
    options.dedupe = dedupe
  }
  options.pinoWillSendConfig = true
  return buildStream(fixTarget(target), options, worker, sync)

  // Resolves a transport specifier to something the worker thread can load:
  // bundler overrides and absolute/file:// paths pass through, 'pino/file' is
  // special-cased, and anything else is resolved relative to the caller files.
  function fixTarget (origin) {
    origin = bundlerOverrides[origin] || origin
    if (isAbsolute(origin) || origin.indexOf('file://') === 0) {
      return origin
    }
    if (origin === 'pino/file') {
      return join(__dirname, '..', 'file.js')
    }
    let fixTarget
    // Try resolving relative to each candidate caller file in turn; the
    // first successful resolution wins.
    for (const filePath of callers) {
      try {
        const context = filePath === 'node:repl'
          ? process.cwd() + sep // the REPL has no filename to resolve from
          : filePath
        fixTarget = createRequire(context).resolve(origin)
        break
      } catch (err) {
        // Silent catch
        continue
      }
    }
    if (!fixTarget) {
      throw new Error(`unable to determine transport target for "${origin}"`)
    }
    return fixTarget
  }
}
module.exports = transport

194
node_modules/pino/lib/worker.js generated vendored Normal file
View File

@@ -0,0 +1,194 @@
'use strict'
const EE = require('node:events')
const { pipeline, PassThrough } = require('node:stream')
const pino = require('../pino.js')
const build = require('pino-abstract-transport')
const loadTransportStreamBuilder = require('./transport-stream')
// This file is not checked by the code coverage tool,
// as it is not reliable.
/* istanbul ignore file */
/*
* > Multiple targets & pipelines
*
*
* ┌─────────────────────────────────────────────────┐ ┌─────┐
* │ │ │ p │
* │ │ │ i │
* │ target │ │ n │
* │ │ ────────────────────────────────┼────┤ o │
* │ targets │ target │ │ . │
* │ ────────────► │ ────────────────────────────────┼────┤ m │ source
* │ │ target │ │ u │ │
* │ │ ────────────────────────────────┼────┤ l │ │write
* │ │ │ │ t │ ▼
* │ │ pipeline ┌───────────────┐ │ │ i │ ┌────────┐
* │ │ ──────────► │ PassThrough ├───┼────┤ s ├──────┤ │
* │ │ └───────────────┘ │ │ t │ write│ Thread │
* │ │ │ │ r │◄─────┤ Stream │
* │ │ pipeline ┌───────────────┐ │ │ e │ │ │
* │ │ ──────────► │ PassThrough ├───┼────┤ a │ └────────┘
* │ └───────────────┘ │ │ m │
* │ │ │ │
* └─────────────────────────────────────────────────┘ └─────┘
*
*
*
* > One single pipeline or target
*
*
* source
* │
* ┌────────────────────────────────────────────────┐ │write
* │ │ ▼
* │ │ ┌────────┐
* │ targets │ target │ │ │
* │ ────────────► │ ──────────────────────────────┤ │ │
* │ │ │ │ │
* │ ├──────┤ │
* │ │ │ │
* │ │ │ │
* │ OR │ │ │
* │ │ │ │
* │ │ │ │
* │ ┌──────────────┐ │ │ │
* │ targets │ pipeline │ │ │ │ Thread │
* │ ────────────► │ ────────────►│ PassThrough ├─┤ │ Stream │
* │ │ │ │ │ │ │
* │ └──────────────┘ │ │ │
* │ │ │ │
* │ OR │ write│ │
* │ │◄─────┤ │
* │ │ │ │
* │ ┌──────────────┐ │ │ │
* │ pipeline │ │ │ │ │
* │ ──────────────►│ PassThrough ├────────────────┤ │ │
* │ │ │ │ │ │
* │ └──────────────┘ │ └────────┘
* │ │
* │ │
* └────────────────────────────────────────────────┘
*/
/**
 * Worker entry point spawned by lib/transport.js inside a ThreadStream.
 * Builds a stream for every configured target and pipeline, then returns
 * either that single stream directly (one destination) or a
 * pino-abstract-transport stream that fans lines out via pino.multistream.
 *
 * @param {object} config worker data forwarded from transport.js
 * @param {object[]} [config.targets] `{ target, options, level }` descriptors
 * @param {object[][]} [config.pipelines] arrays of descriptors, each array
 *   forming one pipeline
 * @param {Record<string, number>} [config.levels] custom level mapping
 * @param {boolean} [config.dedupe] multistream dedupe flag
 * @returns {Promise<import('node:stream').Writable>}
 */
module.exports = async function ({ targets, pipelines, levels, dedupe }) {
  // Collected `{ level, stream }` entries consumed by pino.multistream below.
  const targetStreams = []
  // Process targets
  if (targets && targets.length) {
    targets = await Promise.all(targets.map(async (t) => {
      const fn = await loadTransportStreamBuilder(t.target)
      const stream = await fn(t.options)
      return {
        level: t.level,
        stream
      }
    }))
    targetStreams.push(...targets)
  }
  // Process pipelines
  if (pipelines && pipelines.length) {
    pipelines = await Promise.all(
      pipelines.map(async (p) => {
        let level
        const pipeDests = await Promise.all(
          p.map(async (t) => {
            // level assigned to pipeline is duplicated over all its targets, just store it
            level = t.level
            const fn = await loadTransportStreamBuilder(t.target)
            const stream = await fn(t.options)
            return stream
          }
        ))
        return {
          level,
          stream: createPipeline(pipeDests)
        }
      })
    )
    targetStreams.push(...pipelines)
  }
  // Skip building the multistream step if either one single pipeline or target is defined and
  // return directly the stream instance back to TreadStream.
  // This is equivalent to define either:
  //
  //    pino.transport({ target: ... })
  //
  // OR
  //
  //    pino.transport({ pipeline: ... })
  if (targetStreams.length === 1) {
    return targetStreams[0].stream
  } else {
    return build(process, {
      parse: 'lines',
      metadata: true,
      close (err, cb) {
        // End every destination and invoke `cb` only after all of them
        // have emitted 'close'.
        let expected = 0
        for (const transport of targetStreams) {
          expected++
          transport.stream.on('close', closeCb)
          transport.stream.end()
        }
        function closeCb () {
          if (--expected === 0) {
            cb(err)
          }
        }
      }
    })
  }
  // TODO: Why split2 was not used for pipelines?
  // NB: this local `process` function shadows the global `process` object
  // within the enclosing function's scope.
  function process (stream) {
    const multi = pino.multistream(targetStreams, { levels, dedupe })
    // TODO manage backpressure
    stream.on('data', function (chunk) {
      // Forward the per-line metadata set by pino-abstract-transport so
      // multistream can route by level.
      const { lastTime, lastMsg, lastObj, lastLevel } = this
      multi.lastLevel = lastLevel
      multi.lastTime = lastTime
      multi.lastMsg = lastMsg
      multi.lastObj = lastObj
      // TODO handle backpressure
      multi.write(chunk + '\n')
    })
  }
  /**
   * Creates a pipeline using the provided streams and return an instance of `PassThrough` stream
   * as a source for the pipeline.
   *
   * @param {(TransformStream|WritableStream)[]} streams An array of streams.
   * All intermediate streams in the array *MUST* be `Transform` streams and only the last one `Writable`.
   * @returns A `PassThrough` stream instance representing the source stream of the pipeline
   */
  function createPipeline (streams) {
    const ee = new EE()
    const stream = new PassThrough({
      autoDestroy: true,
      destroy (_, cb) {
        // Complete destruction only once the downstream pipeline has either
        // errored or closed.
        ee.on('error', cb)
        ee.on('closed', cb)
      }
    })
    pipeline(stream, ...streams, function (err) {
      // Premature close is expected when the source is destroyed first.
      if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
        ee.emit('error', err)
        return
      }
      ee.emit('closed')
    })
    return stream
  }
}

121
node_modules/pino/package.json generated vendored Normal file
View File

@@ -0,0 +1,121 @@
{
"name": "pino",
"version": "9.10.0",
"description": "super fast, all natural json logger",
"main": "pino.js",
"type": "commonjs",
"types": "pino.d.ts",
"browser": "./browser.js",
"scripts": {
"docs": "docsify serve",
"browser-test": "airtap --local 8080 test/browser*test.js",
"lint": "eslint .",
"prepublishOnly": "tap --no-check-coverage test/internals/version.test.js",
"test": "npm run lint && npm run transpile && tap --ts && jest test/jest && npm run test-types",
"test-ci": "npm run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly && npm run test-types",
"test-ci-pnpm": "pnpm run lint && npm run transpile && tap --ts --no-coverage --no-check-coverage && pnpm run test-types",
"test-ci-yarn-pnp": "yarn run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly",
"test-types": "tsc && tsd && ts-node test/types/pino.ts && attw --pack .",
"test:smoke": "smoker smoke:pino && smoker smoke:browser && smoker smoke:file",
"smoke:pino": "node ./pino.js",
"smoke:browser": "node ./browser.js",
"smoke:file": "node ./file.js",
"transpile": "node ./test/fixtures/ts/transpile.cjs",
"cov-ui": "tap --ts --coverage-report=html",
"bench": "node benchmarks/utils/runbench all",
"bench-basic": "node benchmarks/utils/runbench basic",
"bench-object": "node benchmarks/utils/runbench object",
"bench-deep-object": "node benchmarks/utils/runbench deep-object",
"bench-multi-arg": "node benchmarks/utils/runbench multi-arg",
"bench-long-string": "node benchmarks/utils/runbench long-string",
"bench-child": "node benchmarks/utils/runbench child",
"bench-child-child": "node benchmarks/utils/runbench child-child",
"bench-child-creation": "node benchmarks/utils/runbench child-creation",
"bench-formatters": "node benchmarks/utils/runbench formatters",
"update-bench-doc": "node benchmarks/utils/generate-benchmark-doc > docs/benchmarks.md"
},
"bin": {
"pino": "./bin.js"
},
"precommit": "test",
"repository": {
"type": "git",
"url": "git+https://github.com/pinojs/pino.git"
},
"keywords": [
"fast",
"logger",
"stream",
"json"
],
"author": "Matteo Collina <hello@matteocollina.com>",
"contributors": [
"David Mark Clements <huperekchuno@googlemail.com>",
"James Sumners <james.sumners@gmail.com>",
"Thomas Watson Steen <w@tson.dk> (https://twitter.com/wa7son)"
],
"license": "MIT",
"bugs": {
"url": "https://github.com/pinojs/pino/issues"
},
"homepage": "https://getpino.io",
"devDependencies": {
"@arethetypeswrong/cli": "^0.18.1",
"@matteo.collina/tspl": "^0.2.0",
"@types/flush-write-stream": "^1.0.0",
"@types/node": "^24.0.8",
"@types/tap": "^15.0.6",
"@yao-pkg/pkg": "6.3.0",
"airtap": "5.0.0",
"bole": "^5.0.5",
"bunyan": "^1.8.14",
"debug": "^4.3.4",
"docsify-cli": "^4.4.4",
"eslint": "^8.17.0",
"eslint-config-standard": "^17.0.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-n": "15.7.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^6.0.0",
"execa": "^5.0.0",
"fastbench": "^1.0.1",
"flush-write-stream": "^2.0.0",
"import-fresh": "^3.2.1",
"jest": "^30.0.3",
"log": "^6.0.0",
"loglevel": "^1.6.7",
"midnight-smoker": "1.1.1",
"pino-pretty": "^13.0.0",
"pre-commit": "^1.2.2",
"proxyquire": "^2.1.3",
"pump": "^3.0.0",
"rimraf": "^6.0.1",
"semver": "^7.3.7",
"split2": "^4.0.0",
"steed": "^1.1.3",
"strip-ansi": "^6.0.0",
"tap": "^16.2.0",
"tape": "^5.5.3",
"through2": "^4.0.0",
"ts-node": "^10.9.1",
"tsd": "^0.33.0",
"typescript": "~5.9.2",
"winston": "^3.7.2"
},
"dependencies": {
"atomic-sleep": "^1.0.0",
"fast-redact": "^3.1.1",
"on-exit-leak-free": "^2.1.0",
"pino-abstract-transport": "^2.0.0",
"pino-std-serializers": "^7.0.0",
"process-warning": "^5.0.0",
"quick-format-unescaped": "^4.0.3",
"real-require": "^0.2.0",
"safe-stable-stringify": "^2.3.1",
"sonic-boom": "^4.0.1",
"thread-stream": "^3.0.0"
},
"tsd": {
"directory": "test/types"
}
}

BIN
node_modules/pino/pino-banner.png generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

BIN
node_modules/pino/pino-logo-hire.png generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

BIN
node_modules/pino/pino-tree.png generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

881
node_modules/pino/pino.d.ts generated vendored Normal file
View File

@@ -0,0 +1,881 @@
// Project: https://github.com/pinojs/pino.git, http://getpino.io
// Definitions by: Peter Snider <https://github.com/psnider>
// BendingBender <https://github.com/BendingBender>
// Christian Rackerseder <https://github.com/screendriver>
// GP <https://github.com/paambaati>
// Alex Ferrando <https://github.com/alferpal>
// Oleksandr Sidko <https://github.com/mortiy>
// Harris Lummis <https://github.com/lummish>
// Raoul Jaeckel <https://github.com/raoulus>
// Cory Donkin <https://github.com/Cooryd>
// Adam Vigneaux <https://github.com/AdamVig>
// Austin Beer <https://github.com/austin-beer>
// Michel Nemnom <https://github.com/Pegase745>
// Igor Savin <https://github.com/kibertoad>
// James Bromwell <https://github.com/thw0rted>
// TypeScript Version: 4.4
import type { EventEmitter } from "events";
import * as pinoStdSerializers from "pino-std-serializers";
import type { SonicBoom, SonicBoomOpts } from "sonic-boom";
import type { WorkerOptions } from "worker_threads";
declare namespace pino {
//// Non-exported types and interfaces
// ToDo https://github.com/pinojs/thread-stream/issues/24
type ThreadStream = any
type TimeFn = () => string;
type MixinFn<CustomLevels extends string = never> = (mergeObject: object, level: number, logger:Logger<CustomLevels>) => object;
type MixinMergeStrategyFn = (mergeObject: object, mixinObject: object) => object;
type CustomLevelLogger<CustomLevels extends string, UseOnlyCustomLevels extends boolean = boolean> = {
/**
* Define additional logging levels.
*/
customLevels: { [level in CustomLevels]: number };
/**
* Use only defined `customLevels` and omit Pino's levels.
*/
useOnlyCustomLevels: UseOnlyCustomLevels;
} & {
// This will override default log methods
[K in Exclude<Level, CustomLevels>]: UseOnlyCustomLevels extends true ? never : LogFn;
} & {
[level in CustomLevels]: LogFn;
};
/**
* A synchronous callback that will run on each creation of a new child.
* @param child: The newly created child logger instance.
*/
type OnChildCallback<CustomLevels extends string = never> = (child: Logger<CustomLevels>) => void
export interface redactOptions {
paths: string[];
censor?: string | ((value: any, path: string[]) => any);
remove?: boolean;
}
export interface LoggerExtras<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean> extends EventEmitter {
/**
* Exposes the Pino package version. Also available on the exported pino function.
*/
readonly version: string;
levels: LevelMapping;
/**
* Outputs the level as a string instead of integer.
*/
useLevelLabels: boolean;
/**
* Returns the integer value for the logger instance's logging level.
*/
levelVal: number;
/**
* Creates a child logger, setting all key-value pairs in `bindings` as properties in the log lines. All serializers will be applied to the given pair.
* Child loggers use the same output stream as the parent and inherit the current log level of the parent at the time they are spawned.
* From v2.x.x the log level of a child is mutable (whereas in v1.x.x it was immutable), and can be set independently of the parent.
* If a `level` property is present in the object passed to `child` it will override the child logger level.
*
* @param bindings: an object of key-value pairs to include in log lines as properties.
* @param options: an options object that will override child logger inherited options.
* @returns a child logger instance.
*/
child<ChildCustomLevels extends string = never>(bindings: Bindings, options?: ChildLoggerOptions<ChildCustomLevels>): Logger<CustomLevels | ChildCustomLevels>;
/**
* This can be used to modify the callback function on creation of a new child.
*/
onChild: OnChildCallback<CustomLevels>;
/**
* Registers a listener function that is triggered when the level is changed.
* Note: When browserified, this functionality will only be available if the `events` module has been required elsewhere
* (e.g. if you're using streams in the browser). This allows for a trade-off between bundle size and functionality.
*
* @param event: only ever fires the `'level-change'` event
* @param listener: The listener is passed four arguments: `levelLabel`, `levelValue`, `previousLevelLabel`, `previousLevelValue`.
*/
on(event: "level-change", listener: LevelChangeEventListener<CustomLevels, UseOnlyCustomLevels>): this;
addListener(event: "level-change", listener: LevelChangeEventListener<CustomLevels, UseOnlyCustomLevels>): this;
once(event: "level-change", listener: LevelChangeEventListener<CustomLevels, UseOnlyCustomLevels>): this;
prependListener(event: "level-change", listener: LevelChangeEventListener<CustomLevels, UseOnlyCustomLevels>): this;
prependOnceListener(event: "level-change", listener: LevelChangeEventListener<CustomLevels, UseOnlyCustomLevels>): this;
removeListener(event: "level-change", listener: LevelChangeEventListener<CustomLevels, UseOnlyCustomLevels>): this;
/**
* A utility method for determining if a given log level will write to the destination.
*/
isLevelEnabled(level: LevelWithSilentOrString): boolean;
/**
* Returns an object containing all the current bindings, cloned from the ones passed in via logger.child().
*/
bindings(): Bindings;
/**
* Adds to the bindings of this logger instance.
* Note: Does not overwrite bindings. Can potentially result in duplicate keys in log lines.
*
* @param bindings: an object of key-value pairs to include in log lines as properties.
*/
setBindings(bindings: Bindings): void;
/**
* Flushes the content of the buffer when using pino.destination({ sync: false }).
* call the callback when finished
*/
flush(cb?: (err?: Error) => void): void;
}
//// Exported types and interfaces
export interface BaseLogger {
/**
* Set this property to the desired logging level. In order of priority, available levels are:
*
* - 'fatal'
* - 'error'
* - 'warn'
* - 'info'
* - 'debug'
* - 'trace'
*
* The logging level is a __minimum__ level. For instance if `logger.level` is `'info'` then all `'fatal'`, `'error'`, `'warn'`,
* and `'info'` logs will be enabled.
*
* You can pass `'silent'` to disable logging.
*/
level: LevelWithSilentOrString;
/**
* Log at `'fatal'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
* If more args follows `msg`, these will be used to format `msg` using `util.format`.
*
* @typeParam T: the interface of the object being serialized. Default is object.
* @param obj: object to be serialized
* @param msg: the log message to write
* @param ...args: format string values when `msg` is a format string
*/
fatal: LogFn;
/**
* Log at `'error'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
* If more args follows `msg`, these will be used to format `msg` using `util.format`.
*
* @typeParam T: the interface of the object being serialized. Default is object.
* @param obj: object to be serialized
* @param msg: the log message to write
* @param ...args: format string values when `msg` is a format string
*/
error: LogFn;
/**
* Log at `'warn'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
* If more args follows `msg`, these will be used to format `msg` using `util.format`.
*
* @typeParam T: the interface of the object being serialized. Default is object.
* @param obj: object to be serialized
* @param msg: the log message to write
* @param ...args: format string values when `msg` is a format string
*/
warn: LogFn;
/**
* Log at `'info'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
* If more args follows `msg`, these will be used to format `msg` using `util.format`.
*
* @typeParam T: the interface of the object being serialized. Default is object.
* @param obj: object to be serialized
* @param msg: the log message to write
* @param ...args: format string values when `msg` is a format string
*/
info: LogFn;
/**
* Log at `'debug'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
* If more args follows `msg`, these will be used to format `msg` using `util.format`.
*
* @typeParam T: the interface of the object being serialized. Default is object.
* @param obj: object to be serialized
* @param msg: the log message to write
* @param ...args: format string values when `msg` is a format string
*/
debug: LogFn;
/**
* Log at `'trace'` level the given msg. If the first argument is an object, all its properties will be included in the JSON line.
* If more args follows `msg`, these will be used to format `msg` using `util.format`.
*
* @typeParam T: the interface of the object being serialized. Default is object.
* @param obj: object to be serialized
* @param msg: the log message to write
* @param ...args: format string values when `msg` is a format string
*/
trace: LogFn;
/**
* Noop function.
*/
silent: LogFn;
/**
* Get `msgPrefix` of the logger instance.
*
* See {@link https://github.com/pinojs/pino/blob/main/docs/api.md#msgprefix-string}.
*/
get msgPrefix(): string | undefined;
}
export type Bindings = Record<string, any>;
export type Level = "fatal" | "error" | "warn" | "info" | "debug" | "trace";
export type LevelOrString = Level | (string & {});
export type LevelWithSilent = Level | "silent";
export type LevelWithSilentOrString = LevelWithSilent | (string & {});
export type SerializerFn = (value: any) => any;
export type WriteFn = (o: object) => void;
export type LevelChangeEventListener<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean> = (
lvl: LevelWithSilentOrString,
val: number,
prevLvl: LevelWithSilentOrString,
prevVal: number,
logger: Logger<CustomLevels, UseOnlyCustomLevels>
) => void;
export type LogDescriptor = Record<string, any>;
export type Logger<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean> = BaseLogger & LoggerExtras<CustomLevels> & CustomLevelLogger<CustomLevels, UseOnlyCustomLevels>;
export type SerializedError = pinoStdSerializers.SerializedError;
export type SerializedResponse = pinoStdSerializers.SerializedResponse;
export type SerializedRequest = pinoStdSerializers.SerializedRequest;
export interface TransportTargetOptions<TransportOptions = Record<string, any>> {
target: string
options?: TransportOptions
level?: LevelWithSilentOrString
}
export interface TransportBaseOptions<TransportOptions = Record<string, any>> {
options?: TransportOptions
worker?: WorkerOptions & { autoEnd?: boolean}
}
export interface TransportSingleOptions<TransportOptions = Record<string, any>> extends TransportBaseOptions<TransportOptions>{
target: string
}
export interface TransportPipelineOptions<TransportOptions = Record<string, any>> extends TransportBaseOptions<TransportOptions>{
pipeline: TransportSingleOptions<TransportOptions>[]
level?: LevelWithSilentOrString
}
export interface TransportMultiOptions<TransportOptions = Record<string, any>> extends TransportBaseOptions<TransportOptions>{
targets: readonly (TransportTargetOptions<TransportOptions>|TransportPipelineOptions<TransportOptions>)[],
levels?: Record<string, number>
dedupe?: boolean
}
export interface MultiStreamOptions {
levels?: Record<string, number>
dedupe?: boolean
}
export interface DestinationStream {
write(msg: string): void;
}
interface DestinationStreamHasMetadata {
[symbols.needsMetadataGsym]: true;
lastLevel: number;
lastTime: string;
lastMsg: string;
lastObj: object;
lastLogger: Logger;
}
export type DestinationStreamWithMetadata = DestinationStream & ({ [symbols.needsMetadataGsym]?: false } | DestinationStreamHasMetadata);
export interface StreamEntry<TLevel = Level> {
stream: DestinationStream
level?: TLevel
}
export interface MultiStreamRes<TOriginLevel = Level> {
write: (data: any) => void,
add: <TLevel = Level>(dest: StreamEntry<TLevel> | DestinationStream) => MultiStreamRes<TOriginLevel & TLevel>,
flushSync: () => void,
minLevel: number,
streams: StreamEntry<TOriginLevel>[],
clone<TLevel = Level>(level: TLevel): MultiStreamRes<TLevel>,
}
export interface LevelMapping {
/**
* Returns the mappings of level names to their respective internal number representation.
*/
values: { [level: string]: number };
/**
* Returns the mappings of level internal level numbers to their string representations.
*/
labels: { [level: number]: string };
}
type PlaceholderSpecifier = 'd' | 's' | 'j' | 'o' | 'O';
type PlaceholderTypeMapping<T extends PlaceholderSpecifier> = T extends 'd'
? number
: T extends 's'
? unknown
: T extends 'j' | 'o' | 'O'
? object
: never;
type ParseLogFnArgs<
T,
Acc extends unknown[] = [],
> = T extends `${infer _}%${infer Placeholder}${infer Rest}`
? Placeholder extends PlaceholderSpecifier
? ParseLogFnArgs<Rest, [...Acc, PlaceholderTypeMapping<Placeholder>]>
: ParseLogFnArgs<Rest, Acc>
: Acc;
export interface LogFn {
<TMsg extends string = string>(msg: TMsg, ...args: ParseLogFnArgs<TMsg>): void;
<T, TMsg extends string = string>(obj: T, msg?: T extends string ? never : TMsg, ...args: ParseLogFnArgs<TMsg> extends [unknown, ...unknown[]] ? ParseLogFnArgs<TMsg> : unknown[]): void;
}
export interface LoggerOptions<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean> {
transport?: TransportSingleOptions | TransportMultiOptions | TransportPipelineOptions
/**
* Avoid error causes by circular references in the object tree. Default: `true`.
*/
safe?: boolean;
/**
* The name of the logger. Default: `undefined`.
*/
name?: string;
/**
* an object containing functions for custom serialization of objects.
* These functions should return an JSONifiable object and they should never throw. When logging an object,
* each top-level property matching the exact key of a serializer will be serialized using the defined serializer.
*/
serializers?: { [key: string]: SerializerFn };
/**
* Enables or disables the inclusion of a timestamp in the log message. If a function is supplied, it must
* synchronously return a JSON string representation of the time. If set to `false`, no timestamp will be included in the output.
* See stdTimeFunctions for a set of available functions for passing in as a value for this option.
* Caution: any sort of formatted time will significantly slow down Pino's performance.
*/
timestamp?: TimeFn | boolean;
/**
* One of the supported levels or `silent` to disable logging. Any other value defines a custom level and
* requires supplying a level value via `levelVal`. Default: 'info'.
*/
level?: LevelWithSilentOrString;
/**
* Use this option to define additional logging levels.
* The keys of the object correspond the namespace of the log level, and the values should be the numerical value of the level.
*/
customLevels?: { [level in CustomLevels]: number };
/**
* Use this option to only use defined `customLevels` and omit Pino's levels.
* Logger's default `level` must be changed to a value in `customLevels` in order to use `useOnlyCustomLevels`
* Warning: this option may not be supported by downstream transports.
*/
useOnlyCustomLevels?: UseOnlyCustomLevels;
/**
* Use this option to define custom comparison of log levels.
* Useful to compare custom log levels or non-standard level values.
* Default: "ASC"
*/
levelComparison?: "ASC" | "DESC" | ((current: number, expected: number) => boolean);
/**
* If provided, the `mixin` function is called each time one of the active logging methods
* is called. The function must synchronously return an object. The properties of the
* returned object will be added to the logged JSON.
*/
mixin?: MixinFn<CustomLevels>;
/**
* If provided, the `mixinMergeStrategy` function is called each time one of the active
* logging methods is called. The first parameter is the value `mergeObject` or an empty object,
* the second parameter is the value resulting from `mixin()` or an empty object.
* The function must synchronously return an object.
*/
mixinMergeStrategy?: MixinMergeStrategyFn
/**
* As an array, the redact option specifies paths that should have their values redacted from any log output.
*
* Each path must be a string using a syntax which corresponds to JavaScript dot and bracket notation.
*
* If an object is supplied, three options can be specified:
*
* paths (String[]): Required. An array of paths
* censor (String): Optional. A value to overwrite key which are to be redacted. Default: '[Redacted]'
* remove (Boolean): Optional. Instead of censoring the value, remove both the key and the value. Default: false
*/
redact?: string[] | redactOptions;
/**
* When defining a custom log level via level, set to an integer value to define the new level. Default: `undefined`.
*/
levelVal?: number;
/**
* The string key for the 'message' in the JSON object. Default: "msg".
*/
messageKey?: string;
/**
* The string key for the 'error' in the JSON object. Default: "err".
*/
errorKey?: string;
/**
* The string key to place any logged object under.
*/
nestedKey?: string;
/**
* Enables logging. Default: `true`.
*/
enabled?: boolean;
/**
* Browser only, see http://getpino.io/#/docs/browser.
*/
browser?: {
/**
* The `asObject` option will create a pino-like log object instead of passing all arguments to a console
* method. When `write` is set, `asObject` will always be true.
*
* @example
* pino.info('hi') // creates and logs {msg: 'hi', level: 30, time: <ts>}
*/
asObject?: boolean;
/**
* The `asObjectBindingsOnly` option is similar to `asObject` but will keep the message and arguments
* unformatted. This allows to defer formatting the message to the actual call to `console` methods,
* where browsers then have richer formatting in their devtools than when pino will format the message to
* a string first.
*
* @example
* pino.info('hello %s', 'world') // creates and logs {level: 30, time: <ts>}, 'hello %s', 'world'
*/
asObjectBindingsOnly?: boolean;
formatters?: {
/**
* Changes the shape of the log level.
* The default shape is { level: number }.
*/
level?: (label: string, number: number) => object;
/**
* Changes the shape of the log object.
*/
log?: (object: Record<string, unknown>) => Record<string, unknown>;
}
/**
* Instead of passing log messages to `console.log` they can be passed to a supplied function. If `write` is
* set to a single function, all logging objects are passed to this function. If `write` is an object, it
* can have methods that correspond to the levels. When a message is logged at a given level, the
* corresponding method is called. If a method isn't present, the logging falls back to using the `console`.
*
* @example
* const pino = require('pino')({
* browser: {
* write: (o) => {
* // do something with o
* }
* }
* })
*
* @example
* const pino = require('pino')({
* browser: {
* write: {
* info: function (o) {
* //process info log object
* },
* error: function (o) {
* //process error log object
* }
* }
* }
* })
*/
write?:
| WriteFn
| ({
fatal?: WriteFn;
error?: WriteFn;
warn?: WriteFn;
info?: WriteFn;
debug?: WriteFn;
trace?: WriteFn;
} & { [logLevel: string]: WriteFn });
/**
* The serializers provided to `pino` are ignored by default in the browser, including the standard
* serializers provided with Pino. Since the default destination for log messages is the console, values
* such as `Error` objects are enhanced for inspection, which they otherwise wouldn't be if the Error
* serializer was enabled. We can turn all serializers on or we can selectively enable them via an array.
*
* When `serialize` is `true` the standard error serializer is also enabled (see
* {@link https://github.com/pinojs/pino/blob/master/docs/api.md#pino-stdserializers}). This is a global
* serializer which will apply to any `Error` objects passed to the logger methods.
*
* If `serialize` is an array the standard error serializer is also automatically enabled, it can be
* explicitly disabled by including a string in the serialize array: `!stdSerializers.err` (see example).
*
* The `serialize` array also applies to any child logger serializers (see
* {@link https://github.com/pinojs/pino/blob/master/docs/api.md#bindingsserializers-object} for how to
* set child-bound serializers).
*
* Unlike server pino the serializers apply to every object passed to the logger method, if the `asObject`
* option is `true`, this results in the serializers applying to the first object (as in server pino).
*
* For more info on serializers see
* {@link https://github.com/pinojs/pino/blob/master/docs/api.md#serializers-object}.
*
* @example
* const pino = require('pino')({
* browser: {
* serialize: true
* }
* })
*
* @example
* const pino = require('pino')({
* serializers: {
* custom: myCustomSerializer,
* another: anotherSerializer
* },
* browser: {
* serialize: ['custom']
* }
* })
* // following will apply myCustomSerializer to the custom property,
* // but will not apply anotherSerializer to another key
* pino.info({custom: 'a', another: 'b'})
*
* @example
* const pino = require('pino')({
* serializers: {
* custom: myCustomSerializer,
* another: anotherSerializer
* },
* browser: {
* serialize: ['!stdSerializers.err', 'custom'] //will not serialize Errors, will serialize `custom` keys
* }
* })
*/
serialize?: boolean | string[];
/**
* Options for transmission of logs.
*
* @example
* const pino = require('pino')({
* browser: {
* transmit: {
* level: 'warn',
* send: function (level, logEvent) {
* if (level === 'warn') {
* // maybe send the logEvent to a separate endpoint
* // or maybe analyse the messages further before sending
* }
* // we could also use the `logEvent.level.value` property to determine
* // numerical value
* if (logEvent.level.value >= 50) { // covers error and fatal
*
* // send the logEvent somewhere
* }
* }
* }
* }
* })
*/
transmit?: {
/**
* Specifies the minimum level (inclusive) of when the `send` function should be called, if not supplied
* the `send` function will be called based on the main logging `level` (set via `options.level`,
* defaulting to `info`).
*/
level?: LevelOrString;
/**
* Remotely record log messages.
*
* @description Called after writing the log message.
*/
send: (level: Level, logEvent: LogEvent) => void;
};
/**
* The disabled option will disable logging in browser if set to true, by default it is set to false.
*
* @example
* const pino = require('pino')({browser: {disabled: true}})
*/
disabled?: boolean;
};
/**
* key-value object added as child logger to each log line. If set to null the base child logger is not added
*/
base?: { [key: string]: any } | null;
/**
* An object containing functions for formatting the shape of the log lines.
* These functions should return a JSONifiable object and should never throw.
* These functions allow for full customization of the resulting log lines.
* For example, they can be used to change the level key name or to enrich the default metadata.
*/
formatters?: {
/**
* Changes the shape of the log level.
* The default shape is { level: number }.
* The function takes two arguments, the label of the level (e.g. 'info') and the numeric value (e.g. 30).
*/
level?: (label: string, number: number) => object;
/**
* Changes the shape of the bindings.
* The default shape is { pid, hostname }.
* The function takes a single argument, the bindings object.
* It will be called every time a child logger is created.
*/
bindings?: (bindings: Bindings) => object;
/**
* Changes the shape of the log object.
* This function will be called every time one of the log methods (such as .info) is called.
* All arguments passed to the log method, except the message, will be pass to this function.
* By default it does not change the shape of the log object.
*/
log?: (object: Record<string, unknown>) => Record<string, unknown>;
};
/**
* A string that would be prefixed to every message (and child message)
*/
msgPrefix?: string
/**
* An object mapping to hook functions. Hook functions allow for customizing internal logger operations.
* Hook functions must be synchronous functions.
*/
hooks?: {
/**
* Allows for manipulating the parameters passed to logger methods. The signature for this hook is
* logMethod (args, method, level) {}, where args is an array of the arguments that were passed to the
* log method and method is the log method itself, and level is the log level. This hook must invoke the method function by
* using apply, like so: method.apply(this, newArgumentsArray).
*/
logMethod?: (this: Logger, args: Parameters<LogFn>, method: LogFn, level: number) => void;
/**
* Allows for manipulating the stringified JSON log output just before writing to various transports.
* This function must return a string and must be valid JSON.
*/
streamWrite?: (s: string) => string;
};
/**
* Stringification limit at a specific nesting depth when logging circular object. Default: `5`.
*/
depthLimit?: number
/**
* Stringification limit of properties/elements when logging a specific object/array with circular references. Default: `100`.
*/
edgeLimit?: number
/**
* Optional child creation callback.
*/
onChild?: OnChildCallback<CustomLevels>;
/**
* logs newline delimited JSON with `\r\n` instead of `\n`. Default: `false`.
*/
crlf?: boolean;
}
/**
 * Options accepted by `logger.child()` — a subset of `LoggerOptions`.
 * Supplied values apply to the child logger in addition to (or overriding)
 * what it inherits from its parent.
 */
export interface ChildLoggerOptions<CustomLevels extends string = never> {
    level?: LevelOrString;
    serializers?: { [key: string]: SerializerFn };
    customLevels?: { [level in CustomLevels]: number };
    formatters?: {
        level?: (label: string, number: number) => object;
        bindings?: (bindings: Bindings) => object;
        log?: (object: object) => object;
    };
    redact?: string[] | redactOptions;
    /** Prefix prepended to every message logged by the child. */
    msgPrefix?: string
}
/**
* A data structure representing a log message, it represents the arguments passed to a logger statement, the level
* at which they were logged and the hierarchy of child bindings.
*
* @description By default serializers are not applied to log output in the browser, but they will always be applied
* to `messages` and `bindings` in the `logEvent` object. This allows us to ensure a consistent format for all
* values between server and client.
*/
export interface LogEvent {
/**
* Unix epoch timestamp in milliseconds, the time is taken from the moment the logger method is called.
*/
ts: number;
/**
* All arguments passed to logger method, (for instance `logger.info('a', 'b', 'c')` would result in `messages`
* array `['a', 'b', 'c']`).
*/
messages: any[];
/**
* Represents each child logger (if any), and the relevant bindings.
*
* @description For instance, given `logger.child({a: 1}).child({b: 2}).info({c: 3})`, the bindings array would
* hold `[{a: 1}, {b: 2}]` and the `messages` array would be `[{c: 3}]`. The `bindings` are ordered according to
* their position in the child logger hierarchy, with the lowest index being the top of the hierarchy.
*/
bindings: Bindings[];
/**
* Holds the `label` (for instance `info`), and the corresponding numerical `value` (for instance `30`).
* This could be important in cases where client side level values and labels differ from server side.
*/
level: {
label: string;
value: number;
};
}
//// Top level variable (const) exports
/**
 * Provides functions for serializing objects common to many projects.
 */
export const stdSerializers: typeof pinoStdSerializers;
/**
 * The default level mappings: level names to their numeric values (`values`)
 * and numeric values back to their labels (`labels`).
 */
export const levels: LevelMapping;
export const symbols: {
readonly setLevelSym: unique symbol;
readonly getLevelSym: unique symbol;
readonly levelValSym: unique symbol;
readonly useLevelLabelsSym: unique symbol;
readonly mixinSym: unique symbol;
readonly lsCacheSym: unique symbol;
readonly chindingsSym: unique symbol;
readonly parsedChindingsSym: unique symbol;
readonly asJsonSym: unique symbol;
readonly writeSym: unique symbol;
readonly serializersSym: unique symbol;
readonly redactFmtSym: unique symbol;
readonly timeSym: unique symbol;
readonly timeSliceIndexSym: unique symbol;
readonly streamSym: unique symbol;
readonly stringifySym: unique symbol;
readonly stringifySafeSym: unique symbol;
readonly stringifiersSym: unique symbol;
readonly endSym: unique symbol;
readonly formatOptsSym: unique symbol;
readonly messageKeySym: unique symbol;
readonly errorKeySym: unique symbol;
readonly nestedKeySym: unique symbol;
readonly wildcardFirstSym: unique symbol;
readonly needsMetadataGsym: unique symbol;
readonly useOnlyCustomLevelsSym: unique symbol;
readonly formattersSym: unique symbol;
readonly hooksSym: unique symbol;
};
/**
* Exposes the Pino package version. Also available on the logger instance.
*/
export const version: string;
/**
 * Provides functions for generating the timestamp property in the log output. You can set the `timestamp` option during
 * initialization to one of these functions to adjust the output format. Alternatively, you can specify your own time function.
 * A time function must synchronously return a string that would be a valid component of a JSON string. For example,
 * the default function returns a string like `,"time":1493426328206`.
 */
export const stdTimeFunctions: {
    /**
     * The default time function for Pino. Returns a string like `,"time":1493426328206`.
     */
    epochTime: TimeFn;
    /**
     * Returns the seconds since Unix epoch.
     */
    unixTime: TimeFn;
    /**
     * Returns an empty string. This function is used when the `timestamp` option is set to `false`.
     */
    nullTime: TimeFn;
    /**
     * Returns ISO 8601-formatted time in UTC.
     */
    isoTime: TimeFn;
};
//// Exported functions
/**
 * Create a Pino Destination instance: a stream-like object with significantly more throughput (over 30%) than a standard Node.js stream.
 * @param dest The destination: a file descriptor, a file path, a writable stream, or an options object with a `dest`
 * property pointing to a fd or path.
 * An ordinary Node.js `stream` file descriptor can be passed as the destination (such as the result of `fs.createWriteStream`)
 * but for peak log writing performance, it is strongly recommended to use `pino.destination` to create the destination stream.
 * @returns A Sonic-Boom stream to be used as destination for the pino function
 */
export function destination(
    dest?: number | object | string | DestinationStream | NodeJS.WritableStream | SonicBoomOpts,
): SonicBoom;
/**
 * Creates a transport: log processing is delegated to a worker thread configured
 * by the given single, multi or pipeline transport options.
 * @returns a ThreadStream usable as a pino destination.
 */
export function transport<TransportOptions = Record<string, any>>(
    options: TransportSingleOptions<TransportOptions> | TransportMultiOptions<TransportOptions> | TransportPipelineOptions<TransportOptions>
): ThreadStream
/**
 * Creates a destination that fans each log line out to every supplied stream
 * whose configured level is met. See `MultiStreamOptions` for `levels`/`dedupe`.
 */
export function multistream<TLevel = Level>(
    streamsArray: (DestinationStream | StreamEntry<TLevel>)[] | DestinationStream | StreamEntry<TLevel>,
    opts?: MultiStreamOptions
): MultiStreamRes<TLevel>
//// Nested version of default export for TypeScript/Babel compatibility
/**
* @param [optionsOrStream]: an options object or a writable stream where the logs will be written. It can also receive some log-line metadata, if the
* relative protocol is enabled. Default: process.stdout
* @returns a new logger instance.
*/
function pino<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean>(optionsOrStream?: LoggerOptions<CustomLevels, UseOnlyCustomLevels> | DestinationStream): Logger<CustomLevels, UseOnlyCustomLevels>;
/**
* @param [options]: an options object
* @param [stream]: a writable stream where the logs will be written. It can also receive some log-line metadata, if the
* relative protocol is enabled. Default: process.stdout
* @returns a new logger instance.
*/
function pino<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean>(options: LoggerOptions<CustomLevels, UseOnlyCustomLevels>, stream?: DestinationStream | undefined): Logger<CustomLevels, UseOnlyCustomLevels>;
}
//// Callable default export
/**
* @param [optionsOrStream]: an options object or a writable stream where the logs will be written. It can also receive some log-line metadata, if the
* relative protocol is enabled. Default: process.stdout
* @returns a new logger instance.
*/
declare function pino<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean>(optionsOrStream?: pino.LoggerOptions<CustomLevels, UseOnlyCustomLevels> | pino.DestinationStream): pino.Logger<CustomLevels, UseOnlyCustomLevels>;
/**
* @param [options]: an options object
* @param [stream]: a writable stream where the logs will be written. It can also receive some log-line metadata, if the
* relative protocol is enabled. Default: process.stdout
* @returns a new logger instance.
*/
declare function pino<CustomLevels extends string = never, UseOnlyCustomLevels extends boolean = boolean>(options: pino.LoggerOptions<CustomLevels, UseOnlyCustomLevels>, stream?: pino.DestinationStream | undefined): pino.Logger<CustomLevels, UseOnlyCustomLevels>;
export = pino;

234
node_modules/pino/pino.js generated vendored Normal file
View File

@@ -0,0 +1,234 @@
'use strict'
const os = require('node:os')
const stdSerializers = require('pino-std-serializers')
const caller = require('./lib/caller')
const redaction = require('./lib/redaction')
const time = require('./lib/time')
const proto = require('./lib/proto')
const symbols = require('./lib/symbols')
const { configure } = require('safe-stable-stringify')
const { assertDefaultLevelFound, mappings, genLsCache, genLevelComparison, assertLevelComparison } = require('./lib/levels')
const { DEFAULT_LEVELS, SORTING_ORDER } = require('./lib/constants')
const {
createArgsNormalizer,
asChindings,
buildSafeSonicBoom,
buildFormatters,
stringify,
normalizeDestFileDescriptor,
noop
} = require('./lib/tools')
const { version } = require('./lib/meta')
const {
chindingsSym,
redactFmtSym,
serializersSym,
timeSym,
timeSliceIndexSym,
streamSym,
stringifySym,
stringifySafeSym,
stringifiersSym,
setLevelSym,
endSym,
formatOptsSym,
messageKeySym,
errorKeySym,
nestedKeySym,
mixinSym,
levelCompSym,
useOnlyCustomLevelsSym,
formattersSym,
hooksSym,
nestedKeyStrSym,
mixinMergeStrategySym,
msgPrefixSym
} = symbols
const { epochTime, nullTime } = time
const { pid } = process
const hostname = os.hostname()
const defaultErrorSerializer = stdSerializers.err
// Defaults applied to every logger; user-supplied options are merged over
// these by createArgsNormalizer below.
const defaultOptions = {
  level: 'info',
  levelComparison: SORTING_ORDER.ASC,
  levels: DEFAULT_LEVELS,
  messageKey: 'msg',
  errorKey: 'err',
  nestedKey: null,
  enabled: true,
  base: { pid, hostname },
  // Null-prototype objects guard against prototype-pollution via option keys.
  serializers: Object.assign(Object.create(null), {
    err: defaultErrorSerializer
  }),
  formatters: Object.assign(Object.create(null), {
    bindings (bindings) {
      return bindings
    },
    level (label, number) {
      return { level: number }
    }
  }),
  hooks: {
    logMethod: undefined,
    streamWrite: undefined
  },
  timestamp: epochTime,
  name: undefined,
  redact: null,
  customLevels: null,
  useOnlyCustomLevels: false,
  depthLimit: 5,
  edgeLimit: 100
}
// Normalizes pino(...) call arguments into a { opts, stream } pair.
const normalize = createArgsNormalizer(defaultOptions)
// Null-prototype copy of the standard serializers exposed as pino.stdSerializers.
const serializers = Object.assign(Object.create(null), stdSerializers)
/**
 * Factory for a pino logger instance.
 *
 * Accepts (opts), (stream) or (opts, stream); `normalize` reconciles the
 * argument shapes and fills in `defaultOptions`. The returned object carries
 * its configuration under symbol keys and inherits logging methods from the
 * prototype produced by `proto()`.
 */
function pino (...args) {
  const instance = {}
  const { opts, stream } = normalize(instance, caller(), ...args)
  // Accept default level names case-insensitively (e.g. 'INFO' -> 'info').
  if (opts.level && typeof opts.level === 'string' && DEFAULT_LEVELS[opts.level.toLowerCase()] !== undefined) opts.level = opts.level.toLowerCase()
  const {
    redact,
    crlf,
    serializers,
    timestamp,
    messageKey,
    errorKey,
    nestedKey,
    base,
    name,
    level,
    customLevels,
    levelComparison,
    mixin,
    mixinMergeStrategy,
    useOnlyCustomLevels,
    formatters,
    hooks,
    depthLimit,
    edgeLimit,
    onChild,
    msgPrefix
  } = opts
  // Depth/breadth-bounded stringifier (safe-stable-stringify) used for
  // objects that cannot be serialized with plain JSON.stringify.
  const stringifySafe = configure({
    maximumDepth: depthLimit,
    maximumBreadth: edgeLimit
  })
  const allFormatters = buildFormatters(
    formatters.level,
    formatters.bindings,
    formatters.log
  )
  // Bind the safe stringifier so `stringify` can fall back to it.
  const stringifyFn = stringify.bind({
    [stringifySafeSym]: stringifySafe
  })
  // When redaction paths are configured, serialization is routed through them.
  const stringifiers = redact ? redaction(redact, stringifyFn) : {}
  const formatOpts = redact
    ? { stringify: stringifiers[redactFmtSym] }
    : { stringify: stringifyFn }
  // Every emitted line is closed with '}' plus the configured newline.
  const end = '}' + (crlf ? '\r\n' : '\n')
  const coreChindings = asChindings.bind(null, {
    [chindingsSym]: '',
    [serializersSym]: serializers,
    [stringifiersSym]: stringifiers,
    [stringifySym]: stringify,
    [stringifySafeSym]: stringifySafe,
    [formattersSym]: allFormatters
  })
  // Pre-serialized base bindings (e.g. pid/hostname, plus name when set)
  // shared by every log line; base === null disables them.
  let chindings = ''
  if (base !== null) {
    if (name === undefined) {
      chindings = coreChindings(base)
    } else {
      chindings = coreChindings(Object.assign({}, base, { name }))
    }
  }
  const time = (timestamp instanceof Function)
    ? timestamp
    : (timestamp ? epochTime : nullTime)
  // Index just past the ':' in the emitted time fragment (e.g. ',"time":...').
  const timeSliceIndex = time().indexOf(':') + 1
  if (useOnlyCustomLevels && !customLevels) throw Error('customLevels is required if useOnlyCustomLevels is set true')
  if (mixin && typeof mixin !== 'function') throw Error(`Unknown mixin type "${typeof mixin}" - expected "function"`)
  if (msgPrefix && typeof msgPrefix !== 'string') throw Error(`Unknown msgPrefix type "${typeof msgPrefix}" - expected "string"`)
  assertDefaultLevelFound(level, customLevels, useOnlyCustomLevels)
  const levels = mappings(customLevels, useOnlyCustomLevels)
  // Advertise the logger configuration to destinations that listen for it
  // (e.g. transports), via a PINO_CONFIG message event.
  if (typeof stream.emit === 'function') {
    stream.emit('message', { code: 'PINO_CONFIG', config: { levels, messageKey, errorKey } })
  }
  assertLevelComparison(levelComparison)
  const levelCompFunc = genLevelComparison(levelComparison)
  // Internal state is stored under symbol keys to keep the public surface clean.
  Object.assign(instance, {
    levels,
    [levelCompSym]: levelCompFunc,
    [useOnlyCustomLevelsSym]: useOnlyCustomLevels,
    [streamSym]: stream,
    [timeSym]: time,
    [timeSliceIndexSym]: timeSliceIndex,
    [stringifySym]: stringify,
    [stringifySafeSym]: stringifySafe,
    [stringifiersSym]: stringifiers,
    [endSym]: end,
    [formatOptsSym]: formatOpts,
    [messageKeySym]: messageKey,
    [errorKeySym]: errorKey,
    [nestedKeySym]: nestedKey,
    // protect against injection
    [nestedKeyStrSym]: nestedKey ? `,${JSON.stringify(nestedKey)}:{` : '',
    [serializersSym]: serializers,
    [mixinSym]: mixin,
    [mixinMergeStrategySym]: mixinMergeStrategy,
    [chindingsSym]: chindings,
    [formattersSym]: allFormatters,
    [hooksSym]: hooks,
    silent: noop,
    onChild,
    [msgPrefixSym]: msgPrefix
  })
  Object.setPrototypeOf(instance, proto())
  // Pre-generate the per-level string cache, then apply the initial level.
  genLsCache(instance)
  instance[setLevelSym](level)
  return instance
}
module.exports = pino
/**
 * Builds a SonicBoom destination stream from a fd, a path, or an options
 * object whose `dest` is a fd or path. Defaults to stdout's fd.
 * NOTE(review): `typeof null === 'object'`, so an explicit `null` argument
 * would be treated as an options object — callers should pass undefined.
 */
module.exports.destination = (dest = process.stdout.fd) => {
  // Options object: normalize its `dest` (fd or path), defaulting to stdout.
  if (typeof dest === 'object') {
    dest.dest = normalizeDestFileDescriptor(dest.dest || process.stdout.fd)
    return buildSafeSonicBoom(dest)
  }
  // Bare fd / path: wrap in an options object with unbuffered writes.
  return buildSafeSonicBoom({ dest: normalizeDestFileDescriptor(dest), minLength: 0 })
}
module.exports.transport = require('./lib/transport')
module.exports.multistream = require('./lib/multistream')
// Default (no custom levels) label/value mappings.
module.exports.levels = mappings()
module.exports.stdSerializers = serializers
// Shallow copy so callers cannot mutate the internal time module.
module.exports.stdTimeFunctions = Object.assign({}, time)
module.exports.symbols = symbols
module.exports.version = version
// Enables default and name export with TypeScript and Babel
module.exports.default = pino
module.exports.pino = pino

BIN
node_modules/pino/pretty-demo.png generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

876
node_modules/pino/test/basic.test.js generated vendored Normal file
View File

@@ -0,0 +1,876 @@
'use strict'
const os = require('node:os')
const { readFileSync } = require('node:fs')
const { test } = require('tap')
const { sink, check, once, watchFileCreated, file } = require('./helper')
const pino = require('../')
const { version } = require('../package.json')
const { pid } = process
const hostname = os.hostname()
test('pino version is exposed on export', async ({ equal }) => {
equal(pino.version, version)
})
test('pino version is exposed on instance', async ({ equal }) => {
const instance = pino()
equal(instance.version, version)
})
test('child instance exposes pino version', async ({ equal }) => {
const child = pino().child({ foo: 'bar' })
equal(child.version, version)
})
test('bindings are exposed on every instance', async ({ same }) => {
const instance = pino()
same(instance.bindings(), {})
})
test('bindings contain the name and the child bindings', async ({ same }) => {
const instance = pino({ name: 'basicTest', level: 'info' }).child({ foo: 'bar' }).child({ a: 2 })
same(instance.bindings(), { name: 'basicTest', foo: 'bar', a: 2 })
})
test('set bindings on instance', async ({ same }) => {
const instance = pino({ name: 'basicTest', level: 'info' })
instance.setBindings({ foo: 'bar' })
same(instance.bindings(), { name: 'basicTest', foo: 'bar' })
})
test('newly set bindings overwrite old bindings', async ({ same }) => {
const instance = pino({ name: 'basicTest', level: 'info', base: { foo: 'bar' } })
instance.setBindings({ foo: 'baz' })
same(instance.bindings(), { name: 'basicTest', foo: 'baz' })
})
test('set bindings on child instance', async ({ same }) => {
const child = pino({ name: 'basicTest', level: 'info' }).child({})
child.setBindings({ foo: 'bar' })
same(child.bindings(), { name: 'basicTest', foo: 'bar' })
})
test('child should have bindings set by parent', async ({ same }) => {
const instance = pino({ name: 'basicTest', level: 'info' })
instance.setBindings({ foo: 'bar' })
const child = instance.child({})
same(child.bindings(), { name: 'basicTest', foo: 'bar' })
})
test('child should not share bindings of parent set after child creation', async ({ same }) => {
const instance = pino({ name: 'basicTest', level: 'info' })
const child = instance.child({})
instance.setBindings({ foo: 'bar' })
same(instance.bindings(), { name: 'basicTest', foo: 'bar' })
same(child.bindings(), { name: 'basicTest' })
})
function levelTest (name, level) {
test(`${name} logs as ${level}`, async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
instance[name]('hello world')
check(equal, await once(stream, 'data'), level, 'hello world')
})
test(`passing objects at level ${name}`, async ({ equal, same }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
const obj = { hello: 'world' }
instance[name](obj)
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
equal(result.pid, pid)
equal(result.hostname, hostname)
equal(result.level, level)
equal(result.hello, 'world')
same(Object.keys(obj), ['hello'])
})
test(`passing an object and a string at level ${name}`, async ({ equal, same }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
const obj = { hello: 'world' }
instance[name](obj, 'a string')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level,
msg: 'a string',
hello: 'world'
})
same(Object.keys(obj), ['hello'])
})
test(`passing a undefined and a string at level ${name}`, async ({ equal, same }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
instance[name](undefined, 'a string')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level,
msg: 'a string'
})
})
test(`overriding object key by string at level ${name}`, async ({ equal, same }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
instance[name]({ hello: 'world', msg: 'object' }, 'string')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level,
msg: 'string',
hello: 'world'
})
})
test(`formatting logs as ${name}`, async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
instance[name]('hello %d', 42)
const result = await once(stream, 'data')
check(equal, result, level, 'hello 42')
})
test(`formatting a symbol at level ${name}`, async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
const sym = Symbol('foo')
instance[name]('hello %s', sym)
const result = await once(stream, 'data')
check(equal, result, level, 'hello Symbol(foo)')
})
test(`passing error with a serializer at level ${name}`, async ({ equal, same }) => {
const stream = sink()
const err = new Error('myerror')
const instance = pino({
serializers: {
err: pino.stdSerializers.err
}
}, stream)
instance.level = name
instance[name]({ err })
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level,
err: {
type: 'Error',
message: err.message,
stack: err.stack
},
msg: err.message
})
})
test(`child logger for level ${name}`, async ({ equal, same }) => {
const stream = sink()
const instance = pino(stream)
instance.level = name
const child = instance.child({ hello: 'world' })
child[name]('hello world')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level,
msg: 'hello world',
hello: 'world'
})
})
}
levelTest('fatal', 60)
levelTest('error', 50)
levelTest('warn', 40)
levelTest('info', 30)
levelTest('debug', 20)
levelTest('trace', 10)
test('serializers can return undefined to strip field', async ({ equal }) => {
const stream = sink()
const instance = pino({
serializers: {
test () { return undefined }
}
}, stream)
instance.info({ test: 'sensitive info' })
const result = await once(stream, 'data')
equal('test' in result, false)
})
test('streams receive a message event with PINO_CONFIG', ({ match, end }) => {
const stream = sink()
stream.once('message', (message) => {
match(message, {
code: 'PINO_CONFIG',
config: {
errorKey: 'err',
levels: {
labels: {
10: 'trace',
20: 'debug',
30: 'info',
40: 'warn',
50: 'error',
60: 'fatal'
},
values: {
debug: 20,
error: 50,
fatal: 60,
info: 30,
trace: 10,
warn: 40
}
},
messageKey: 'msg'
}
})
end()
})
pino(stream)
})
// Logging an object that contains a reference cycle must not throw.
test('does not explode with a circular ref', async ({ doesNotThrow }) => {
  const dest = sink()
  const logger = pino(dest)
  // Build a two-object cycle: outer.hello -> inner, inner.a -> outer.
  const inner = {}
  const outer = { hello: inner }
  inner.a = outer
  doesNotThrow(() => { logger.info(outer) })
})
test('set the name', async ({ equal, same }) => {
const stream = sink()
const instance = pino({
name: 'hello'
}, stream)
instance.fatal('this is fatal')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level: 60,
name: 'hello',
msg: 'this is fatal'
})
})
test('set the messageKey', async ({ equal, same }) => {
const stream = sink()
const message = 'hello world'
const messageKey = 'fooMessage'
const instance = pino({
messageKey
}, stream)
instance.info(message)
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level: 30,
fooMessage: message
})
})
test('set the nestedKey', async ({ equal, same }) => {
const stream = sink()
const object = { hello: 'world' }
const nestedKey = 'stuff'
const instance = pino({
nestedKey
}, stream)
instance.info(object)
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level: 30,
stuff: object
})
})
test('set undefined properties', async ({ equal, same }) => {
const stream = sink()
const instance = pino(stream)
instance.info({ hello: 'world', property: undefined })
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
pid,
hostname,
level: 30,
hello: 'world'
})
})
test('prototype properties are not logged', async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
instance.info(Object.create({ hello: 'world' }))
const { hello } = await once(stream, 'data')
equal(hello, undefined)
})
test('set the base', async ({ equal, same }) => {
const stream = sink()
const instance = pino({
base: {
a: 'b'
}
}, stream)
instance.fatal('this is fatal')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
a: 'b',
level: 60,
msg: 'this is fatal'
})
})
test('set the base to null', async ({ equal, same }) => {
const stream = sink()
const instance = pino({
base: null
}, stream)
instance.fatal('this is fatal')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
level: 60,
msg: 'this is fatal'
})
})
test('set the base to null and use a formatter', async ({ equal, same }) => {
const stream = sink()
const instance = pino({
base: null,
formatters: {
log (input) {
return Object.assign({}, input, { additionalMessage: 'using pino' })
}
}
}, stream)
instance.fatal('this is fatal too')
const result = await once(stream, 'data')
equal(new Date(result.time) <= new Date(), true, 'time is greater than Date.now()')
delete result.time
same(result, {
level: 60,
msg: 'this is fatal too',
additionalMessage: 'using pino'
})
})
test('throw if creating child without bindings', async ({ equal, fail }) => {
const stream = sink()
const instance = pino(stream)
try {
instance.child()
fail('it should throw')
} catch (err) {
equal(err.message, 'missing bindings for child Pino')
}
})
test('correctly escapes msg strings with stray double quote at end', async ({ same }) => {
const stream = sink()
const instance = pino({
name: 'hello'
}, stream)
instance.fatal('this contains "')
const result = await once(stream, 'data')
delete result.time
same(result, {
pid,
hostname,
level: 60,
name: 'hello',
msg: 'this contains "'
})
})
test('correctly escape msg strings with unclosed double quote', async ({ same }) => {
const stream = sink()
const instance = pino({
name: 'hello'
}, stream)
instance.fatal('" this contains')
const result = await once(stream, 'data')
delete result.time
same(result, {
pid,
hostname,
level: 60,
name: 'hello',
msg: '" this contains'
})
})
test('correctly escape quote in a key', async ({ same }) => {
const stream = sink()
const instance = pino(stream)
const obj = { 'some"obj': 'world' }
instance.info(obj, 'a string')
const result = await once(stream, 'data')
delete result.time
same(result, {
level: 30,
pid,
hostname,
msg: 'a string',
'some"obj': 'world'
})
same(Object.keys(obj), ['some"obj'])
})
// https://github.com/pinojs/pino/issues/139
test('object and format string', async ({ same }) => {
const stream = sink()
const instance = pino(stream)
instance.info({}, 'foo %s', 'bar')
const result = await once(stream, 'data')
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'foo bar'
})
})
test('object and format string property', async ({ same }) => {
const stream = sink()
const instance = pino(stream)
instance.info({ answer: 42 }, 'foo %s', 'bar')
const result = await once(stream, 'data')
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'foo bar',
answer: 42
})
})
test('correctly strip undefined when returned from toJSON', async ({ equal }) => {
const stream = sink()
const instance = pino({
test: 'this'
}, stream)
instance.fatal({ test: { toJSON () { return undefined } } })
const result = await once(stream, 'data')
equal('test' in result, false)
})
test('correctly supports stderr', async ({ same }) => {
  // stderr inherits from Stream, rather than Writable
  // Minimal duck-typed destination: pino only needs `writable` and `write`.
  const dest = {
    writable: true,
    write (result) {
      // NOTE(review): this assertion runs synchronously inside write(); if
      // write were never invoked the test would pass vacuously — confirm.
      result = JSON.parse(result)
      delete result.time
      same(result, {
        pid,
        hostname,
        level: 60,
        msg: 'a message'
      })
    }
  }
  const instance = pino(dest)
  instance.fatal('a message')
})
test('normalize number to string', async ({ same }) => {
const stream = sink()
const instance = pino(stream)
instance.info(1)
const result = await once(stream, 'data')
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: '1'
})
})
test('normalize number to string with an object', async ({ same }) => {
const stream = sink()
const instance = pino(stream)
instance.info({ answer: 42 }, 1)
const result = await once(stream, 'data')
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: '1',
answer: 42
})
})
test('handles objects with null prototype', async ({ same }) => {
const stream = sink()
const instance = pino(stream)
const o = Object.create(null)
o.test = 'test'
instance.info(o)
const result = await once(stream, 'data')
delete result.time
same(result, {
pid,
hostname,
level: 30,
test: 'test'
})
})
test('pino.destination', async ({ same }) => {
const tmp = file()
const instance = pino(pino.destination(tmp))
instance.info('hello')
await watchFileCreated(tmp)
const result = JSON.parse(readFileSync(tmp).toString())
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('auto pino.destination with a string', async ({ same }) => {
const tmp = file()
const instance = pino(tmp)
instance.info('hello')
await watchFileCreated(tmp)
const result = JSON.parse(readFileSync(tmp).toString())
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('auto pino.destination with a string as second argument', async ({ same }) => {
const tmp = file()
const instance = pino(null, tmp)
instance.info('hello')
await watchFileCreated(tmp)
const result = JSON.parse(readFileSync(tmp).toString())
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('does not override opts with a string as second argument', async ({ same }) => {
const tmp = file()
const instance = pino({
timestamp: () => ',"time":"none"'
}, tmp)
instance.info('hello')
await watchFileCreated(tmp)
const result = JSON.parse(readFileSync(tmp).toString())
same(result, {
pid,
hostname,
level: 30,
time: 'none',
msg: 'hello'
})
})
// https://github.com/pinojs/pino/issues/222
test('children with same names render in correct order', async ({ equal }) => {
const stream = sink()
const root = pino(stream)
root.child({ a: 1 }).child({ a: 2 }).info({ a: 3 })
const { a } = await once(stream, 'data')
equal(a, 3, 'last logged object takes precedence')
})
test('use `safe-stable-stringify` to avoid circular dependencies', async ({ same }) => {
const stream = sink()
const root = pino(stream)
// circular depth
const obj = {}
obj.a = obj
root.info(obj)
const { a } = await once(stream, 'data')
same(a, { a: '[Circular]' })
})
test('correctly log non circular objects', async ({ same }) => {
const stream = sink()
const root = pino(stream)
const obj = {}
let parent = obj
for (let i = 0; i < 10; i++) {
parent.node = {}
parent = parent.node
}
root.info(obj)
const { node } = await once(stream, 'data')
same(node, { node: { node: { node: { node: { node: { node: { node: { node: { node: {} } } } } } } } } })
})
test('safe-stable-stringify must be used when interpolating', async (t) => {
const stream = sink()
const instance = pino(stream)
const o = { a: { b: {} } }
o.a.b.c = o.a.b
instance.info('test %j', o)
const { msg } = await once(stream, 'data')
t.equal(msg, 'test {"a":{"b":{"c":"[Circular]"}}}')
})
// useOnlyCustomLevels without any customLevels is a configuration error.
test('throws when setting useOnlyCustomLevels without customLevels', async ({ throws }) => {
  const makeLogger = () => pino({ useOnlyCustomLevels: true })
  throws(makeLogger, 'customLevels is required if useOnlyCustomLevels is set true')
})
test('correctly log Infinity', async (t) => {
const stream = sink()
const instance = pino(stream)
const o = { num: Infinity }
instance.info(o)
const { num } = await once(stream, 'data')
t.equal(num, null)
})
test('correctly log -Infinity', async (t) => {
const stream = sink()
const instance = pino(stream)
const o = { num: -Infinity }
instance.info(o)
const { num } = await once(stream, 'data')
t.equal(num, null)
})
test('correctly log NaN', async (t) => {
const stream = sink()
const instance = pino(stream)
const o = { num: NaN }
instance.info(o)
const { num } = await once(stream, 'data')
t.equal(num, null)
})
test('offers a .default() method to please typescript', async ({ equal }) => {
equal(pino.default, pino)
const stream = sink()
const instance = pino.default(stream)
instance.info('hello world')
check(equal, await once(stream, 'data'), 30, 'hello world')
})
test('correctly skip function', async (t) => {
const stream = sink()
const instance = pino(stream)
const o = { num: NaN }
instance.info(o, () => {})
const { msg } = await once(stream, 'data')
t.equal(msg, undefined)
})
test('correctly skip Infinity', async (t) => {
const stream = sink()
const instance = pino(stream)
const o = { num: NaN }
instance.info(o, Infinity)
const { msg } = await once(stream, 'data')
t.equal(msg, null)
})
test('correctly log number', async (t) => {
const stream = sink()
const instance = pino(stream)
const o = { num: NaN }
instance.info(o, 42)
const { msg } = await once(stream, 'data')
t.equal(msg, 42)
})
test('nestedKey should not be used for non-objects', async ({ strictSame }) => {
const stream = sink()
const message = 'hello'
const nestedKey = 'stuff'
const instance = pino({
nestedKey
}, stream)
instance.info(message)
const result = await once(stream, 'data')
delete result.time
strictSame(result, {
pid,
hostname,
level: 30,
msg: message
})
})
test('throws if prettyPrint is passed in as an option', async (t) => {
t.throws(() => {
pino({
prettyPrint: true
})
}, new Error('prettyPrint option is no longer supported, see the pino-pretty package (https://github.com/pinojs/pino-pretty)'))
})
test('Should invoke `onChild` with the newly created child', async ({ equal }) => {
let innerChild
const child = pino({
onChild: (instance) => {
innerChild = instance
}
}).child({ foo: 'bar' })
equal(child, innerChild)
})
test('logger message should have the prefix message that defined in the logger creation', async ({ equal }) => {
const stream = sink()
const logger = pino({
msgPrefix: 'My name is Bond '
}, stream)
equal(logger.msgPrefix, 'My name is Bond ')
logger.info('James Bond')
const { msg } = await once(stream, 'data')
equal(msg, 'My name is Bond James Bond')
})
test('child message should have the prefix message that defined in the child creation', async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
const child = instance.child({}, { msgPrefix: 'My name is Bond ' })
child.info('James Bond')
const { msg } = await once(stream, 'data')
equal(msg, 'My name is Bond James Bond')
})
test('child message should have the prefix message that defined in the child creation when logging with log meta', async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
const child = instance.child({}, { msgPrefix: 'My name is Bond ' })
child.info({ hello: 'world' }, 'James Bond')
const { msg, hello } = await once(stream, 'data')
equal(hello, 'world')
equal(msg, 'My name is Bond James Bond')
})
test('logged message should not have the prefix when not providing any message', async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
const child = instance.child({}, { msgPrefix: 'This should not be shown ' })
child.info({ hello: 'world' })
const { msg, hello } = await once(stream, 'data')
equal(hello, 'world')
equal(msg, undefined)
})
test('child message should append parent prefix to current prefix that defined in the child creation', async ({ equal }) => {
const stream = sink()
const instance = pino({
msgPrefix: 'My name is Bond '
}, stream)
const child = instance.child({}, { msgPrefix: 'James ' })
child.info('Bond')
equal(child.msgPrefix, 'My name is Bond James ')
const { msg } = await once(stream, 'data')
equal(msg, 'My name is Bond James Bond')
})
test('child message should inherent parent prefix', async ({ equal }) => {
const stream = sink()
const instance = pino({
msgPrefix: 'My name is Bond '
}, stream)
const child = instance.child({})
child.info('James Bond')
const { msg } = await once(stream, 'data')
equal(msg, 'My name is Bond James Bond')
})
test('grandchild message should inherent parent prefix', async ({ equal }) => {
const stream = sink()
const instance = pino(stream)
const child = instance.child({}, { msgPrefix: 'My name is Bond ' })
const grandchild = child.child({})
grandchild.info('James Bond')
const { msg } = await once(stream, 'data')
equal(msg, 'My name is Bond James Bond')
})

57
node_modules/pino/test/broken-pipe.test.js generated vendored Normal file
View File

@@ -0,0 +1,57 @@
'use strict'
const t = require('tap')
const { join } = require('node:path')
const { fork } = require('node:child_process')
const { once } = require('./helper')
const pino = require('..')
if (process.platform === 'win32') {
t.skip('skipping on windows')
process.exit(0)
}
if (process.env.CITGM) {
// This looks like a some form of limitations of the CITGM test runner
// or the HW/SW we run it on. This file can hang on Node.js v18.x.
// The failure does not reproduce locally or on our CI.
// Skipping it is the only way to keep pino in CITGM.
// https://github.com/nodejs/citgm/pull/1002#issuecomment-1751942988
t.skip('Skipping on Node.js core CITGM because it hangs on v18.x')
process.exit(0)
}
// Forks a broken-pipe fixture, destroys its stdout to provoke an EPIPE on the
// child's logging destination, and asserts the child still exits cleanly.
function test (file) {
  file = join('fixtures', 'broken-pipe', file)
  t.test(file, { parallel: true }, async ({ equal }) => {
    const child = fork(join(__dirname, file), { silent: true })
    // Destroying stdout simulates the reader end of the pipe going away.
    child.stdout.destroy()
    child.stderr.pipe(process.stdout)
    const res = await once(child, 'close')
    equal(res, 0) // process exits successfully
  })
}
t.jobs = 42
test('basic.js')
test('destination.js')
test('syncfalse.js')
t.test('let error pass through', ({ equal, plan }) => {
  plan(3)
  const stream = pino.destination({ sync: true })
  // side effect of the pino constructor is that it will set an
  // event handler for error
  pino(stream)
  // NOTE(review): plan(3) with only two emitted errors implies three listener
  // invocations in total — presumably pino's internal handler re-emits or the
  // destination emits an extra error; confirm against pino internals.
  process.nextTick(() => stream.emit('error', new Error('kaboom')))
  process.nextTick(() => stream.emit('error', new Error('kaboom')))
  stream.on('error', (err) => {
    equal(err.message, 'kaboom')
  })
})

132
node_modules/pino/test/browser-child.test.js generated vendored Normal file
View File

@@ -0,0 +1,132 @@
'use strict'
const test = require('tape')
const pino = require('../browser')
test('child has parent level', ({ end, same, is }) => {
const instance = pino({
level: 'error',
browser: {}
})
const child = instance.child({})
same(child.level, instance.level)
end()
})
test('child can set level at creation time', ({ end, same, is }) => {
const instance = pino({
level: 'error',
browser: {}
})
const child = instance.child({}, { level: 'info' }) // first bindings, then options
same(child.level, 'info')
end()
})
test('changing child level does not affect parent', ({ end, same, is }) => {
const instance = pino({
level: 'error',
browser: {}
})
const child = instance.child({})
child.level = 'info'
same(instance.level, 'error')
end()
})
test('child should log, if its own level allows it', ({ end, same, is }) => {
const expected = [
{
level: 30,
msg: 'this is info'
},
{
level: 40,
msg: 'this is warn'
},
{
level: 50,
msg: 'this is an error'
}
]
const instance = pino({
level: 'error',
browser: {
write (actual) {
checkLogObjects(is, same, actual, expected.shift())
}
}
})
const child = instance.child({})
child.level = 'info'
child.debug('this is debug')
child.info('this is info')
child.warn('this is warn')
child.error('this is an error')
same(expected.length, 0, 'not all messages were read')
end()
})
test('changing child log level should not affect parent log behavior', ({ end, same, is }) => {
const expected = [
{
level: 50,
msg: 'this is an error'
},
{
level: 60,
msg: 'this is fatal'
}
]
const instance = pino({
level: 'error',
browser: {
write (actual) {
checkLogObjects(is, same, actual, expected.shift())
}
}
})
const child = instance.child({})
child.level = 'info'
instance.warn('this is warn')
instance.error('this is an error')
instance.fatal('this is fatal')
same(expected.length, 0, 'not all messages were read')
end()
})
test('onChild callback should be called when new child is created', ({ end, pass, plan }) => {
plan(1)
const instance = pino({
level: 'error',
browser: {},
onChild: (_child) => {
pass('onChild callback was called')
end()
}
})
instance.child({})
})
// Asserts a browser log object matches the expected shape.
// `is` / `same` are the tape assertion functions passed in by the caller;
// the volatile `time` field is validated separately and excluded from the
// deep comparison.
function checkLogObjects (is, same, actual, expected) {
  // The timestamp is captured at log time, so it can be at most Date.now().
  // (Original message read "greater than", contradicting the `<=` check.)
  is(actual.time <= Date.now(), true, 'time is less than or equal to Date.now()')
  // Compare on shallow copies so the caller's objects are not mutated.
  const actualCopy = Object.assign({}, actual)
  const expectedCopy = Object.assign({}, expected)
  delete actualCopy.time
  delete expectedCopy.time
  same(actualCopy, expectedCopy)
}

87
node_modules/pino/test/browser-disabled.test.js generated vendored Normal file
View File

@@ -0,0 +1,87 @@
'use strict'
const test = require('tape')
const pino = require('../browser')
test('set browser opts disabled to true', ({ end, same }) => {
const instance = pino({
browser: {
disabled: true,
write (actual) {
checkLogObjects(same, actual, [])
}
}
})
instance.info('hello world')
instance.error('this is an error')
instance.fatal('this is fatal')
end()
})
test('set browser opts disabled to false', ({ end, same }) => {
const expected = [
{
level: 30,
msg: 'hello world'
},
{
level: 50,
msg: 'this is an error'
},
{
level: 60,
msg: 'this is fatal'
}
]
const instance = pino({
browser: {
disabled: false,
write (actual) {
checkLogObjects(same, actual, expected.shift())
}
}
})
instance.info('hello world')
instance.error('this is an error')
instance.fatal('this is fatal')
end()
})
test('disabled is not set in browser opts', ({ end, same }) => {
const expected = [
{
level: 30,
msg: 'hello world'
},
{
level: 50,
msg: 'this is an error'
},
{
level: 60,
msg: 'this is fatal'
}
]
const instance = pino({
browser: {
write (actual) {
checkLogObjects(same, actual, expected.shift())
}
}
})
instance.info('hello world')
instance.error('this is an error')
instance.fatal('this is fatal')
end()
})
// Deep-compares a logged object against `expected`, ignoring the volatile
// `time` field. `same` is the tape deep-equality assertion from the caller.
// The original trailing `is` parameter was never passed by any caller and
// never used, so it has been dropped (backward-compatible in JS).
function checkLogObjects (same, actual, expected) {
  // Work on shallow copies so the caller's objects are left untouched.
  const actualCopy = Object.assign({}, actual)
  const expectedCopy = Object.assign({}, expected)
  delete actualCopy.time
  delete expectedCopy.time
  same(actualCopy, expectedCopy)
}

View File

@@ -0,0 +1,12 @@
'use strict'
Object.freeze(console)
const test = require('tape')
const pino = require('../browser')
// Constructing a silent-level browser logger must not touch console (frozen
// above) or throw. `fail`/`pass` are accepted but unused, as in the original.
test('silent level', ({ end, fail, pass }) => {
  const opts = { level: 'silent', browser: { } }
  pino(opts)
  end()
})

104
node_modules/pino/test/browser-is-level-enabled.test.js generated vendored Normal file
View File

@@ -0,0 +1,104 @@
'use strict'
const { test } = require('tap')
const pino = require('../browser')
const customLevels = {
trace: 10,
debug: 20,
info: 30,
warn: 40,
error: 50,
fatal: 60
}
test('Default levels suite', ({ test, end }) => {
test('can check if current level enabled', async ({ equal }) => {
const log = pino({ level: 'debug' })
equal(true, log.isLevelEnabled('debug'))
})
test('can check if current level enabled when as object', async ({ equal }) => {
const log = pino({ asObject: true, level: 'debug' })
equal(true, log.isLevelEnabled('debug'))
})
test('can check if level enabled after level set', async ({ equal }) => {
const log = pino()
equal(false, log.isLevelEnabled('debug'))
log.level = 'debug'
equal(true, log.isLevelEnabled('debug'))
})
test('can check if higher level enabled', async ({ equal }) => {
const log = pino({ level: 'debug' })
equal(true, log.isLevelEnabled('error'))
})
test('can check if lower level is disabled', async ({ equal }) => {
const log = pino({ level: 'error' })
equal(false, log.isLevelEnabled('trace'))
})
test('ASC: can check if child has current level enabled', async ({ equal }) => {
const log = pino().child({}, { level: 'debug' })
equal(true, log.isLevelEnabled('debug'))
equal(true, log.isLevelEnabled('error'))
equal(false, log.isLevelEnabled('trace'))
})
test('can check if custom level is enabled', async ({ equal }) => {
const log = pino({
customLevels: { foo: 35 },
level: 'debug'
})
equal(true, log.isLevelEnabled('foo'))
equal(true, log.isLevelEnabled('error'))
equal(false, log.isLevelEnabled('trace'))
})
end()
})
test('Custom levels suite', ({ test, end }) => {
test('can check if current level enabled', async ({ equal }) => {
const log = pino({ level: 'debug', customLevels })
equal(true, log.isLevelEnabled('debug'))
})
test('can check if level enabled after level set', async ({ equal }) => {
const log = pino({ customLevels })
equal(false, log.isLevelEnabled('debug'))
log.level = 'debug'
equal(true, log.isLevelEnabled('debug'))
})
test('can check if higher level enabled', async ({ equal }) => {
const log = pino({ level: 'debug', customLevels })
equal(true, log.isLevelEnabled('error'))
})
test('can check if lower level is disabled', async ({ equal }) => {
const log = pino({ level: 'error', customLevels })
equal(false, log.isLevelEnabled('trace'))
})
test('can check if child has current level enabled', async ({ equal }) => {
const log = pino().child({ customLevels }, { level: 'debug' })
equal(true, log.isLevelEnabled('debug'))
equal(true, log.isLevelEnabled('error'))
equal(false, log.isLevelEnabled('trace'))
})
test('can check if custom level is enabled', async ({ equal }) => {
const log = pino({
customLevels: { foo: 35, ...customLevels },
level: 'debug'
})
equal(true, log.isLevelEnabled('foo'))
equal(true, log.isLevelEnabled('error'))
equal(false, log.isLevelEnabled('trace'))
})
end()
})

241
node_modules/pino/test/browser-levels.test.js generated vendored Normal file
View File

@@ -0,0 +1,241 @@
'use strict'
const test = require('tape')
const pino = require('../browser')
test('set the level by string', ({ end, same, is }) => {
const expected = [
{
level: 50,
msg: 'this is an error'
},
{
level: 60,
msg: 'this is fatal'
}
]
const instance = pino({
browser: {
write (actual) {
checkLogObjects(is, same, actual, expected.shift())
}
}
})
instance.level = 'error'
instance.info('hello world')
instance.error('this is an error')
instance.fatal('this is fatal')
end()
})
test('set the level by string. init with silent', ({ end, same, is }) => {
const expected = [
{
level: 50,
msg: 'this is an error'
},
{
level: 60,
msg: 'this is fatal'
}
]
const instance = pino({
level: 'silent',
browser: {
write (actual) {
checkLogObjects(is, same, actual, expected.shift())
}
}
})
instance.level = 'error'
instance.info('hello world')
instance.error('this is an error')
instance.fatal('this is fatal')
end()
})
test('set the level by string. init with silent and transmit', ({ end, same, is }) => {
const expected = [
{
level: 50,
msg: 'this is an error'
},
{
level: 60,
msg: 'this is fatal'
}
]
const instance = pino({
level: 'silent',
browser: {
write (actual) {
checkLogObjects(is, same, actual, expected.shift())
}
},
transmit: {
send () {}
}
})
instance.level = 'error'
instance.info('hello world')
instance.error('this is an error')
instance.fatal('this is fatal')
end()
})
test('set the level via constructor', ({ end, same, is }) => {
const expected = [
{
level: 50,
msg: 'this is an error'
},
{
level: 60,
msg: 'this is fatal'
}
]
const instance = pino({
level: 'error',
browser: {
write (actual) {
checkLogObjects(is, same, actual, expected.shift())
}
}
})
instance.info('hello world')
instance.error('this is an error')
instance.fatal('this is fatal')
end()
})
test('set custom level and use it', ({ end, same, is }) => {
const expected = [
{
level: 31,
msg: 'this is a custom level'
}
]
const instance = pino({
customLevels: {
success: 31
},
browser: {
write (actual) {
checkLogObjects(is, same, actual, expected.shift())
}
}
})
instance.success('this is a custom level')
end()
})
// Assigning an unknown level label must throw.
test('the wrong level throws', ({ end, throws }) => {
  const logger = pino()
  const setBogusLevel = () => { logger.level = 'kaboom' }
  throws(setBogusLevel)
  end()
})
test('the wrong level by number throws', ({ end, throws }) => {
const instance = pino()
throws(() => {
instance.levelVal = 55
})
end()
})
test('exposes level string mappings', ({ end, is }) => {
is(pino.levels.values.error, 50)
end()
})
test('exposes level number mappings', ({ end, is }) => {
is(pino.levels.labels[50], 'error')
end()
})
test('returns level integer', ({ end, is }) => {
const instance = pino({ level: 'error' })
is(instance.levelVal, 50)
end()
})
test('silent level via constructor', ({ end, fail }) => {
const instance = pino({
level: 'silent',
browser: {
write () {
fail('no data should be logged')
}
}
})
Object.keys(pino.levels.values).forEach((level) => {
instance[level]('hello world')
})
end()
})
test('silent level by string', ({ end, fail }) => {
const instance = pino({
browser: {
write () {
fail('no data should be logged')
}
}
})
instance.level = 'silent'
Object.keys(pino.levels.values).forEach((level) => {
instance[level]('hello world')
})
end()
})
test('exposed levels', ({ end, same }) => {
same(Object.keys(pino.levels.values), [
'fatal',
'error',
'warn',
'info',
'debug',
'trace'
])
end()
})
test('exposed labels', ({ end, same }) => {
same(Object.keys(pino.levels.labels), [
'10',
'20',
'30',
'40',
'50',
'60'
])
end()
})
// Asserts a browser log object matches the expected shape.
// `is` / `same` are the tape assertion functions passed in by the caller;
// the volatile `time` field is validated separately and excluded from the
// deep comparison.
function checkLogObjects (is, same, actual, expected) {
  // The timestamp is captured at log time, so it can be at most Date.now().
  // (Original message read "greater than", contradicting the `<=` check.)
  is(actual.time <= Date.now(), true, 'time is less than or equal to Date.now()')
  // Compare on shallow copies so the caller's objects are not mutated.
  const actualCopy = Object.assign({}, actual)
  const expectedCopy = Object.assign({}, expected)
  delete actualCopy.time
  delete expectedCopy.time
  same(actualCopy, expectedCopy)
}

352
node_modules/pino/test/browser-serializers.test.js generated vendored Normal file
View File

@@ -0,0 +1,352 @@
'use strict'
// eslint-disable-next-line
if (typeof $1 !== 'undefined') $1 = arguments.callee.caller.arguments[0]
const test = require('tape')
const fresh = require('import-fresh')
const pino = require('../browser')
const parentSerializers = {
test: () => 'parent'
}
const childSerializers = {
test: () => 'child'
}
test('serializers override values', ({ end, is }) => {
const parent = pino({
serializers: parentSerializers,
browser: {
serialize: true,
write (o) {
is(o.test, 'parent')
end()
}
}
})
parent.fatal({ test: 'test' })
})
test('without the serialize option, serializers do not override values', ({ end, is }) => {
const parent = pino({
serializers: parentSerializers,
browser: {
write (o) {
is(o.test, 'test')
end()
}
}
})
parent.fatal({ test: 'test' })
})
if (process.title !== 'browser') {
test('if serialize option is true, standard error serializer is auto enabled', ({ end, same }) => {
const err = Error('test')
err.code = 'test'
err.type = 'Error' // get that cov
const expect = pino.stdSerializers.err(err)
const consoleError = console.error
console.error = function (err) {
same(err, expect)
}
const logger = fresh('../browser')({
browser: { serialize: true }
})
console.error = consoleError
logger.fatal(err)
end()
})
test('if serialize option is array, standard error serializer is auto enabled', ({ end, same }) => {
const err = Error('test')
err.code = 'test'
const expect = pino.stdSerializers.err(err)
const consoleError = console.error
console.error = function (err) {
same(err, expect)
}
const logger = fresh('../browser', require)({
browser: { serialize: [] }
})
console.error = consoleError
logger.fatal(err)
end()
})
test('if serialize option is array containing !stdSerializers.err, standard error serializer is disabled', ({ end, is }) => {
const err = Error('test')
err.code = 'test'
const expect = err
const consoleError = console.error
console.error = function (err) {
is(err, expect)
}
const logger = fresh('../browser', require)({
browser: { serialize: ['!stdSerializers.err'] }
})
console.error = consoleError
logger.fatal(err)
end()
})
test('in browser, serializers apply to all objects', ({ end, is }) => {
const consoleError = console.error
console.error = function (test, test2, test3, test4, test5) {
is(test.key, 'serialized')
is(test2.key2, 'serialized2')
is(test5.key3, 'serialized3')
}
const logger = fresh('../browser', require)({
serializers: {
key: () => 'serialized',
key2: () => 'serialized2',
key3: () => 'serialized3'
},
browser: { serialize: true }
})
console.error = consoleError
logger.fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' })
end()
})
test('serialize can be an array of selected serializers', ({ end, is }) => {
const consoleError = console.error
console.error = function (test, test2, test3, test4, test5) {
is(test.key, 'test')
is(test2.key2, 'serialized2')
is(test5.key3, 'test')
}
const logger = fresh('../browser', require)({
serializers: {
key: () => 'serialized',
key2: () => 'serialized2',
key3: () => 'serialized3'
},
browser: { serialize: ['key2'] }
})
console.error = consoleError
logger.fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' })
end()
})
test('serialize filter applies to child loggers', ({ end, is }) => {
const consoleError = console.error
console.error = function (binding, test, test2, test3, test4, test5) {
is(test.key, 'test')
is(test2.key2, 'serialized2')
is(test5.key3, 'test')
}
const logger = fresh('../browser', require)({
browser: { serialize: ['key2'] }
})
console.error = consoleError
logger.child({
aBinding: 'test'
}, {
serializers: {
key: () => 'serialized',
key2: () => 'serialized2',
key3: () => 'serialized3'
}
}).fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' })
end()
})
test('serialize filter applies to child loggers through bindings', ({ end, is }) => {
const consoleError = console.error
console.error = function (binding, test, test2, test3, test4, test5) {
is(test.key, 'test')
is(test2.key2, 'serialized2')
is(test5.key3, 'test')
}
const logger = fresh('../browser', require)({
browser: { serialize: ['key2'] }
})
console.error = consoleError
logger.child({
aBinding: 'test',
serializers: {
key: () => 'serialized',
key2: () => 'serialized2',
key3: () => 'serialized3'
}
}).fatal({ key: 'test' }, { key2: 'test' }, 'str should skip', [{ foo: 'array should skip' }], { key3: 'test' })
end()
})
// Serializers registered on the parent run against a child's binding values.
test('parent serializers apply to child bindings', ({ end, is }) => {
  const consoleError = console.error
  console.error = function (binding) {
    is(binding.key, 'serialized') // binding value passed through the serializer
  }
  const logger = fresh('../browser', require)({
    serializers: {
      key: () => 'serialized'
    },
    browser: { serialize: true }
  })
  // Restore the global; the logger bound the assertion fn at construction.
  console.error = consoleError
  logger.child({ key: 'test' }).fatal({ test: 'test' })
  end()
})
// Serializers supplied with the child options run against that child's own
// binding values as well.
test('child serializers apply to child bindings', ({ end, is }) => {
  const consoleError = console.error
  console.error = function (binding) {
    is(binding.key, 'serialized')
  }
  const logger = fresh('../browser', require)({
    browser: { serialize: true }
  })
  // Restore the global; the logger bound the assertion fn at construction.
  console.error = consoleError
  logger.child({
    key: 'test'
  }, {
    serializers: {
      key: () => 'serialized'
    }
  }).fatal({ test: 'test' })
  end()
})
}
// A child's serializers must not mutate the parent's set: the first write
// (parent) sees the parent serializer's output, the second (child) the
// child's.
test('child does not overwrite parent serializers', ({ end, is }) => {
  let c = 0
  const parent = pino({
    serializers: parentSerializers,
    browser: {
      serialize: true,
      write (o) {
        c++
        if (c === 1) is(o.test, 'parent')
        if (c === 2) {
          is(o.test, 'child')
          end()
        }
      }
    }
  })
  const child = parent.child({}, { serializers: childSerializers })
  parent.fatal({ test: 'test' })
  child.fatal({ test: 'test' })
})
// Children created without their own serializers fall back to the parent's.
test('children inherit parent serializers', ({ end, is }) => {
  const parent = pino({
    serializers: parentSerializers,
    browser: {
      serialize: true,
      write (o) {
        is(o.test, 'parent') // parent's `test` serializer ran for the child
      }
    }
  })
  const child = parent.child({ a: 'property' })
  child.fatal({ test: 'test' })
  end()
})
// A serializer provided only on the child still runs for the child's logs,
// even when the parent registered none.
test('children serializers get called', ({ end, is }) => {
  const parent = pino({
    browser: {
      serialize: true,
      write (o) {
        is(o.test, 'child')
      }
    }
  })
  const child = parent.child({ a: 'property' }, { serializers: childSerializers })
  child.fatal({ test: 'test' })
  end()
})
// A child serializer for the same key takes precedence over the inherited
// parent serializer.
test('children serializers get called when inherited from parent', ({ end, is }) => {
  const parent = pino({
    serializers: parentSerializers,
    browser: {
      serialize: true,
      write: (o) => {
        is(o.test, 'pass') // child's serializer rewrote 'fail' -> 'pass'
      }
    }
  })
  const child = parent.child({}, { serializers: { test: () => 'pass' } })
  child.fatal({ test: 'fail' })
  end()
})
// Merge rules for serializers: on a shared key the child's wins, parent-only
// and child-only serializers are both reachable from the child, and the
// parent itself never picks up child-only serializers.
test('non overridden serializers are available in the children', ({ end, is }) => {
  const pSerializers = {
    onlyParent: () => 'parent',
    shared: () => 'parent'
  }
  const cSerializers = {
    shared: () => 'child',
    onlyChild: () => 'child'
  }
  let c = 0
  const parent = pino({
    serializers: pSerializers,
    browser: {
      serialize: true,
      write (o) {
        c++
        // 1: shared key serialized by the child override
        if (c === 1) is(o.shared, 'child')
        // 2: parent-only serializer still applies in the child
        if (c === 2) is(o.onlyParent, 'parent')
        // 3: child-only serializer applies in the child
        if (c === 3) is(o.onlyChild, 'child')
        // 4: the parent logger does not see the child-only serializer
        if (c === 4) is(o.onlyChild, 'test')
      }
    }
  })
  const child = parent.child({}, { serializers: cSerializers })
  child.fatal({ shared: 'test' })
  child.fatal({ onlyParent: 'test' })
  child.fatal({ onlyChild: 'test' })
  parent.fatal({ onlyChild: 'test' })
  end()
})

88
node_modules/pino/test/browser-timestamp.test.js generated vendored Normal file
View File

@@ -0,0 +1,88 @@
'use strict'

// Timestamp handling in the browser shim: each pino.stdTimeFunctions
// variant (and `timestamp: false`) is checked against a frozen clock.
const test = require('tape')
const pino = require('../browser')

// Freeze the clock for every test below: 1599400603614 ms is
// 2020-09-06T13:56:43.614Z (matches the iso-timestamp expectation).
Date.now = () => 1599400603614

// nullTime: no `time` property on the record at all.
test('null timestamp', ({ end, is }) => {
  const instance = pino({
    timestamp: pino.stdTimeFunctions.nullTime,
    browser: {
      asObject: true,
      write: function (o) {
        is(o.time, undefined)
      }
    }
  })
  instance.info('hello world')
  end()
})

// isoTime: `time` is the ISO-8601 string for the frozen instant.
test('iso timestamp', ({ end, is }) => {
  const instance = pino({
    timestamp: pino.stdTimeFunctions.isoTime,
    browser: {
      asObject: true,
      write: function (o) {
        is(o.time, '2020-09-06T13:56:43.614Z')
      }
    }
  })
  instance.info('hello world')
  end()
})

// epochTime: `time` is the raw millisecond epoch value.
test('epoch timestamp', ({ end, is }) => {
  const instance = pino({
    timestamp: pino.stdTimeFunctions.epochTime,
    browser: {
      asObject: true,
      write: function (o) {
        is(o.time, 1599400603614)
      }
    }
  })
  instance.info('hello world')
  end()
})

// unixTime: `time` is the epoch rounded to whole seconds.
test('unix timestamp', ({ end, is }) => {
  const instance = pino({
    timestamp: pino.stdTimeFunctions.unixTime,
    browser: {
      asObject: true,
      write: function (o) {
        is(o.time, Math.round(1599400603614 / 1000.0))
      }
    }
  })
  instance.info('hello world')
  end()
})

// With no `timestamp` option, millisecond epoch is the default.
test('epoch timestamp by default', ({ end, is }) => {
  const instance = pino({
    browser: {
      asObject: true,
      write: function (o) {
        is(o.time, 1599400603614)
      }
    }
  })
  instance.info('hello world')
  end()
})

// `timestamp: false` suppresses the `time` property entirely.
test('not print timestamp if the option is false', ({ end, is }) => {
  const instance = pino({
    timestamp: false,
    browser: {
      asObject: true,
      write: function (o) {
        is(o.time, undefined)
      }
    }
  })
  instance.info('hello world')
  end()
})

417
node_modules/pino/test/browser-transmit.test.js generated vendored Normal file
View File

@@ -0,0 +1,417 @@
'use strict'

// Tests for the browser shim's `browser.transmit` option.
const test = require('tape')
const pino = require('../browser')

// Shared do-nothing `write` used where only transmit behaviour matters.
function noop () {}

// transmit.send is mandatory and must be a function.
test('throws if transmit object does not have send function', ({ end, throws }) => {
  throws(() => {
    pino({ browser: { transmit: {} } })
  })
  throws(() => {
    pino({ browser: { transmit: { send: 'not a func' } } })
  })
  end()
})
// Ordering: `write` runs first, then `send` — the counter is already 1 by
// the time send fires.
test('calls send function after write', ({ end, is }) => {
  let c = 0
  const logger = pino({
    browser: {
      write: () => {
        c++
      },
      transmit: {
        send () { is(c, 1) }
      }
    }
  })
  logger.fatal({ test: 'test' })
  end()
})

// send's first argument is the level label of the triggering call.
test('passes send function the logged level', ({ end, is }) => {
  const logger = pino({
    browser: {
      write () {},
      transmit: {
        send (level) {
          is(level, 'fatal')
        }
      }
    }
  })
  logger.fatal({ test: 'test' })
  end()
})
test('passes send function message strings in logEvent object when asObject is not set', ({ end, same, is }) => {
const logger = pino({
browser: {
write: noop,
transmit: {
send (level, { messages }) {
is(messages[0], 'test')
is(messages[1], 'another test')
}
}
}
})
logger.fatal('test', 'another test')
end()
})
test('passes send function message objects in logEvent object when asObject is not set', ({ end, same, is }) => {
const logger = pino({
browser: {
write: noop,
transmit: {
send (level, { messages }) {
same(messages[0], { test: 'test' })
is(messages[1], 'another test')
}
}
}
})
logger.fatal({ test: 'test' }, 'another test')
end()
})
test('passes send function message strings in logEvent object when asObject is set', ({ end, same, is }) => {
const logger = pino({
browser: {
asObject: true,
write: noop,
transmit: {
send (level, { messages }) {
is(messages[0], 'test')
is(messages[1], 'another test')
}
}
}
})
logger.fatal('test', 'another test')
end()
})
test('passes send function message objects in logEvent object when asObject is set', ({ end, same, is }) => {
const logger = pino({
browser: {
asObject: true,
write: noop,
transmit: {
send (level, { messages }) {
same(messages[0], { test: 'test' })
is(messages[1], 'another test')
}
}
}
})
logger.fatal({ test: 'test' }, 'another test')
end()
})
test('supplies a timestamp (ts) in logEvent object which is exactly the same as the `time` property in asObject mode', ({ end, is }) => {
let expected
const logger = pino({
browser: {
asObject: true, // implicit because `write`, but just to be explicit
write (o) {
expected = o.time
},
transmit: {
send (level, logEvent) {
is(logEvent.ts, expected)
}
}
}
})
logger.fatal('test')
end()
})
test('passes send function child bindings via logEvent object', ({ end, same, is }) => {
const logger = pino({
browser: {
write: noop,
transmit: {
send (level, logEvent) {
const messages = logEvent.messages
const bindings = logEvent.bindings
same(bindings[0], { first: 'binding' })
same(bindings[1], { second: 'binding2' })
same(messages[0], { test: 'test' })
is(messages[1], 'another test')
}
}
}
})
logger
.child({ first: 'binding' })
.child({ second: 'binding2' })
.fatal({ test: 'test' }, 'another test')
end()
})
test('passes send function level:{label, value} via logEvent object', ({ end, is }) => {
const logger = pino({
browser: {
write: noop,
transmit: {
send (level, logEvent) {
const label = logEvent.level.label
const value = logEvent.level.value
is(label, 'fatal')
is(value, 60)
}
}
}
})
logger.fatal({ test: 'test' }, 'another test')
end()
})
test('calls send function according to transmit.level', ({ end, is }) => {
let c = 0
const logger = pino({
browser: {
write: noop,
transmit: {
level: 'error',
send (level) {
c++
if (c === 1) is(level, 'error')
if (c === 2) is(level, 'fatal')
}
}
}
})
logger.warn('ignored')
logger.error('test')
logger.fatal('test')
end()
})
test('transmit.level defaults to logger level', ({ end, is }) => {
let c = 0
const logger = pino({
level: 'error',
browser: {
write: noop,
transmit: {
send (level) {
c++
if (c === 1) is(level, 'error')
if (c === 2) is(level, 'fatal')
}
}
}
})
logger.warn('ignored')
logger.error('test')
logger.fatal('test')
end()
})
test('transmit.level is effective even if lower than logger level', ({ end, is }) => {
let c = 0
const logger = pino({
level: 'error',
browser: {
write: noop,
transmit: {
level: 'info',
send (level) {
c++
if (c === 1) is(level, 'warn')
if (c === 2) is(level, 'error')
if (c === 3) is(level, 'fatal')
}
}
}
})
logger.warn('ignored')
logger.error('test')
logger.fatal('test')
end()
})
test('applies all serializers to messages and bindings (serialize:false - default)', ({ end, same, is }) => {
const logger = pino({
serializers: {
first: () => 'first',
second: () => 'second',
test: () => 'serialize it'
},
browser: {
write: noop,
transmit: {
send (level, logEvent) {
const messages = logEvent.messages
const bindings = logEvent.bindings
same(bindings[0], { first: 'first' })
same(bindings[1], { second: 'second' })
same(messages[0], { test: 'serialize it' })
is(messages[1].type, 'Error')
}
}
}
})
logger
.child({ first: 'binding' })
.child({ second: 'binding2' })
.fatal({ test: 'test' }, Error())
end()
})
test('applies all serializers to messages and bindings (serialize:true)', ({ end, same, is }) => {
const logger = pino({
serializers: {
first: () => 'first',
second: () => 'second',
test: () => 'serialize it'
},
browser: {
serialize: true,
write: noop,
transmit: {
send (level, logEvent) {
const messages = logEvent.messages
const bindings = logEvent.bindings
same(bindings[0], { first: 'first' })
same(bindings[1], { second: 'second' })
same(messages[0], { test: 'serialize it' })
is(messages[1].type, 'Error')
}
}
}
})
logger
.child({ first: 'binding' })
.child({ second: 'binding2' })
.fatal({ test: 'test' }, Error())
end()
})
// Each transmit carries only the bindings of the logger that produced it
// (in ancestor order) and only the messages of that single call — state
// must not leak between successive logs or between parent/child loggers.
test('extracts correct bindings and raw messages over multiple transmits', ({ end, same, is }) => {
  let messages = null
  let bindings = null
  const logger = pino({
    browser: {
      write: noop,
      transmit: {
        send (level, logEvent) {
          messages = logEvent.messages
          bindings = logEvent.bindings
        }
      }
    }
  })
  const child = logger.child({ child: true })
  const grandchild = child.child({ grandchild: true })
  logger.fatal({ test: 'parent:test1' })
  logger.fatal({ test: 'parent:test2' })
  same([], bindings) // root logger: no bindings
  same([{ test: 'parent:test2' }], messages) // only the latest call's message
  child.fatal({ test: 'child:test1' })
  child.fatal({ test: 'child:test2' })
  same([{ child: true }], bindings)
  same([{ test: 'child:test2' }], messages)
  grandchild.fatal({ test: 'grandchild:test1' })
  grandchild.fatal({ test: 'grandchild:test2' })
  same([{ child: true }, { grandchild: true }], bindings) // ancestor order
  same([{ test: 'grandchild:test2' }], messages)
  end()
})
// Calls below the logger level reach neither write nor send.
test('does not log below configured level', ({ end, is }) => {
  let message = null
  const logger = pino({
    level: 'info',
    browser: {
      write (o) {
        message = o.msg
      },
      transmit: {
        send () { }
      }
    }
  })
  logger.debug('this message is silent')
  is(message, null)
  end()
})

// `level: 'silent'` suppresses every level for both write and send.
test('silent level prevents logging even with transmit', ({ end, fail }) => {
  const logger = pino({
    level: 'silent',
    browser: {
      write () {
        fail('no data should be logged by the write method')
      },
      transmit: {
        send () {
          fail('no data should be logged by the send method')
        }
      }
    }
  })
  Object.keys(pino.levels.values).forEach((level) => {
    logger[level]('ignored')
  })
  end()
})

// transmit.level 'silent' disables only send; write still fires once per
// level because the logger itself is at 'trace'.
test('does not call send when transmit.level is set to silent', ({ end, fail, is }) => {
  let c = 0
  const logger = pino({
    level: 'trace',
    browser: {
      write () {
        c++
      },
      transmit: {
        level: 'silent',
        send () {
          fail('no data should be logged by the transmit method')
        }
      }
    }
  })
  const levels = Object.keys(pino.levels.values)
  levels.forEach((level) => {
    logger[level]('message')
  })
  is(c, levels.length, 'write must be called exactly once per level')
  end()
})

679
node_modules/pino/test/browser.test.js generated vendored Normal file
View File

@@ -0,0 +1,679 @@
'use strict'
const test = require('tape')
const fresh = require('import-fresh')
const pinoStdSerializers = require('pino-std-serializers')
const pino = require('../browser')
levelTest('fatal')
levelTest('error')
levelTest('warn')
levelTest('info')
levelTest('debug')
levelTest('trace')
// `level: 'silent'` must suppress output from the logger and its children;
// the deferred pass() gives any stray async write a chance to trigger fail.
test('silent level', ({ end, fail, pass }) => {
  const instance = pino({
    level: 'silent',
    browser: { write: fail }
  })
  instance.info('test')
  const child = instance.child({ test: 'test' })
  child.info('msg-test')
  // use setTimeout because setImmediate isn't supported in most browsers
  setTimeout(() => {
    pass()
    end()
  }, 0)
})
test('enabled false', ({ end, fail, pass }) => {
const instance = pino({
enabled: false,
browser: { write: fail }
})
instance.info('test')
const child = instance.child({ test: 'test' })
child.info('msg-test')
// use setTimeout because setImmediate isn't supported in most browsers
setTimeout(() => {
pass()
end()
}, 0)
})
test('throw if creating child without bindings', ({ end, throws }) => {
const instance = pino()
throws(() => instance.child())
end()
})
test('stubs write, flush and ee methods on instance', ({ end, ok, is }) => {
const instance = pino()
ok(isFunc(instance.setMaxListeners))
ok(isFunc(instance.getMaxListeners))
ok(isFunc(instance.emit))
ok(isFunc(instance.addListener))
ok(isFunc(instance.on))
ok(isFunc(instance.prependListener))
ok(isFunc(instance.once))
ok(isFunc(instance.prependOnceListener))
ok(isFunc(instance.removeListener))
ok(isFunc(instance.removeAllListeners))
ok(isFunc(instance.listeners))
ok(isFunc(instance.listenerCount))
ok(isFunc(instance.eventNames))
ok(isFunc(instance.write))
ok(isFunc(instance.flush))
is(instance.on(), undefined)
end()
})
test('exposes levels object', ({ end, same }) => {
same(pino.levels, {
values: {
fatal: 60,
error: 50,
warn: 40,
info: 30,
debug: 20,
trace: 10
},
labels: {
10: 'trace',
20: 'debug',
30: 'info',
40: 'warn',
50: 'error',
60: 'fatal'
}
})
end()
})
test('exposes faux stdSerializers', ({ end, ok, same }) => {
ok(pino.stdSerializers)
// make sure faux stdSerializers match pino-std-serializers
for (const serializer in pinoStdSerializers) {
ok(pino.stdSerializers[serializer], `pino.stdSerializers.${serializer}`)
}
// confirm faux methods return empty objects
same(pino.stdSerializers.req(), {})
same(pino.stdSerializers.mapHttpRequest(), {})
same(pino.stdSerializers.mapHttpResponse(), {})
same(pino.stdSerializers.res(), {})
// confirm wrapping function is a passthrough
const noChange = { foo: 'bar', fuz: 42 }
same(pino.stdSerializers.wrapRequestSerializer(noChange), noChange)
same(pino.stdSerializers.wrapResponseSerializer(noChange), noChange)
end()
})
test('exposes err stdSerializer', ({ end, ok }) => {
ok(pino.stdSerializers.err)
ok(pino.stdSerializers.err(Error()))
end()
})
consoleMethodTest('error')
consoleMethodTest('fatal', 'error')
consoleMethodTest('warn')
consoleMethodTest('info')
consoleMethodTest('debug')
consoleMethodTest('trace')
absentConsoleMethodTest('error', 'log')
absentConsoleMethodTest('warn', 'error')
absentConsoleMethodTest('info', 'log')
absentConsoleMethodTest('debug', 'log')
absentConsoleMethodTest('trace', 'log')
// do not run this with airtap
if (process.title !== 'browser') {
test('in absence of console, log methods become noops', ({ end, ok }) => {
const console = global.console
delete global.console
const instance = fresh('../browser')()
global.console = console
ok(fnName(instance.log).match(/noop/))
ok(fnName(instance.fatal).match(/noop/))
ok(fnName(instance.error).match(/noop/))
ok(fnName(instance.warn).match(/noop/))
ok(fnName(instance.info).match(/noop/))
ok(fnName(instance.debug).match(/noop/))
ok(fnName(instance.trace).match(/noop/))
end()
})
}
// asObject mode delivers a pino-shaped record ({ level, msg, time }) to the
// matching console method instead of raw log arguments.
test('opts.browser.asObject logs pino-like object to console', ({ end, ok, is }) => {
  const info = console.info
  console.info = function (o) {
    is(o.level, 30)
    is(o.msg, 'test')
    ok(o.time)
    console.info = info // restore after the single expected call
  }
  const instance = require('../browser')({
    browser: {
      asObject: true
    }
  })
  instance.info('test')
  end()
})
test('opts.browser.asObject uses opts.messageKey in logs', ({ end, ok, is }) => {
const messageKey = 'message'
const instance = require('../browser')({
messageKey,
browser: {
asObject: true,
write: function (o) {
is(o.level, 30)
is(o[messageKey], 'test')
ok(o.time)
}
}
})
instance.info('test')
end()
})
test('opts.browser.asObjectBindingsOnly passes the bindings but keep the message unformatted', ({ end, ok, is, deepEqual }) => {
const messageKey = 'message'
const instance = require('../browser')({
messageKey,
browser: {
asObjectBindingsOnly: true,
write: function (o, msg, ...args) {
is(o.level, 30)
ok(o.time)
is(msg, 'test %s')
deepEqual(args, ['foo'])
}
}
})
instance.info('test %s', 'foo')
end()
})
test('opts.browser.formatters (level) logs pino-like object to console', ({ end, ok, is }) => {
const info = console.info
console.info = function (o) {
is(o.level, 30)
is(o.label, 'info')
is(o.msg, 'test')
ok(o.time)
console.info = info
}
const instance = require('../browser')({
browser: {
formatters: {
level (label, number) {
return { label, level: number }
}
}
}
})
instance.info('test')
end()
})
test('opts.browser.formatters (log) logs pino-like object to console', ({ end, ok, is }) => {
const info = console.info
console.info = function (o) {
is(o.level, 30)
is(o.msg, 'test')
is(o.hello, 'world')
is(o.newField, 'test')
ok(o.time, `Logged at ${o.time}`)
console.info = info
}
const instance = require('../browser')({
browser: {
formatters: {
log (o) {
return { ...o, newField: 'test', time: `Logged at ${o.time}` }
}
}
}
})
instance.info({ hello: 'world' }, 'test')
end()
})
test('opts.browser.serialize and opts.browser.transmit only serializes log data once', ({ end, ok, is }) => {
const instance = require('../browser')({
serializers: {
extras (data) {
return { serializedExtras: data }
}
},
browser: {
serialize: ['extras'],
transmit: {
level: 'info',
send (level, o) {
is(o.messages[0].extras.serializedExtras, 'world')
}
}
}
})
instance.info({ extras: 'world' }, 'test')
end()
})
test('opts.browser.serialize and opts.asObject only serializes log data once', ({ end, ok, is }) => {
const instance = require('../browser')({
serializers: {
extras (data) {
return { serializedExtras: data }
}
},
browser: {
serialize: ['extras'],
asObject: true,
write: function (o) {
is(o.extras.serializedExtras, 'world')
}
}
})
instance.info({ extras: 'world' }, 'test')
end()
})
test('opts.browser.serialize, opts.asObject and opts.browser.transmit only serializes log data once', ({ end, ok, is }) => {
const instance = require('../browser')({
serializers: {
extras (data) {
return { serializedExtras: data }
}
},
browser: {
serialize: ['extras'],
asObject: true,
transmit: {
send (level, o) {
is(o.messages[0].extras.serializedExtras, 'world')
}
}
}
})
instance.info({ extras: 'world' }, 'test')
end()
})
test('opts.browser.write func log single string', ({ end, ok, is }) => {
const instance = pino({
browser: {
write: function (o) {
is(o.level, 30)
is(o.msg, 'test')
ok(o.time)
}
}
})
instance.info('test')
end()
})
test('opts.browser.write func string joining', ({ end, ok, is }) => {
const instance = pino({
browser: {
write: function (o) {
is(o.level, 30)
is(o.msg, 'test test2 test3')
ok(o.time)
}
}
})
instance.info('test %s %s', 'test2', 'test3')
end()
})
test('opts.browser.write func string joining when asObject is true', ({ end, ok, is }) => {
const instance = pino({
browser: {
asObject: true,
write: function (o) {
is(o.level, 30)
is(o.msg, 'test test2 test3')
ok(o.time)
}
}
})
instance.info('test %s %s', 'test2', 'test3')
end()
})
test('opts.browser.write func string object joining', ({ end, ok, is }) => {
const instance = pino({
browser: {
write: function (o) {
is(o.level, 30)
is(o.msg, 'test {"test":"test2"} {"test":"test3"}')
ok(o.time)
}
}
})
instance.info('test %j %j', { test: 'test2' }, { test: 'test3' })
end()
})
test('opts.browser.write func string object joining when asObject is true', ({ end, ok, is }) => {
const instance = pino({
browser: {
asObject: true,
write: function (o) {
is(o.level, 30)
is(o.msg, 'test {"test":"test2"} {"test":"test3"}')
ok(o.time)
}
}
})
instance.info('test %j %j', { test: 'test2' }, { test: 'test3' })
end()
})
test('opts.browser.write func string interpolation', ({ end, ok, is }) => {
const instance = pino({
browser: {
write: function (o) {
is(o.level, 30)
is(o.msg, 'test2 test ({"test":"test3"})')
ok(o.time)
}
}
})
instance.info('%s test (%j)', 'test2', { test: 'test3' })
end()
})
test('opts.browser.write func number', ({ end, ok, is }) => {
const instance = pino({
browser: {
write: function (o) {
is(o.level, 30)
is(o.msg, 1)
ok(o.time)
}
}
})
instance.info(1)
end()
})
test('opts.browser.write func log single object', ({ end, ok, is }) => {
const instance = pino({
browser: {
write: function (o) {
is(o.level, 30)
is(o.test, 'test')
ok(o.time)
}
}
})
instance.info({ test: 'test' })
end()
})
test('opts.browser.write obj writes to methods corresponding to level', ({ end, ok, is }) => {
const instance = pino({
browser: {
write: {
error: function (o) {
is(o.level, 50)
is(o.test, 'test')
ok(o.time)
}
}
}
})
instance.error({ test: 'test' })
end()
})
test('opts.browser.asObject/write supports child loggers', ({ end, ok, is }) => {
const instance = pino({
browser: {
write (o) {
is(o.level, 30)
is(o.test, 'test')
is(o.msg, 'msg-test')
ok(o.time)
}
}
})
const child = instance.child({ test: 'test' })
child.info('msg-test')
end()
})
test('opts.browser.asObject/write supports child child loggers', ({ end, ok, is }) => {
const instance = pino({
browser: {
write (o) {
is(o.level, 30)
is(o.test, 'test')
is(o.foo, 'bar')
is(o.msg, 'msg-test')
ok(o.time)
}
}
})
const child = instance.child({ test: 'test' }).child({ foo: 'bar' })
child.info('msg-test')
end()
})
test('opts.browser.asObject/write supports child child child loggers', ({ end, ok, is }) => {
const instance = pino({
browser: {
write (o) {
is(o.level, 30)
is(o.test, 'test')
is(o.foo, 'bar')
is(o.baz, 'bop')
is(o.msg, 'msg-test')
ok(o.time)
}
}
})
const child = instance.child({ test: 'test' }).child({ foo: 'bar' }).child({ baz: 'bop' })
child.info('msg-test')
end()
})
test('opts.browser.asObject defensively mitigates naughty numbers', ({ end, pass }) => {
const instance = pino({
browser: { asObject: true, write: () => {} }
})
const child = instance.child({ test: 'test' })
child._childLevel = -10
child.info('test')
pass() // if we reached here, there was no infinite loop, so, .. pass.
end()
})
test('opts.browser.write obj falls back to console where a method is not supplied', ({ end, ok, is }) => {
const info = console.info
console.info = (o) => {
is(o.level, 30)
is(o.msg, 'test')
ok(o.time)
console.info = info
}
const instance = require('../browser')({
browser: {
write: {
error (o) {
is(o.level, 50)
is(o.test, 'test')
ok(o.time)
}
}
}
})
instance.error({ test: 'test' })
instance.info('test')
end()
})
// For one level name, exercises the browser shim's logging matrix: plain
// strings, objects, object+string pairs, printf-style formatting, Error
// instances (with and without a serializer — serializers have no effect in
// the browser), and child / grandchild bindings. `sink` intercepts the
// matching console method for exactly one call per test.
function levelTest (name) {
  test(name + ' logs', ({ end, is }) => {
    const msg = 'hello world'
    sink(name, (args) => {
      is(args[0], msg)
      end()
    })
    pino({ level: name })[name](msg)
  })

  // Objects are forwarded by reference, not serialized.
  test('passing objects at level ' + name, ({ end, is }) => {
    const msg = { hello: 'world' }
    sink(name, (args) => {
      is(args[0], msg)
      end()
    })
    pino({ level: name })[name](msg)
  })

  test('passing an object and a string at level ' + name, ({ end, is }) => {
    const a = { hello: 'world' }
    const b = 'a string'
    sink(name, (args) => {
      is(args[0], a)
      is(args[1], b)
      end()
    })
    pino({ level: name })[name](a, b)
  })

  // Format string and its argument pass through unformatted; the console
  // is expected to do the interpolation.
  test('formatting logs as ' + name, ({ end, is }) => {
    sink(name, (args) => {
      is(args[0], 'hello %d')
      is(args[1], 42)
      end()
    })
    pino({ level: name })[name]('hello %d', 42)
  })

  test('passing error at level ' + name, ({ end, is }) => {
    const err = new Error('myerror')
    sink(name, (args) => {
      is(args[0], err)
      end()
    })
    pino({ level: name })[name](err)
  })

  test('passing error with a serializer at level ' + name, ({ end, is }) => {
    // in browser - should have no effect (should not crash)
    const err = new Error('myerror')
    sink(name, (args) => {
      is(args[0].err, err)
      end()
    })
    const instance = pino({
      level: name,
      serializers: {
        err: pino.stdSerializers.err
      }
    })
    instance[name]({ err })
  })

  // Child bindings are prepended to the console arguments.
  test('child logger for level ' + name, ({ end, is }) => {
    const msg = 'hello world'
    const parent = { hello: 'world' }
    sink(name, (args) => {
      is(args[0], parent)
      is(args[1], msg)
      end()
    })
    const instance = pino({ level: name })
    const child = instance.child(parent)
    child[name](msg)
  })

  // Grandchild bindings arrive in ancestor order before the message.
  test('child-child logger for level ' + name, ({ end, is }) => {
    const msg = 'hello world'
    const grandParent = { hello: 'world' }
    const parent = { hello: 'you' }
    sink(name, (args) => {
      is(args[0], grandParent)
      is(args[1], parent)
      is(args[2], msg)
      end()
    })
    const instance = pino({ level: name })
    const child = instance.child(grandParent).child(parent)
    child[name](msg)
  })
}
// Asserts that logging at `level` routes to console[`method`]; when no
// explicit method is given, the level's own console method is expected.
function consoleMethodTest (level, method) {
  const target = method || level
  test(`pino().${level} uses console.${target}`, ({ end, is }) => {
    sink(target, (args) => {
      is(args[0], 'test')
      end()
    })
    require('../browser')({ level })[level]('test')
  })
}
// Asserts that when console[`method`] is missing, the shim falls back to
// console[`fallback`]. The original method is restored inside the sink
// callback after the assertion fires.
function absentConsoleMethodTest (method, fallback) {
  test('in absence of console.' + method + ', console.' + fallback + ' is used', ({ end, is }) => {
    const original = console[method]
    console[method] = undefined
    sink(fallback, (args) => {
      is(args[0], 'test')
      end()
      console[method] = original
    })
    require('../browser')({ level: method })[method]('test')
  })
}
function isFunc (fn) { return typeof fn === 'function' }
// Extracts the declared name from a function's source text ("function NAME(")
// by coercing the function to a string; returns null when the text does not
// start with the `function` keyword (e.g. arrow functions).
function fnName (fn) {
  const match = /^\s*function\s*([^(]*)/i.exec(fn)
  return match && match[1]
}
// One-shot console interceptor: replaces console[method] ('fatal' maps to
// 'error') with a wrapper that restores the original method and then hands
// the call's arguments, as an array, to `fn`.
function sink (method, fn) {
  const target = method === 'fatal' ? 'error' : method
  const original = console[target]
  console[target] = function (...args) {
    // Restore before invoking fn so any logging inside fn is not captured.
    console[target] = original
    fn(args)
  }
}

34
node_modules/pino/test/complex-objects.test.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
'use strict'

// Regression tests: logging objects the serializer cannot safely walk (a
// live stream plus a Proxy whose getter throws) must degrade to a
// placeholder string rather than throwing.
const { test } = require('tap')
const { sink, once } = require('./helper')
const { PassThrough } = require('node:stream')
const pino = require('../')

// NOTE(review): both tests share the same name; the first logs the
// problematic object nested under a key, the second logs it at the top
// level — consider distinct names for clearer reporting.
test('Proxy and stream objects', async ({ equal }) => {
  const s = new PassThrough()
  s.resume()
  s.write('', () => {})
  const obj = { s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) }
  const stream = sink()
  const instance = pino(stream)
  instance.info({ obj })
  const result = await once(stream, 'data')
  equal(result.obj, '[unable to serialize, circular reference is too complex to analyze]')
})

test('Proxy and stream objects', async ({ equal }) => {
  const s = new PassThrough()
  s.resume()
  s.write('', () => {})
  const obj = { s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) }
  const stream = sink()
  const instance = pino(stream)
  instance.info(obj)
  const result = await once(stream, 'data')
  equal(result.p, '[unable to serialize, circular reference is too complex to analyze]')
})

32
node_modules/pino/test/crlf.test.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
'use strict'

// Line-terminator tests: pino emits LF-terminated records unless the
// `crlf` option requests CRLF.
const { test } = require('tap')
const writer = require('flush-write-stream')
const pino = require('../')

// Collecting sink: concatenates every written chunk onto `ws.data`.
function capture () {
  const ws = writer((chunk, enc, cb) => {
    ws.data += chunk.toString()
    cb()
  })
  ws.data = ''
  return ws
}

test('pino uses LF by default', async ({ ok }) => {
  const stream = capture()
  const logger = pino(stream)
  logger.info('foo')
  logger.error('bar')
  // two records, each ending in \n with no \r anywhere in the record
  ok(/foo[^\r\n]+\n[^\r\n]+bar[^\r\n]+\n/.test(stream.data))
})

test('pino can log CRLF', async ({ ok }) => {
  const stream = capture()
  const logger = pino({
    crlf: true
  }, stream)
  logger.info('foo')
  logger.error('bar')
  // each record now ends in \r\n
  ok(/foo[^\n]+\r\n[^\n]+bar[^\n]+\r\n/.test(stream.data))
})

253
node_modules/pino/test/custom-levels.test.js generated vendored Normal file
View File

@@ -0,0 +1,253 @@
'use strict'
/* eslint no-prototype-builtins: 0 */

// Tests for the `customLevels` / `useOnlyCustomLevels` options.
const { test } = require('tap')
const { sink, once } = require('./helper')
const pino = require('../')

// Silence all warnings for this test
process.removeAllListeners('warning')
process.on('warning', () => {})

// Each custom level adds a logging method whose records carry the
// configured numeric level value.
test('adds additional levels', async ({ equal }) => {
  const stream = sink()
  const logger = pino({
    customLevels: {
      foo: 35,
      bar: 45
    }
  }, stream)

  logger.foo('test')
  const { level } = await once(stream, 'data')
  equal(level, 35)
})
test('custom levels does not override default levels', async ({ equal }) => {
const stream = sink()
const logger = pino({
customLevels: {
foo: 35
}
}, stream)
logger.info('test')
const { level } = await once(stream, 'data')
equal(level, 30)
})
test('default levels can be redefined using custom levels', async ({ equal }) => {
const stream = sink()
const logger = pino({
customLevels: {
info: 35,
debug: 45
},
useOnlyCustomLevels: true
}, stream)
equal(logger.hasOwnProperty('info'), true)
logger.info('test')
const { level } = await once(stream, 'data')
equal(level, 35)
})
test('custom levels overrides default level label if use useOnlyCustomLevels', async ({ equal }) => {
const stream = sink()
const logger = pino({
customLevels: {
foo: 35
},
useOnlyCustomLevels: true,
level: 'foo'
}, stream)
equal(logger.hasOwnProperty('info'), false)
})
test('custom levels overrides default level value if use useOnlyCustomLevels', async ({ equal }) => {
const stream = sink()
const logger = pino({
customLevels: {
foo: 35
},
useOnlyCustomLevels: true,
level: 35
}, stream)
equal(logger.hasOwnProperty('info'), false)
})
test('custom levels are inherited by children', async ({ equal }) => {
const stream = sink()
const logger = pino({
customLevels: {
foo: 35
}
}, stream)
logger.child({ childMsg: 'ok' }).foo('test')
const { msg, childMsg, level } = await once(stream, 'data')
equal(level, 35)
equal(childMsg, 'ok')
equal(msg, 'test')
})
test('custom levels can be specified on child bindings', async ({ equal }) => {
const stream = sink()
const logger = pino(stream).child({
childMsg: 'ok'
}, {
customLevels: {
foo: 35
}
})
logger.foo('test')
const { msg, childMsg, level } = await once(stream, 'data')
equal(level, 35)
equal(childMsg, 'ok')
equal(msg, 'test')
})
test('customLevels property child bindings does not get logged', async ({ equal }) => {
const stream = sink()
const logger = pino(stream).child({
childMsg: 'ok'
}, {
customLevels: {
foo: 35
}
})
logger.foo('test')
const { customLevels } = await once(stream, 'data')
equal(customLevels, undefined)
})
test('throws when specifying pre-existing parent labels via child bindings', async ({ throws }) => {
const stream = sink()
throws(() => pino({
customLevels: {
foo: 35
}
}, stream).child({}, {
customLevels: {
foo: 45
}
}), 'levels cannot be overridden')
})
test('throws when specifying pre-existing parent values via child bindings', async ({ throws }) => {
const stream = sink()
throws(() => pino({
customLevels: {
foo: 35
}
}, stream).child({}, {
customLevels: {
bar: 35
}
}), 'pre-existing level values cannot be used for new levels')
})
test('throws when specifying core values via child bindings', async ({ throws }) => {
const stream = sink()
throws(() => pino(stream).child({}, {
customLevels: {
foo: 30
}
}), 'pre-existing level values cannot be used for new levels')
})
test('throws when useOnlyCustomLevels is set true without customLevels', async ({ throws }) => {
const stream = sink()
throws(() => pino({
useOnlyCustomLevels: true
}, stream), 'customLevels is required if useOnlyCustomLevels is set true')
})
test('custom level on one instance does not affect other instances', async ({ equal }) => {
pino({
customLevels: {
foo: 37
}
})
equal(typeof pino().foo, 'undefined')
})
// With the threshold set to the custom level itself ('foo' = 35),
// info (30) is suppressed and the custom method's record is the first
// (and only) one emitted.
test('setting level below or at custom level will successfully log', async ({ equal }) => {
  const stream = sink()
  const instance = pino({ customLevels: { foo: 35 } }, stream)
  instance.level = 'foo'
  instance.info('nope')
  instance.foo('bar')
  const { msg } = await once(stream, 'data')
  equal(msg, 'bar')
})

// A custom level (15) below the active threshold ('info' = 30) does not
// log; only the info record reaches the stream.
test('custom level below level threshold will not log', async ({ equal }) => {
  const stream = sink()
  const instance = pino({ customLevels: { foo: 15 } }, stream)
  instance.level = 'info'
  instance.info('bar')
  instance.foo('nope')
  const { msg } = await once(stream, 'data')
  equal(msg, 'bar')
})

// Two sibling children may declare the same custom level independently:
// the first child's registration must not leak into the parent's state
// and make the second child's declaration throw.
test('does not share custom level state across siblings', async ({ doesNotThrow }) => {
  const stream = sink()
  const logger = pino(stream)
  logger.child({}, {
    customLevels: { foo: 35 }
  })
  doesNotThrow(() => {
    logger.child({}, {
      customLevels: { foo: 35 }
    })
  })
})
// The `formatters.level` hook receives the (label, number) pair for
// custom levels just like for built-in ones; the returned object shapes
// the record's level field.
test('custom level does not affect the levels serializer', async ({ equal }) => {
  const stream = sink()
  const logger = pino({
    customLevels: {
      foo: 35,
      bar: 45
    },
    formatters: {
      level (label, number) {
        return { priority: number }
      }
    }
  }, stream)
  logger.foo('test')
  const { priority } = await once(stream, 'data')
  equal(priority, 35)
})

// With useOnlyCustomLevels the formatter must only ever be called with
// the custom labels/values (here the single level 'answer' = 42).
test('When useOnlyCustomLevels is set to true, the level formatter should only get custom levels', async ({ equal }) => {
  const stream = sink()
  const logger = pino({
    customLevels: {
      answer: 42
    },
    useOnlyCustomLevels: true,
    level: 42,
    formatters: {
      level (label, number) {
        equal(label, 'answer')
        equal(number, 42)
        return { level: number }
      }
    }
  }, stream)
  logger.answer('test')
  const { level } = await once(stream, 'data')
  equal(level, 42)
})

104
node_modules/pino/test/diagnostics.test.js generated vendored Normal file
View File

@@ -0,0 +1,104 @@
'use strict'

// Tests for the diagnostics_channel tracing events pino publishes around
// its internal asJson() serialization step.
const test = require('node:test')
const os = require('node:os')
const diagChan = require('node:diagnostics_channel')
const { AsyncLocalStorage } = require('node:async_hooks')
const { Writable } = require('node:stream')
const tspl = require('@matteo.collina/tspl')
const pino = require('../pino')

const hostname = os.hostname()
const { pid } = process

// Channel names of the start/end tracing events around asJson().
const AS_JSON_START = 'tracing:pino_asJson:start'
const AS_JSON_END = 'tracing:pino_asJson:end'
// Pin Date.now() to a fixed timestamp so the "time" field in serialized
// records is deterministic, and give each test a throw-away object-mode
// destination stream. afterEach restores the real clock.
test.beforeEach(ctx => {
  ctx.pino = {
    ts: 1757512800000, // 2025-09-10T10:00:00.000-05:00
    now: Date.now // keep the original so afterEach can restore it
  }
  Date.now = () => ctx.pino.ts
  ctx.pino.dest = new Writable({
    objectMode: true,
    write (data, enc, cb) {
      cb()
    }
  })
})

test.afterEach(ctx => {
  Date.now = ctx.pino.now
})
// A single logger.info() call must publish one start and one end event on
// the tracing channels. Both events carry the logger instance and the
// exact asJson() argument list (the same object on both events); the end
// event additionally carries the serialized log line.
test('asJson emits events', async (t) => {
  const plan = tspl(t, { plan: 8 })
  const { dest } = t.pino
  const logger = pino({}, dest)
  // Arguments pino passes to asJson for this call:
  // (merge object, message, level value, time suffix string).
  const expectedArguments = [
    {},
    'testing',
    30,
    `,"time":${t.pino.ts}`
  ]
  let startEvent

  diagChan.subscribe(AS_JSON_START, startHandler)
  diagChan.subscribe(AS_JSON_END, endHandler)
  logger.info('testing')
  await plan
  diagChan.unsubscribe(AS_JSON_START, startHandler)
  diagChan.unsubscribe(AS_JSON_END, endHandler)

  function startHandler (event) {
    startEvent = event
    plan.equal(Object.prototype.toString.call(event.instance), '[object Pino]')
    plan.equal(event.instance === logger, true)
    plan.deepStrictEqual(Array.from(event.arguments ?? []), expectedArguments)
  }

  function endHandler (event) {
    plan.equal(Object.prototype.toString.call(event.instance), '[object Pino]')
    plan.equal(event.instance === logger, true)
    plan.deepStrictEqual(Array.from(event.arguments ?? []), expectedArguments)
    plan.equal(
      event.result,
      `{"level":30,"time":${t.pino.ts},"pid":${pid},"hostname":"${hostname}","msg":"testing"}\n`
    )
    plan.equal(event.arguments === startEvent.arguments, true, 'same event object is supplied to both events')
  }
})
// The tracing events must fire inside the caller's async context: an
// AsyncLocalStorage store active at the logging call site is still
// visible from both channel handlers.
test('asJson context is not lost', async (t) => {
  const plan = tspl(t, { plan: 2 })
  const { dest } = t.pino
  const logger = pino({}, dest)
  const asyncLocalStorage = new AsyncLocalStorage()
  const localStore = { foo: 'bar' }

  diagChan.subscribe(AS_JSON_START, startHandler)
  diagChan.subscribe(AS_JSON_END, endHandler)
  asyncLocalStorage.run(localStore, () => {
    logger.info('testing')
  })
  await plan
  diagChan.unsubscribe(AS_JSON_START, startHandler)
  diagChan.unsubscribe(AS_JSON_END, endHandler)

  function startHandler () {
    const store = asyncLocalStorage.getStore()
    plan.equal(store === localStore, true)
  }

  function endHandler () {
    const store = asyncLocalStorage.getStore()
    plan.equal(store === localStore, true)
  }
})

398
node_modules/pino/test/error.test.js generated vendored Normal file
View File

@@ -0,0 +1,398 @@
'use strict'
/* eslint no-prototype-builtins: 0 */

// Tests for how pino serializes Error objects passed as the first
// logging argument or under the `err` key.
const os = require('node:os')
const { test } = require('tap')
const { sink, once } = require('./helper')
const pino = require('../')

const { pid } = process
const hostname = os.hostname()
// All tests below log via `instance[name](...)`, i.e. at the built-in
// "error" level whose numeric value is 50.
const level = 50
const name = 'error'
// The timestamp assertion below checks `record.time <= Date.now()`, but
// its description previously read "time is greater than Date.now()" —
// the opposite of the asserted condition. The descriptions now match
// what is actually checked; the assertions themselves are unchanged.

// Extra own properties attached to an Error are carried into the
// serialized `err` object alongside type/message/stack.
test('err is serialized with additional properties set on the Error object', async ({ ok, same }) => {
  const stream = sink()
  const err = Object.assign(new Error('myerror'), { foo: 'bar' })
  const instance = pino(stream)
  instance.level = name
  instance[name](err)
  const result = await once(stream, 'data')
  ok(new Date(result.time) <= new Date(), 'time is less than or equal to Date.now()')
  delete result.time
  same(result, {
    pid,
    hostname,
    level,
    err: {
      type: 'Error',
      message: err.message,
      stack: err.stack,
      foo: err.foo
    },
    msg: err.message
  })
})

// `err.type` reflects the constructor name of Error subclasses.
test('type should be detected based on constructor', async ({ ok, same }) => {
  class Bar extends Error {}
  const stream = sink()
  const err = new Bar('myerror')
  const instance = pino(stream)
  instance.level = name
  instance[name](err)
  const result = await once(stream, 'data')
  ok(new Date(result.time) <= new Date(), 'time is less than or equal to Date.now()')
  delete result.time
  same(result, {
    pid,
    hostname,
    level,
    err: {
      type: 'Bar',
      message: err.message,
      stack: err.stack
    },
    msg: err.message
  })
})

// type/message/stack sit directly on the serialized `err` object, not
// nested any deeper.
test('type, message and stack should be first level properties', async ({ ok, same }) => {
  const stream = sink()
  const err = Object.assign(new Error('foo'), { foo: 'bar' })
  const instance = pino(stream)
  instance.level = name
  instance[name](err)
  const result = await once(stream, 'data')
  ok(new Date(result.time) <= new Date(), 'time is less than or equal to Date.now()')
  delete result.time
  same(result, {
    pid,
    hostname,
    level,
    err: {
      type: 'Error',
      message: err.message,
      stack: err.stack,
      foo: err.foo
    },
    msg: err.message
  })
})

// An explicitly registered stdSerializers.err produces the same shape
// when the error is passed under the `err` key of a merging object.
test('err serializer', async ({ ok, same }) => {
  const stream = sink()
  const err = Object.assign(new Error('myerror'), { foo: 'bar' })
  const instance = pino({
    serializers: {
      err: pino.stdSerializers.err
    }
  }, stream)
  instance.level = name
  instance[name]({ err })
  const result = await once(stream, 'data')
  ok(new Date(result.time) <= new Date(), 'time is less than or equal to Date.now()')
  delete result.time
  same(result, {
    pid,
    hostname,
    level,
    err: {
      type: 'Error',
      message: err.message,
      stack: err.stack,
      foo: err.foo
    },
    msg: err.message
  })
})

// An Error carrying a `statusCode` property is serialized as an error
// (with statusCode kept as an extra property), not as an HTTP response.
test('an error with statusCode property is not confused for a http response', async ({ ok, same }) => {
  const stream = sink()
  const err = Object.assign(new Error('StatusCodeErr'), { statusCode: 500 })
  const instance = pino(stream)
  instance.level = name
  instance[name](err)
  const result = await once(stream, 'data')
  ok(new Date(result.time) <= new Date(), 'time is less than or equal to Date.now()')
  delete result.time
  same(result, {
    pid,
    hostname,
    level,
    err: {
      type: 'Error',
      message: err.message,
      stack: err.stack,
      statusCode: err.statusCode
    },
    msg: err.message
  })
})

// If the Error has no `stack` property, the record must not contain one.
test('stack is omitted if it is not set on err', t => {
  t.plan(2)
  const err = new Error('myerror')
  delete err.stack
  const instance = pino(sink(function (chunk, enc, cb) {
    t.ok(new Date(chunk.time) <= new Date(), 'time is less than or equal to Date.now()')
    delete chunk.time
    t.equal(chunk.hasOwnProperty('stack'), false)
    cb()
  }))
  instance.level = name
  instance[name](err)
})
// An Error whose toString() returns undefined is still serialized from
// its type/message/stack properties; toString is never consulted.
test('correctly ignores toString on errors', async ({ same }) => {
  const err = new Error('myerror')
  err.toString = () => undefined
  const stream = sink()
  const instance = pino({
    test: 'this'
  }, stream)
  instance.fatal(err)
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 60,
    err: {
      type: 'Error',
      message: err.message,
      stack: err.stack
    },
    msg: err.message
  })
})
// NOTE(review): the three tests below previously appeared twice in this
// file, byte-for-byte identical; the redundant second copies have been
// removed.

// Properties returned by the `mixin` option are merged into the record
// alongside the serialized error.
test('assign mixin()', async ({ same }) => {
  const err = new Error('myerror')
  const stream = sink()
  const instance = pino({
    mixin () {
      return { hello: 'world' }
    }
  }, stream)
  instance.fatal(err)
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 60,
    hello: 'world',
    err: {
      type: 'Error',
      message: err.message,
      stack: err.stack
    },
    msg: err.message
  })
})

// Passing an empty `serializers` object still yields the standard
// err shape for a top-level Error argument.
test('no err serializer', async ({ same }) => {
  const err = new Error('myerror')
  const stream = sink()
  const instance = pino({
    serializers: {}
  }, stream)
  instance.fatal(err)
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 60,
    err: {
      type: 'Error',
      message: err.message,
      stack: err.stack
    },
    msg: err.message
  })
})

// An err serializer that returns undefined drops the `err` key from the
// record entirely; only msg survives.
test('empty serializer', async ({ same }) => {
  const err = new Error('myerror')
  const stream = sink()
  const instance = pino({
    serializers: {
      err () {}
    }
  }, stream)
  instance.fatal(err)
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 60,
    msg: err.message
  })
})
// With `nestedKey`, the serialized err lands under the nested object
// while `msg` remains a top-level property.
test('correctly adds error information when nestedKey is used', async ({ same }) => {
  const err = new Error('myerror')
  err.toString = () => undefined
  const stream = sink()
  const instance = pino({
    test: 'this',
    nestedKey: 'obj'
  }, stream)
  instance.fatal(err)
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 60,
    obj: {
      err: {
        type: 'Error',
        stack: err.stack,
        message: err.message
      }
    },
    msg: err.message
  })
})

// An explicit message argument wins over the Error's own message, also
// when nestedKey is in effect.
test('correctly adds msg on error when nestedKey is used', async ({ same }) => {
  const err = new Error('myerror')
  err.toString = () => undefined
  const stream = sink()
  const instance = pino({
    test: 'this',
    nestedKey: 'obj'
  }, stream)
  instance.fatal(err, 'msg message')
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 60,
    obj: {
      err: {
        type: 'Error',
        stack: err.stack,
        message: err.message
      }
    },
    msg: 'msg message'
  })
})

// A `msg` property in the merging object takes precedence over the
// error's message.
test('msg should take precedence over error message on mergingObject', async ({ same }) => {
  const err = new Error('myerror')
  const stream = sink()
  const instance = pino(stream)
  instance.error({ msg: 'my message', err })
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 50,
    err: {
      type: 'Error',
      stack: err.stack,
      message: err.message
    },
    msg: 'my message'
  })
})

// The same precedence rule holds when `messageKey` renames "msg".
test('considers messageKey when giving msg precedence over error', async ({ same }) => {
  const err = new Error('myerror')
  const stream = sink()
  const instance = pino({ messageKey: 'message' }, stream)
  instance.error({ message: 'my message', err })
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 50,
    err: {
      type: 'Error',
      stack: err.stack,
      message: err.message
    },
    message: 'my message'
  })
})

34
node_modules/pino/test/errorKey.test.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
'use strict'
const { test } = require('tap')
const { sink, once } = require('./helper')
const stdSerializers = require('pino-std-serializers')
const pino = require('../')
// `errorKey` renames the property under which a top-level Error is
// serialized (default "err"); a serializer registered under the custom
// key is honoured. The unused `same` destructure has been removed.
test('set the errorKey with error serializer', async ({ equal }) => {
  const stream = sink()
  const errorKey = 'error'
  const instance = pino({
    errorKey,
    serializers: { [errorKey]: stdSerializers.err }
  }, stream)
  instance.error(new ReferenceError('test'))
  const o = await once(stream, 'data')
  equal(typeof o[errorKey], 'object')
  equal(o[errorKey].type, 'ReferenceError')
  equal(o[errorKey].message, 'test')
  equal(typeof o[errorKey].stack, 'string')
})
// Without an explicit serializer the record shape under the custom
// errorKey is the same as with stdSerializers.err. The unused `same`
// destructure has been removed.
test('set the errorKey without error serializer', async ({ equal }) => {
  const stream = sink()
  const errorKey = 'error'
  const instance = pino({
    errorKey
  }, stream)
  instance.error(new ReferenceError('test'))
  const o = await once(stream, 'data')
  equal(typeof o[errorKey], 'object')
  equal(o[errorKey].type, 'ReferenceError')
  equal(o[errorKey].message, 'test')
  equal(typeof o[errorKey].stack, 'string')
})

91
node_modules/pino/test/escaping.test.js generated vendored Normal file
View File

@@ -0,0 +1,91 @@
'use strict'

// Verifies that pino JSON-escapes control and special characters in the
// message string and that the escaped message round-trips intact.
const os = require('node:os')
const { test } = require('tap')
const { sink, once } = require('./helper')
const pino = require('../')

const { pid } = process
const hostname = os.hostname()
// Test factory: registers one test asserting that a message containing
// the raw character `key` survives serialization and re-parsing by the
// sink unchanged. `ch` is the printable spelling used in the test title.
function testEscape (ch, key) {
  test('correctly escape ' + ch, async ({ same }) => {
    const dest = sink()
    const logger = pino({
      name: 'hello'
    }, dest)
    logger.fatal('this contains ' + key)
    const record = await once(dest, 'data')
    // The timestamp varies run to run; drop it before comparing.
    delete record.time
    same(record, {
      pid,
      hostname,
      level: 60,
      name: 'hello',
      msg: 'this contains ' + key
    })
  })
}
// Classic JSON escape sequences.
testEscape('\\n', '\n')
testEscape('\\/', '/')
testEscape('\\\\', '\\')
testEscape('\\r', '\r')
testEscape('\\t', '\t')
testEscape('\\b', '\b')

// Every C0 control character (U+0000..U+001F) must be escaped in JSON.
const toEscape = [
  '\u0000', // NUL Null character
  '\u0001', // SOH Start of Heading
  '\u0002', // STX Start of Text
  '\u0003', // ETX End-of-text character
  '\u0004', // EOT End-of-transmission character
  '\u0005', // ENQ Enquiry character
  '\u0006', // ACK Acknowledge character
  '\u0007', // BEL Bell character
  '\u0008', // BS Backspace
  '\u0009', // HT Horizontal tab
  '\u000A', // LF Line feed
  '\u000B', // VT Vertical tab
  '\u000C', // FF Form feed
  '\u000D', // CR Carriage return
  '\u000E', // SO Shift Out
  '\u000F', // SI Shift In
  '\u0010', // DLE Data Link Escape
  '\u0011', // DC1 Device Control 1
  '\u0012', // DC2 Device Control 2
  '\u0013', // DC3 Device Control 3
  '\u0014', // DC4 Device Control 4
  '\u0015', // NAK Negative-acknowledge character
  '\u0016', // SYN Synchronous Idle
  '\u0017', // ETB End of Transmission Block
  '\u0018', // CAN Cancel character
  '\u0019', // EM End of Medium
  '\u001A', // SUB Substitute character
  '\u001B', // ESC Escape character
  '\u001C', // FS File Separator
  '\u001D', // GS Group Separator
  '\u001E', // RS Record Separator
  '\u001F' // US Unit Separator
]
toEscape.forEach((key) => {
  // JSON.stringify(key) yields the printable form for the test title.
  testEscape(JSON.stringify(key), key)
})
// A message mixing a control character, a newline and a double quote
// must round-trip intact through serialization and the sink's parse.
test('correctly escape `hello \\u001F world \\n \\u0022`', async ({ same }) => {
  const stream = sink()
  const instance = pino({
    name: 'hello'
  }, stream)
  instance.fatal('hello \u001F world \n \u0022')
  const result = await once(stream, 'data')
  delete result.time
  same(result, {
    pid,
    hostname,
    level: 60,
    name: 'hello',
    msg: 'hello \u001F world \n \u0022'
  })
})

12
node_modules/pino/test/esm/esm.mjs generated vendored Normal file
View File

@@ -0,0 +1,12 @@
// Smoke test: pino must be importable and usable from an ES module.
import t from 'tap'
import pino from '../../pino.js'
import helper from '../helper.js'

const { sink, check, once } = helper

t.test('esm support', async ({ equal }) => {
  const stream = sink()
  const instance = pino(stream)
  instance.info('hello world')
  // check() asserts level 30 (info) and the expected msg on the record.
  check(equal, await once(stream, 'data'), 30, 'hello world')
})

Some files were not shown because too many files have changed in this diff Show More