Changed to forward Feishu webhooks to a Gotify server

2025-09-21 15:57:44 +08:00
parent 1034d1371f
commit f6a9229e71
199 changed files with 43698 additions and 96 deletions

170
app.js

@@ -1,108 +1,90 @@
-// 1. [Import and instantiate]
-// Much like http.createServer(), this is the first step in creating a Fastify server instance
-// We pass { logger: true } to enable Fastify's built-in, very fast logging
+// 1. [Dependencies]
 const fastify = require('fastify')({ logger: true });
+// undici is the high-performance HTTP client that ships with Node.js; we use it to send requests
+const { request: undiciRequest } = require('undici');
-// Simulate a simple database
-const users = [
-  { id: 1, name: '张三', email: 'zhangsan@example.com' },
-  { id: 2, name: '李四', email: 'lisi@example.com' },
-];
+// 2. [Configuration] - read the Gotify settings from environment variables
+// This is the best practice: it keeps sensitive values out of the source code
+// const GOTIFY_URL = process.env.GOTIFY_URL;     // e.g. 'http://your-gotify-server.com/message'
+// const GOTIFY_TOKEN = process.env.GOTIFY_TOKEN; // your Gotify application token
+const GOTIFY_URL = 'https://gotify.zotv.ru/message'; // e.g. 'http://your-gotify-server.com/message'
+const GOTIFY_TOKEN = 'A1wFaeaj-VskqyF'; // your Gotify application token
-// 2. [Core feature: Routing]
-// The most basic GET route, similar to a web server's "welcome page"
-fastify.get('/', async (request, reply) => {
-  // The request object contains all the information about the request
-  // The reply object is used to build and send the response
-  return { message: 'Welcome to the Fastify core features demo!' };
-});
+// Check that the configuration is complete before starting
+if (!GOTIFY_URL || !GOTIFY_TOKEN) {
+  console.error('Error: please set the GOTIFY_URL and GOTIFY_TOKEN environment variables!');
+  process.exit(1);
+}
-// GET route with path parameters (Params)
-// :id is a placeholder that matches /users/1, /users/2, and so on
-fastify.get('/users/:id', async (request, reply) => {
-  const userId = parseInt(request.params.id, 10); // read the id from the path
-  const user = users.find(u => u.id === userId);
+// 3. [Core route] - receive and forward the webhook
+fastify.post('/webhook/feishu', async (request, reply) => {
+  fastify.log.info('Received a webhook request from Feishu...');
-  if (!user) {
-    // Use the reply object to set the status code and send the response
-    return reply.status(404).send({ error: 'User not found' });
+  const feishuPayload = request.body;
+  let title = 'New message from Feishu'; // default title
+  let message = '';
+  // --- Parse the Feishu message ---
+  // A. Handle simple "text" messages
+  if (feishuPayload.msg_type === 'text') {
+    message = feishuPayload.content?.text || 'Unparseable text message';
   }
-  return user;
+  // B. Handle "interactive" (card) messages
+  else if (feishuPayload.msg_type === 'interactive' && feishuPayload.card) {
+    // Try to take the title from the card header
+    title = feishuPayload.card.header?.title?.content || title;
+    // Try to take the content from the card's first element
+    // (this is simplified logic; real cards can be far more complex)
+    const firstElement = feishuPayload.card.elements?.[0];
+    if (firstElement?.tag === 'div' && firstElement.text) {
+      message = firstElement.text.content || 'Unparseable card content';
+    } else {
+      message = JSON.stringify(feishuPayload.card.elements); // fall back to the raw data if the structure is unknown
+    }
+  }
+  // C. Any other, unknown type
+  else {
+    fastify.log.warn('Received an unknown Feishu message type; forwarding the payload as raw JSON');
+    message = JSON.stringify(feishuPayload, null, 2);
+  }
+  // --- Forward to Gotify ---
+  const gotifyPayload = {
+    title: title,
+    message: message,
+    priority: 5, // Gotify message priority, adjust as needed
+  };
+  try {
+    fastify.log.info(`Forwarding to Gotify: ${GOTIFY_URL}`);
+    const { statusCode } = await undiciRequest(`${GOTIFY_URL}?token=${GOTIFY_TOKEN}`, {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify(gotifyPayload),
+    });
+    if (statusCode >= 200 && statusCode < 300) {
+      fastify.log.info('Message forwarded to Gotify successfully');
+    } else {
+      fastify.log.error(`Forwarding to Gotify failed, status code: ${statusCode}`);
+    }
+  } catch (error) {
+    fastify.log.error(`Network error while forwarding to Gotify: ${error.message}`);
+  }
+  // --- Respond to Feishu ---
+  // **Very important**: always reply to Feishu with a success response immediately, whether or not
+  // the forwarding succeeded. Otherwise Feishu will assume the webhook URL is broken and may keep retrying.
+  return { success: true, message: 'Message received' };
 });
-// GET route with a query string (Query String)
-// Matches /search?name=张三
-fastify.get('/search', async (request, reply) => {
-  const name = request.query.name; // read from ?key=value
-  const results = users.filter(u => u.name.includes(name));
-  return results;
-});
-// POST route: handling the request body (Body)
-// Used to create new resources
-fastify.post('/users', async (request, reply) => {
-  const newUser = request.body; // the JSON data of the POST request
-  newUser.id = users.length + 1; // naively assign a new ID
-  users.push(newUser);
-  // Return a 201 Created status code together with the newly created user
-  return reply.status(201).send(newUser);
-});
-// 3. [Core feature: Schema (validation and serialization)]
-// This is the key to Fastify's outstanding performance!
-// It validates data automatically before a request reaches the handler and serializes the response extremely fast
-const createUserSchema = {
-  // The body section defines the validation rules for the POST request body
-  body: {
-    type: 'object',
-    required: ['name', 'email'], // the name and email fields are required
-    properties: {
-      name: { type: 'string' },
-      email: { type: 'string', format: 'email' }, // it can even validate the email format
-    },
-  },
-  // The response section defines the response format; Fastify uses it to serialize JSON very quickly
-  response: {
-    201: { // response format for the 201 status code
-      type: 'object',
-      properties: {
-        id: { type: 'integer' },
-        name: { type: 'string' },
-        email: { type: 'string' },
-      },
-    },
-  },
-};
-// POST route with a schema attached
-fastify.post('/users-validated', { schema: createUserSchema }, async (request, reply) => {
-  // If the request body does not match the body schema above, this code is never reached:
-  // Fastify automatically returns a 400 Bad Request error
-  const newUser = request.body;
-  newUser.id = users.length + 1;
-  users.push(newUser);
-  // Thanks to the response schema, the data returned here is formatted safely and quickly
-  return reply.status(201).send(newUser);
-});
-// 4. [Core feature: Hooks]
-// Hooks let you run code at specific points in the request lifecycle; great for authentication, logging, etc.
-fastify.addHook('onRequest', async (request, reply) => {
-  // This hook runs before each request is matched to a route
-  // We use it to log some global information
-  fastify.log.info(`Received a ${request.method} request for path: ${request.url}`);
-});
-// 5. [Start the server]
-// Fastify starts asynchronously, so we use async/await
+// 4. [Start the server]
 const start = async () => {
   try {
-    // Listen on port 3000
-    await fastify.listen({ port: 3000, host: '0.0.0.0'});
+    // Listen on 0.0.0.0 so the server is reachable from the LAN or from outside a Docker container
+    await fastify.listen({ port: 3000, host: '0.0.0.0' });
   } catch (err) {
     // If startup fails, log the error and exit
     fastify.log.error(err);
     process.exit(1);
   }
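
For reference, a minimal way to exercise the new endpoint locally (a sketch, not part of this commit; it assumes the server above is running on port 3000 and posts a Feishu-style text payload):

```js
// Hypothetical smoke test for the new /webhook/feishu route.
const { request } = require('undici');

async function main() {
  const { statusCode, body } = await request('http://localhost:3000/webhook/feishu', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ msg_type: 'text', content: { text: 'hello from Feishu' } }),
  });
  // Expect 200 and { success: true, message: 'Message received' }
  console.log(statusCode, await body.json());
}

main().catch(console.error);
```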

9
node_modules/.package-lock.json generated vendored

@@ -576,6 +576,15 @@
"engines": {
"node": ">=12"
}
},
"node_modules/undici": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-7.16.0.tgz",
"integrity": "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==",
"license": "MIT",
"engines": {
"node": ">=20.18.1"
}
}
}
}

21
node_modules/undici/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) Matteo Collina and Undici contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

633
node_modules/undici/README.md generated vendored Normal file

@@ -0,0 +1,633 @@
# undici
[![Node CI](https://github.com/nodejs/undici/actions/workflows/ci.yml/badge.svg)](https://github.com/nodejs/undici/actions/workflows/nodejs.yml) [![neostandard javascript style](https://img.shields.io/badge/neo-standard-7fffff?style=flat\&labelColor=ff80ff)](https://github.com/neostandard/neostandard) [![npm version](https://badge.fury.io/js/undici.svg)](https://badge.fury.io/js/undici) [![codecov](https://codecov.io/gh/nodejs/undici/branch/main/graph/badge.svg?token=yZL6LtXkOA)](https://codecov.io/gh/nodejs/undici)
An HTTP/1.1 client, written from scratch for Node.js.
> Undici means eleven in Italian. 1.1 -> 11 -> Eleven -> Undici.
> It is also a Stranger Things reference.
## How to get involved
Have a question about using Undici? Open a [Q&A Discussion](https://github.com/nodejs/undici/discussions/new) or join our official OpenJS [Slack](https://openjs-foundation.slack.com/archives/C01QF9Q31QD) channel.
Looking to contribute? Start by reading the [contributing guide](./CONTRIBUTING.md)
## Install
```
npm i undici
```
## Benchmarks
The benchmark is a simple data-fetching [example](https://github.com/nodejs/undici/blob/main/benchmarks/benchmark.js) using 50 TCP connections with a pipelining depth of 10, running on Node 22.11.0.
```
┌────────────────────────┬─────────┬────────────────────┬────────────┬─────────────────────────┐
│ Tests │ Samples │ Result │ Tolerance │ Difference with slowest │
├────────────────────────┼─────────┼────────────────────┼────────────┼─────────────────────────┤
│ 'axios' │ 15 │ '5708.26 req/sec' │ '± 2.91 %' │ '-' │
│ 'http - no keepalive' │ 10 │ '5809.80 req/sec' │ '± 2.30 %' │ '+ 1.78 %' │
│ 'request' │ 30 │ '5828.80 req/sec' │ '± 2.91 %' │ '+ 2.11 %' │
│ 'undici - fetch' │ 40 │ '5903.78 req/sec' │ '± 2.87 %' │ '+ 3.43 %' │
│ 'node-fetch' │ 10 │ '5945.40 req/sec' │ '± 2.13 %' │ '+ 4.15 %' │
│ 'got' │ 35 │ '6511.45 req/sec' │ '± 2.84 %' │ '+ 14.07 %' │
│ 'http - keepalive' │ 65 │ '9193.24 req/sec' │ '± 2.92 %' │ '+ 61.05 %' │
│ 'superagent' │ 35 │ '9339.43 req/sec' │ '± 2.95 %' │ '+ 63.61 %' │
│ 'undici - pipeline' │ 50 │ '13364.62 req/sec' │ '± 2.93 %' │ '+ 134.13 %' │
│ 'undici - stream' │ 95 │ '18245.36 req/sec' │ '± 2.99 %' │ '+ 219.63 %' │
│ 'undici - request' │ 50 │ '18340.17 req/sec' │ '± 2.84 %' │ '+ 221.29 %' │
│ 'undici - dispatch' │ 40 │ '22234.42 req/sec' │ '± 2.94 %' │ '+ 289.51 %' │
└────────────────────────┴─────────┴────────────────────┴────────────┴─────────────────────────┘
```
## Undici vs. Fetch
### Overview
Node.js includes a built-in `fetch()` implementation powered by undici starting from Node.js v18. However, there are important differences between using the built-in fetch and installing undici as a separate module.
### Built-in Fetch (Node.js v18+)
Node.js's built-in fetch is powered by a bundled version of undici:
```js
// Available globally in Node.js v18+
const response = await fetch('https://api.example.com/data');
const data = await response.json();
// Check the bundled undici version
console.log(process.versions.undici); // e.g., "5.28.4"
```
**Pros:**
- No additional dependencies required
- Works across different JavaScript runtimes
- Automatic compression handling (gzip, deflate, br)
- Built-in caching support (in development)
**Cons:**
- Limited to the undici version bundled with your Node.js version
- Less control over connection pooling and advanced features
- Error handling follows Web API standards (errors wrapped in `TypeError`)
- Performance overhead due to Web Streams implementation
### Undici Module
Installing undici as a separate module gives you access to the latest features and APIs:
```bash
npm install undici
```
```js
import { request, fetch, Agent, setGlobalDispatcher } from 'undici';
// Use undici.request for maximum performance
const { statusCode, headers, body } = await request('https://api.example.com/data');
const data = await body.json();
// Or use undici.fetch with custom configuration
const agent = new Agent({ keepAliveTimeout: 10000 });
setGlobalDispatcher(agent);
const response = await fetch('https://api.example.com/data');
```
**Pros:**
- Latest undici features and bug fixes
- Access to advanced APIs (`request`, `stream`, `pipeline`)
- Fine-grained control over connection pooling
- Better error handling with clearer error messages
- Superior performance, especially with `undici.request`
- HTTP/1.1 pipelining support
- Custom interceptors and middleware
- Advanced features like `ProxyAgent`, `MockAgent`
**Cons:**
- Additional dependency to manage
- Larger bundle size
### When to Use Each
#### Use Built-in Fetch When:
- You want zero dependencies
- Building isomorphic code that runs in browsers and Node.js
- Publishing to npm and want to maximize compatibility with JS runtimes
- Simple HTTP requests without advanced configuration
- You're publishing to npm and want to maximize compatibility
- You don't depend on features from a specific version of undici
#### Use Undici Module When:
- You need the latest undici features and performance improvements
- You require advanced connection pooling configuration
- You need APIs not available in the built-in fetch (`ProxyAgent`, `MockAgent`, etc.)
- Performance is critical (use `undici.request` for maximum speed)
- You want better error handling and debugging capabilities
- You need HTTP/1.1 pipelining or advanced interceptors
- You prefer decoupled protocol and API interfaces
### Performance Comparison
Based on benchmarks, here's the typical performance hierarchy:
1. **`undici.request()`** - Fastest, most efficient
2. **`undici.fetch()`** - Good performance, standard compliance
3. **Node.js `http`/`https`** - Baseline performance
### Migration Guide
If you're currently using built-in fetch and want to migrate to undici:
```js
// Before: Built-in fetch
const response = await fetch('https://api.example.com/data');
// After: Undici fetch (drop-in replacement)
import { fetch } from 'undici';
const response = await fetch('https://api.example.com/data');
// Or: Undici request (better performance)
import { request } from 'undici';
const { statusCode, body } = await request('https://api.example.com/data');
const data = await body.json();
```
### Version Compatibility
You can check which version of undici is bundled with your Node.js version:
```js
console.log(process.versions.undici);
```
Installing undici as a module allows you to use a newer version than what's bundled with Node.js, giving you access to the latest features and performance improvements.
## Quick Start
```js
import { request } from 'undici'
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode)
console.log('headers', headers)
for await (const data of body) { console.log('data', data) }
console.log('trailers', trailers)
```
## Global Installation
Undici provides an `install()` function to add all WHATWG fetch classes to `globalThis`, making them available globally:
```js
import { install } from 'undici'
// Install all WHATWG fetch classes globally
install()
// Now you can use fetch classes globally without importing
const response = await fetch('https://api.example.com/data')
const data = await response.json()
// All classes are available globally:
const headers = new Headers([['content-type', 'application/json']])
const request = new Request('https://example.com')
const formData = new FormData()
const ws = new WebSocket('wss://example.com')
const eventSource = new EventSource('https://example.com/events')
```
The `install()` function adds the following classes to `globalThis`:
- `fetch` - The fetch function
- `Headers` - HTTP headers management
- `Response` - HTTP response representation
- `Request` - HTTP request representation
- `FormData` - Form data handling
- `WebSocket` - WebSocket client
- `CloseEvent`, `ErrorEvent`, `MessageEvent` - WebSocket events
- `EventSource` - Server-sent events client
This is useful for:
- Polyfilling environments that don't have fetch
- Ensuring consistent fetch behavior across different Node.js versions
- Making undici's implementations available globally for libraries that expect them
## Body Mixins
The `body` mixins are the most common way to format the request/response body. Mixins include:
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
- [`.bytes()`](https://fetch.spec.whatwg.org/#dom-body-bytes)
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
> [!NOTE]
> The body returned from `undici.request` does not implement `.formData()`.
Example usage:
```js
import { request } from 'undici'
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode)
console.log('headers', headers)
console.log('data', await body.json())
console.log('trailers', trailers)
```
_Note: Once a mixin has been called then the body cannot be reused, thus calling additional mixins on `.body`, e.g. `.body.json(); .body.text()` will result in an error `TypeError: unusable` being thrown and returned through the `Promise` rejection._
Should you need to access the `body` in plain-text after using a mixin, the best practice is to use the `.text()` mixin first and then manually parse the text to the desired format.
For more information about their behavior, please reference the body mixin from the [Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin).
## Common API Methods
This section documents our most commonly used API methods. Additional APIs are documented in their own files within the [docs](./docs/) folder and are accessible via the navigation list on the left side of the docs site.
### `undici.request([url, options]): Promise`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`RequestOptions`](./docs/docs/api/Dispatcher.md#parameter-requestoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
Returns a promise with the result of the `Dispatcher.request` method.
Calls `options.dispatcher.request(options)`.
See [Dispatcher.request](./docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details, and [request examples](./docs/examples/README.md) for examples.
### `undici.stream([url, options, ]factory): Promise`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`StreamOptions`](./docs/docs/api/Dispatcher.md#parameter-streamoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **factory** `Dispatcher.stream.factory`
Returns a promise with the result of the `Dispatcher.stream` method.
Calls `options.dispatcher.stream(options, factory)`.
See [Dispatcher.stream](./docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details.
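For illustration, a small sketch (placeholder URL and file name): the factory receives the status code and headers and returns the `Writable` that the response body is piped into:
```js
import { stream } from 'undici'
import { createWriteStream } from 'node:fs'

// Stream the response body straight to disk without buffering it in memory.
await stream(
  'http://localhost:3000/foo',
  { method: 'GET' },
  ({ statusCode, headers }) => {
    console.log('response started', statusCode)
    return createWriteStream('./response.out')
  }
)
```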
### `undici.pipeline([url, options, ]handler): Duplex`
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`PipelineOptions`](./docs/docs/api/Dispatcher.md#parameter-pipelineoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **method** `String` - Default: `PUT` if `options.body`, otherwise `GET`
* **handler** `Dispatcher.pipeline.handler`
Returns: `stream.Duplex`
Calls `options.dispatch.pipeline(options, handler)`.
See [Dispatcher.pipeline](./docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details.
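A sketch of the usual shape (placeholder URL and files): the returned `Duplex` slots into an ordinary stream pipeline, and the handler decides what its readable side yields:
```js
import { pipeline } from 'undici'
import { pipeline as streamPipeline } from 'node:stream/promises'
import { createReadStream, createWriteStream } from 'node:fs'

// POST the contents of input.txt and write whatever the server sends back to output.txt.
await streamPipeline(
  createReadStream('./input.txt'),
  pipeline('http://localhost:3000/echo', { method: 'POST' }, ({ body }) => body),
  createWriteStream('./output.txt')
)
```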
### `undici.connect([url, options]): Promise`
Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT).
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`ConnectOptions`](./docs/docs/api/Dispatcher.md#parameter-connectoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional)
Returns a promise with the result of the `Dispatcher.connect` method.
Calls `options.dispatch.connect(options)`.
See [Dispatcher.connect](./docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details.
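A minimal sketch (it assumes a local server that answers `CONNECT`); the resolved value includes the raw socket for the established tunnel:
```js
import { connect } from 'undici'

const { socket } = await connect('http://localhost:3000')

// The socket is now a two-way tunnel; speak whatever protocol the target expects.
socket.write('GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n')
socket.on('data', (chunk) => process.stdout.write(chunk))
socket.on('end', () => console.log('tunnel closed'))
```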
### `undici.fetch(input[, init]): Promise`
Implements [fetch](https://fetch.spec.whatwg.org/#fetch-method).
* https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch
* https://fetch.spec.whatwg.org/#fetch-method
Basic usage example:
```js
import { fetch } from 'undici'
const res = await fetch('https://example.com')
const json = await res.json()
console.log(json)
```
You can pass an optional dispatcher to `fetch` as:
```js
import { fetch, Agent } from 'undici'
const res = await fetch('https://example.com', {
// Mocks are also supported
dispatcher: new Agent({
keepAliveTimeout: 10,
keepAliveMaxTimeout: 10
})
})
const json = await res.json()
console.log(json)
```
#### `request.body`
A body can be of the following types:
- ArrayBuffer
- ArrayBufferView
- AsyncIterables
- Blob
- Iterables
- String
- URLSearchParams
- FormData
In this implementation of fetch, `request.body` now accepts `Async Iterables`. It is not present in the [Fetch Standard](https://fetch.spec.whatwg.org).
```js
import { fetch } from 'undici'
const data = {
async *[Symbol.asyncIterator]() {
yield 'hello'
yield 'world'
},
}
await fetch('https://example.com', { body: data, method: 'POST', duplex: 'half' })
```
[FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData) besides text data and buffers can also utilize streams via [Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects:
```js
import { openAsBlob } from 'node:fs'
const file = await openAsBlob('./big.csv')
const body = new FormData()
body.set('file', file, 'big.csv')
await fetch('http://example.com', { method: 'POST', body })
```
#### `request.duplex`
- `'half'`
In this implementation of fetch, `request.duplex` must be set if `request.body` is `ReadableStream` or `Async Iterables`, however, even though the value must be set to `'half'`, it is actually a _full_ duplex. For more detail refer to the [Fetch Standard](https://fetch.spec.whatwg.org/#dom-requestinit-duplex).
#### `response.body`
Node.js has two kinds of streams: [web streams](https://nodejs.org/api/webstreams.html), which follow the API of the WHATWG web standard found in browsers, and an older Node-specific [streams API](https://nodejs.org/api/stream.html). `response.body` returns a readable web stream. If you would prefer to work with a Node stream, you can convert the web stream using `Readable.fromWeb()`.
```js
import { fetch } from 'undici'
import { Readable } from 'node:stream'
const response = await fetch('https://example.com')
const readableWebStream = response.body
const readableNodeStream = Readable.fromWeb(readableWebStream)
```
## Specification Compliance
This section documents parts of the [HTTP/1.1](https://www.rfc-editor.org/rfc/rfc9110.html) and [Fetch Standard](https://fetch.spec.whatwg.org) that Undici does
not support or does not fully implement.
#### CORS
Unlike browsers, Undici does not implement CORS (Cross-Origin Resource Sharing) checks by default. This means:
- No preflight requests are automatically sent for cross-origin requests
- No validation of `Access-Control-Allow-Origin` headers is performed
- Requests to any origin are allowed regardless of the source
This behavior is intentional for server-side environments where CORS restrictions are typically unnecessary. If your application requires CORS-like protections, you will need to implement these checks manually.
#### Garbage Collection
* https://fetch.spec.whatwg.org/#garbage-collection
The [Fetch Standard](https://fetch.spec.whatwg.org) allows users to skip consuming the response body by relying on
[garbage collection](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Memory_Management#garbage_collection) to release connection resources.
Garbage collection in Node is less aggressive and deterministic
(due to the lack of clear idle periods that browsers have through the rendering refresh rate)
which means that leaving the release of connection resources to the garbage collector can lead
to excessive connection usage, reduced performance (due to less connection re-use), and even
stalls or deadlocks when running out of connections.
Therefore, __it is important to always either consume or cancel the response body anyway__.
```js
// Do
const { body, headers } = await fetch(url);
for await (const chunk of body) {
// force consumption of body
}
// Do not
const { headers } = await fetch(url);
```
However, if you only need the headers, it may be better to use the `HEAD` request method, which obviates the need to consume or cancel the response body. See [MDN - HTTP - HTTP request methods - HEAD](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) for more details.
```js
const headers = await fetch(url, { method: 'HEAD' })
.then(res => res.headers)
```
Note that consuming the response body is _mandatory_ for `request`:
```js
// Do
const { body, headers } = await request(url);
await body.dump(); // force consumption of body
// Do not
const { headers } = await request(url);
```
#### Forbidden and Safelisted Header Names
* https://fetch.spec.whatwg.org/#cors-safelisted-response-header-name
* https://fetch.spec.whatwg.org/#forbidden-header-name
* https://fetch.spec.whatwg.org/#forbidden-response-header-name
* https://github.com/wintercg/fetch/issues/6
The [Fetch Standard](https://fetch.spec.whatwg.org) requires implementations to exclude certain headers from requests and responses. In browser environments, some headers are forbidden so the user agent remains in full control over them. In Undici, these constraints are removed to give more control to the user.
#### `undici.upgrade([url, options]): Promise`
Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details.
Arguments:
* **url** `string | URL | UrlObject`
* **options** [`UpgradeOptions`](./docs/docs/api/Dispatcher.md#parameter-upgradeoptions)
* **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher)
* **callback** `(error: Error | null, data: UpgradeData) => void` (optional)
Returns a promise with the result of the `Dispatcher.upgrade` method.
Calls `options.dispatcher.upgrade(options)`.
See [Dispatcher.upgrade](./docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details.
### `undici.setGlobalDispatcher(dispatcher)`
* dispatcher `Dispatcher`
Sets the global dispatcher used by Common API Methods. Global dispatcher is shared among compatible undici modules,
including undici that is bundled internally with node.js.
### `undici.getGlobalDispatcher()`
Gets the global dispatcher used by Common API Methods.
Returns: `Dispatcher`
### `undici.setGlobalOrigin(origin)`
* origin `string | URL | undefined`
Sets the global origin used in `fetch`.
If `undefined` is passed, the global origin will be reset. This will cause `Response.redirect`, `new Request()`, and `fetch` to throw an error when a relative path is passed.
```js
setGlobalOrigin('http://localhost:3000')
const response = await fetch('/api/ping')
console.log(response.url) // http://localhost:3000/api/ping
```
### `undici.getGlobalOrigin()`
Gets the global origin used in `fetch`.
Returns: `URL`
### `UrlObject`
* **port** `string | number` (optional)
* **path** `string` (optional)
* **pathname** `string` (optional)
* **hostname** `string` (optional)
* **origin** `string` (optional)
* **protocol** `string` (optional)
* **search** `string` (optional)
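For illustration, a plain object with these fields can be passed wherever the Common API Methods accept a `url` (a sketch with a placeholder host and path):
```js
import { request } from 'undici'

// Roughly equivalent to request('http://localhost:3000/status?verbose=true')
const { statusCode, body } = await request({
  protocol: 'http:',
  hostname: 'localhost',
  port: 3000,
  path: '/status?verbose=true'
})

await body.dump() // always consume or cancel the body
console.log(statusCode)
```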
#### Expect
Undici does not support the `Expect` request header field. The request
body is always immediately sent and the `100 Continue` response will be
ignored.
Refs: https://tools.ietf.org/html/rfc7231#section-5.1.1
#### Pipelining
Undici will only use pipelining if configured with a `pipelining` factor
greater than `1`. Also it is important to pass `blocking: false` to the
request options to properly pipeline requests.
Undici always assumes that connections are persistent and will immediately
pipeline requests, without checking whether the connection is persistent.
Hence, automatic fallback to HTTP/1.0 or HTTP/1.1 without pipelining is
not supported.
Undici will immediately pipeline when retrying requests after a failed
connection. However, Undici will not retry the first remaining requests in
the prior pipeline and instead error the corresponding callback/promise/stream.
Undici will abort all running requests in the pipeline when any of them are
aborted.
* Refs: https://tools.ietf.org/html/rfc2616#section-8.1.2.2
* Refs: https://tools.ietf.org/html/rfc7230#section-6.3.2
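As a sketch (placeholder origin and paths), opting in on a single `Client` looks roughly like this; note the explicit `pipelining` factor and `blocking: false` per request:
```js
import { Client } from 'undici'

const client = new Client('http://localhost:3000', { pipelining: 10 })

// Queue several requests onto the same connection without waiting for each response.
const results = await Promise.all([
  client.request({ path: '/a', method: 'GET', blocking: false }),
  client.request({ path: '/b', method: 'GET', blocking: false }),
  client.request({ path: '/c', method: 'GET', blocking: false })
])

for (const { statusCode, body } of results) {
  console.log(statusCode)
  await body.dump() // always consume the body
}

await client.close()
```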
#### Manual Redirect
Since it is not possible to manually follow an HTTP redirect on the server-side,
Undici returns the actual response instead of an `opaqueredirect` filtered one
when invoked with a `manual` redirect. This aligns `fetch()` with the other
implementations in Deno and Cloudflare Workers.
Refs: https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
### Workarounds
#### Network address family autoselection.
If you experience problems connecting to a remote server whose DNS resolves to an IPv6 (AAAA record) address first, your local router or ISP may have trouble connecting to IPv6 networks. In that case undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`.
If the target server also resolves to an IPv4 (A record) address and you are using a compatible Node version (18.3.0 and above), you can fix the problem by providing the `autoSelectFamily` option (supported by both `undici.request` and `undici.Agent`), which enables the family autoselection algorithm when establishing the connection.
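For example (a sketch with a placeholder URL), the option can be passed directly to `undici.request` on Node 18.3.0 and above:
```js
import { request } from 'undici'

// Try the resolved IPv6 and IPv4 addresses and keep whichever connects first.
const { statusCode, body } = await request('https://example.com', { autoSelectFamily: true })
await body.dump()
console.log(statusCode)
```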
## Collaborators
* [__Daniele Belardi__](https://github.com/dnlup), <https://www.npmjs.com/~dnlup>
* [__Ethan Arrowood__](https://github.com/ethan-arrowood), <https://www.npmjs.com/~ethan_arrowood>
* [__Matteo Collina__](https://github.com/mcollina), <https://www.npmjs.com/~matteo.collina>
* [__Matthew Aitken__](https://github.com/KhafraDev), <https://www.npmjs.com/~khaf>
* [__Robert Nagy__](https://github.com/ronag), <https://www.npmjs.com/~ronag>
* [__Szymon Marczak__](https://github.com/szmarczak), <https://www.npmjs.com/~szmarczak>
## Past Collaborators
* [__Tomas Della Vedova__](https://github.com/delvedor), <https://www.npmjs.com/~delvedor>
### Releasers
* [__Ethan Arrowood__](https://github.com/ethan-arrowood), <https://www.npmjs.com/~ethan_arrowood>
* [__Matteo Collina__](https://github.com/mcollina), <https://www.npmjs.com/~matteo.collina>
* [__Robert Nagy__](https://github.com/ronag), <https://www.npmjs.com/~ronag>
* [__Matthew Aitken__](https://github.com/KhafraDev), <https://www.npmjs.com/~khaf>
## Long Term Support
Undici aligns with the Node.js LTS schedule. The following table shows the supported versions:
| Undici Version | Bundled in Node.js | Node.js Versions Supported | End of Life |
|----------------|-------------------|----------------------------|-------------|
| 5.x | 18.x | ≥14.0 (tested: 14, 16, 18) | 2024-04-30 |
| 6.x | 20.x, 22.x | ≥18.17 (tested: 18, 20, 21, 22) | 2026-04-30 |
| 7.x | 24.x | ≥20.18.1 (tested: 20, 22, 24) | 2027-04-30 |
## License
MIT

84
node_modules/undici/docs/docs/api/Agent.md generated vendored Normal file

@@ -0,0 +1,84 @@
# Agent
Extends: `undici.Dispatcher`
Agent allows dispatching requests against multiple different origins.
Requests are not guaranteed to be dispatched in order of invocation.
## `new undici.Agent([options])`
Arguments:
* **options** `AgentOptions` (optional)
Returns: `Agent`
### Parameter: `AgentOptions`
Extends: [`PoolOptions`](/docs/docs/api/Pool.md#parameter-pooloptions)
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
* **maxOrigins** `number` (optional) - Default: `Infinity` - Limits the total number of origins that can receive requests at a time, throwing a `MaxOriginsReachedError` when a dispatch would exceed the limit. If `Infinity`, no limit is enforced.
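A short sketch of constructing an `Agent` with these options and using it as the dispatcher for a single request (the origin is a placeholder):
```js
import { Agent, request } from 'undici'

// Cap the number of distinct origins this agent will serve at once.
const agent = new Agent({ maxOrigins: 10 })

const { statusCode, body } = await request('http://localhost:3000/', { dispatcher: agent })
console.log(statusCode, await body.text())
await agent.close()
```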
## Instance Properties
### `Agent.closed`
Implements [Client.closed](/docs/docs/api/Client.md#clientclosed)
### `Agent.destroyed`
Implements [Client.destroyed](/docs/docs/api/Client.md#clientdestroyed)
## Instance Methods
### `Agent.close([callback])`
Implements [`Dispatcher.close([callback])`](/docs/docs/api/Dispatcher.md#dispatcherclosecallback-promise).
### `Agent.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](/docs/docs/api/Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `Agent.dispatch(options, handler: AgentDispatchOptions)`
Implements [`Dispatcher.dispatch(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
#### Parameter: `AgentDispatchOptions`
Extends: [`DispatchOptions`](/docs/docs/api/Dispatcher.md#parameter-dispatchoptions)
* **origin** `string | URL`
Implements [`Dispatcher.destroy([error, callback])`](/docs/docs/api/Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `Agent.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback).
### `Agent.dispatch(options, handler)`
Implements [`Dispatcher.dispatch(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
### `Agent.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler).
### `Agent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
### `Agent.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `Agent.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback).
### `Agent.stats()`
Returns an object of stats by origin in the format of `Record<string, TClientStats | TPoolStats>`
See [`PoolStats`](/docs/docs/api/PoolStats.md) and [`ClientStats`](/docs/docs/api/ClientStats.md).

99
node_modules/undici/docs/docs/api/BalancedPool.md generated vendored Normal file

@@ -0,0 +1,99 @@
# Class: BalancedPool
Extends: `undici.Dispatcher`
A pool of [Pool](/docs/docs/api/Pool.md) instances connected to multiple upstreams.
Requests are not guaranteed to be dispatched in order of invocation.
## `new BalancedPool(upstreams [, options])`
Arguments:
* **upstreams** `URL | string | string[]` - It should only include the **protocol, hostname, and port**.
* **options** `BalancedPoolOptions` (optional)
### Parameter: `BalancedPoolOptions`
Extends: [`PoolOptions`](/docs/docs/api/Pool.md#parameter-pooloptions)
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)`
The `PoolOptions` are passed to each of the `Pool` instances being created.
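A minimal sketch (the upstream addresses are placeholders); requests issued on the pool are spread across the upstreams:
```js
import { BalancedPool } from 'undici'

const pool = new BalancedPool([
  'http://localhost:3001',
  'http://localhost:3002'
])

// Dispatched requests are balanced across the two upstreams.
const { statusCode, body } = await pool.request({ path: '/', method: 'GET' })
console.log(statusCode, await body.text())
await pool.close()
```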
## Instance Properties
### `BalancedPool.upstreams`
Returns an array of upstreams that were previously added.
### `BalancedPool.closed`
Implements [Client.closed](/docs/docs/api/Client.md#clientclosed)
### `BalancedPool.destroyed`
Implements [Client.destroyed](/docs/docs/api/Client.md#clientdestroyed)
### `Pool.stats`
Returns [`PoolStats`](/docs/docs/api/PoolStats.md) instance for this pool.
## Instance Methods
### `BalancedPool.addUpstream(upstream)`
Add an upstream.
Arguments:
* **upstream** `string` - It should only include the **protocol, hostname, and port**.
### `BalancedPool.removeUpstream(upstream)`
Removes an upstream that was previously added.
### `BalancedPool.close([callback])`
Implements [`Dispatcher.close([callback])`](/docs/docs/api/Dispatcher.md#dispatcherclosecallback-promise).
### `BalancedPool.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](/docs/docs/api/Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `BalancedPool.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback).
### `BalancedPool.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
### `BalancedPool.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler).
### `BalancedPool.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
### `BalancedPool.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `BalancedPool.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback).
## Instance Events
### Event: `'connect'`
See [Dispatcher Event: `'connect'`](/docs/docs/api/Dispatcher.md#event-connect).
### Event: `'disconnect'`
See [Dispatcher Event: `'disconnect'`](/docs/docs/api/Dispatcher.md#event-disconnect).
### Event: `'drain'`
See [Dispatcher Event: `'drain'`](/docs/docs/api/Dispatcher.md#event-drain).

30
node_modules/undici/docs/docs/api/CacheStorage.md generated vendored Normal file

@@ -0,0 +1,30 @@
# CacheStorage
Undici exposes a W3C spec-compliant implementation of [CacheStorage](https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage) and [Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache).
## Opening a Cache
Undici exports a top-level CacheStorage instance. You can open a new Cache, or duplicate a Cache with an existing name, by using `CacheStorage.prototype.open`. If you open a Cache with the same name as an already-existing Cache, its list of cached Responses will be shared between both instances.
```mjs
import { caches } from 'undici'
const cache_1 = await caches.open('v1')
const cache_2 = await caches.open('v1')
// Although .open() creates a new instance,
assert(cache_1 !== cache_2)
// The same Response is matched in both.
assert.deepStrictEqual(await cache_1.match('/req'), await cache_2.match('/req'))
```
## Deleting a Cache
If a Cache is deleted, the cached Responses/Requests can still be used.
```mjs
const response = await cache_1.match('/req')
await caches.delete('v1')
await response.text() // the Response's body
```

151
node_modules/undici/docs/docs/api/CacheStore.md generated vendored Normal file

@@ -0,0 +1,151 @@
# Cache Store
A Cache Store is responsible for storing and retrieving cached responses.
It is also responsible for deciding which specific response to use based off of
a response's `Vary` header (if present). It is expected to be compliant with
[RFC-9111](https://www.rfc-editor.org/rfc/rfc9111.html).
## Pre-built Cache Stores
### `MemoryCacheStore`
The `MemoryCacheStore` stores the responses in-memory.
**Options**
- `maxSize` - The maximum total size in bytes of all stored responses. Default `104857600` (100MB).
- `maxCount` - The maximum amount of responses to store. Default `1024`.
- `maxEntrySize` - The maximum size in bytes that a response's body can be. If a response's body is greater than or equal to this, the response will not be cached. Default `5242880` (5MB).
### Getters
#### `MemoryCacheStore.size`
Returns the current total size in bytes of all stored responses.
### Methods
#### `MemoryCacheStore.isFull()`
Returns a boolean indicating whether the cache has reached its maximum size or count.
### Events
#### `'maxSizeExceeded'`
Emitted when the cache exceeds its maximum size or count limits. The event payload contains `size`, `maxSize`, `count`, and `maxCount` properties.
### `SqliteCacheStore`
The `SqliteCacheStore` stores the responses in a SQLite database.
Under the hood, it uses Node.js' [`node:sqlite`](https://nodejs.org/api/sqlite.html) api.
The `SqliteCacheStore` is only exposed if the `node:sqlite` api is present.
**Options**
- `location` - The location of the SQLite database to use. Default `:memory:`.
- `maxCount` - The maximum number of entries to store in the database. Default `Infinity`.
- `maxEntrySize` - The maximum size in bytes that a response's body can be. If a response's body is greater than or equal to this, the response will not be cached. Default `Infinity`.
## Defining a Custom Cache Store
The store must implement the following functions:
### Getter: `isFull`
Optional. This tells the cache interceptor if the store is full or not. If this is true,
the cache interceptor will not attempt to cache the response.
### Function: `get`
Parameters:
* **req** `Dispatcher.RequestOptions` - Incoming request
Returns: `GetResult | Promise<GetResult | undefined> | undefined` - If the request is cached, the cached response is returned; unless the request's method is `HEAD`, the response body is returned as well.
If the request isn't cached, `undefined` is returned.
Response properties:
* **response** `CacheValue` - The cached response data.
* **body** `Readable | undefined` - The response's body.
### Function: `createWriteStream`
Parameters:
* **req** `Dispatcher.RequestOptions` - Incoming request
* **value** `CacheValue` - Response to store
Returns: `Writable | undefined` - If the store is full, return `undefined`. Otherwise, return a writable so that the cache interceptor can stream the body and trailers to the store.
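For orientation, here is a deliberately tiny, non-production sketch of a store with this shape; it ignores `Vary` handling and expiry, keys entries only by origin and path, and follows the `get` return shape described above:
```js
import { Readable, Writable } from 'node:stream'

class TinyMemoryCacheStore {
  #entries = new Map()

  get isFull () {
    return this.#entries.size >= 1000
  }

  get (req) {
    const entry = this.#entries.get(`${req.origin}:${req.path}`)
    if (!entry) return undefined
    return {
      response: entry.value,              // the stored CacheValue metadata
      body: Readable.from(entry.chunks)   // replay the buffered body
    }
  }

  createWriteStream (req, value) {
    if (this.isFull) return undefined
    const chunks = []
    const entries = this.#entries
    return new Writable({
      write (chunk, _encoding, callback) {
        chunks.push(chunk)
        callback()
      },
      final (callback) {
        entries.set(`${req.origin}:${req.path}`, { value, chunks })
        callback()
      }
    })
  }
}
```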
## `CacheValue`
This is an interface containing the majority of a response's data (minus the body).
### Property `statusCode`
`number` - The response's HTTP status code.
### Property `statusMessage`
`string` - The response's HTTP status message.
### Property `rawHeaders`
`Buffer[]` - The response's headers.
### Property `vary`
`Record<string, string | string[]> | undefined` - The headers defined by the response's `Vary` header
and their respective values for later comparison
For example, for a response like
```
Vary: content-encoding, accepts
content-encoding: utf8
accepts: application/json
```
This would be
```js
{
'content-encoding': 'utf8',
accepts: 'application/json'
}
```
### Property `cachedAt`
`number` - Time in millis that this value was cached.
### Property `staleAt`
`number` - Time in millis that this value is considered stale.
### Property `deleteAt`
`number` - Time in millis that this value is to be deleted from the cache. This
is either the same as `staleAt` or the `max-stale` caching directive.
The store must not return a response after the time defined in this property.
## `CacheStoreReadable`
This extends Node's [`Readable`](https://nodejs.org/api/stream.html#class-streamreadable)
and defines extra properties relevant to the cache interceptor.
### Getter: `value`
The response's [`CacheStoreValue`](/docs/docs/api/CacheStore.md#cachestorevalue)
## `CacheStoreWriteable`
This extends Node's [`Writable`](https://nodejs.org/api/stream.html#class-streamwritable)
and defines extra properties relevant to the cache interceptor.
### Setter: `rawTrailers`
If the response has trailers, the cache interceptor will pass them to the cache
interceptor through this method.

281
node_modules/undici/docs/docs/api/Client.md generated vendored Normal file

@@ -0,0 +1,281 @@
# Class: Client
Extends: `undici.Dispatcher`
A basic HTTP/1.1 client, mapped on top a single TCP/TLS connection. Pipelining is disabled by default.
Requests are not guaranteed to be dispatched in order of invocation.
## `new Client(url[, options])`
Arguments:
* **url** `URL | string` - Should only include the **protocol, hostname, and port**.
* **options** `ClientOptions` (optional)
Returns: `Client`
### Parameter: `ClientOptions`
* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds. Note that the timeout is reset each time data is written to the socket.
* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Defaults to 10 minutes.
* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `2e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 2 seconds.
* **maxHeaderSize** `number | null` (optional) - Default: `--max-http-header-size` or `16384` - The maximum length of request headers in bytes. Defaults to Node.js' --max-http-header-size or 16KiB.
* **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable.
* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections.
* **connect** `ConnectOptions | Function | null` (optional) - Default: `null`.
* **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. If true, an error is thrown when the request content-length header doesn't match the length of the request body.
* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation.
* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame.
> **Notes about HTTP/2**
> - It only works under TLS connections. h2c is not supported.
> - The server must support HTTP/2 and choose it as the protocol during the ALPN negotiation.
> - The server must not have a bigger priority for HTTP/1.1 than HTTP/2.
> - Pseudo headers are automatically attached to the request. If you try to set them, they will be overwritten.
> - The `:path` header is automatically set to the request path.
> - The `:method` header is automatically set to the request method.
> - The `:scheme` header is automatically set to the request scheme.
> - The `:authority` header is automatically set to the request `host[:port]`.
> - `PUSH` frames are not yet supported.
#### Parameter: `ConnectOptions`
Every Tls option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback).
Furthermore, the following options can be passed:
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: 100.
* **timeout** `number | null` (optional) - In milliseconds, Default `10e3`.
* **servername** `string | null` (optional)
* **keepAlive** `boolean | null` (optional) - Default: `true` - TCP keep-alive enabled
* **keepAliveInitialDelay** `number | null` (optional) - Default: `60000` - TCP keep-alive interval for the socket in milliseconds
### Example - Basic Client instantiation
This will instantiate the undici Client, but it will not connect to the origin until something is queued. Consider using `client.connect` to prematurely connect to the origin, or just call `client.request`.
```js
'use strict'
import { Client } from 'undici'
const client = new Client('http://localhost:3000')
```
### Example - Custom connector
This will allow you to perform some additional check on the socket that will be used for the next request.
```js
'use strict'
import { Client, buildConnector } from 'undici'
const connector = buildConnector({ rejectUnauthorized: false })
const client = new Client('https://localhost:3000', {
connect (opts, cb) {
connector(opts, (err, socket) => {
if (err) {
cb(err)
} else if (/* assertion */) {
socket.destroy()
cb(new Error('kaboom'))
} else {
cb(null, socket)
}
})
}
})
```
## Instance Methods
### `Client.close([callback])`
Implements [`Dispatcher.close([callback])`](/docs/docs/api/Dispatcher.md#dispatcherclosecallback-promise).
### `Client.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](/docs/docs/api/Dispatcher.md#dispatcherdestroyerror-callback-promise).
Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided).
### `Client.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback).
### `Client.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
### `Client.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler).
### `Client.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
### `Client.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `Client.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback).
## Instance Properties
### `Client.closed`
* `boolean`
`true` after `client.close()` has been called.
### `Client.destroyed`
* `boolean`
`true` after `client.destroy()` has been called, or after `client.close()` has been called and the client shutdown has completed.
### `Client.pipelining`
* `number`
Property to get and set the pipelining factor.
## Instance Events
### Event: `'connect'`
See [Dispatcher Event: `'connect'`](/docs/docs/api/Dispatcher.md#event-connect).
Parameters:
* **origin** `URL`
* **targets** `Array<Dispatcher>`
Emitted when a socket has been created and connected. The client will connect once `client.size > 0`.
#### Example - Client connect event
```js
import { createServer } from 'http'
import { Client } from 'undici'
import { once } from 'events'
const server = createServer((request, response) => {
response.end('Hello, World!')
}).listen()
await once(server, 'listening')
const client = new Client(`http://localhost:${server.address().port}`)
client.on('connect', (origin) => {
console.log(`Connected to ${origin}`) // should print before the request body statement
})
try {
const { body } = await client.request({
path: '/',
method: 'GET'
})
body.setEncoding('utf-8')
body.on('data', console.log)
client.close()
server.close()
} catch (error) {
console.error(error)
client.close()
server.close()
}
```
### Event: `'disconnect'`
See [Dispatcher Event: `'disconnect'`](/docs/docs/api/Dispatcher.md#event-disconnect).
Parameters:
* **origin** `URL`
* **targets** `Array<Dispatcher>`
* **error** `Error`
Emitted when socket has disconnected. The error argument of the event is the error which caused the socket to disconnect. The client will reconnect if or once `client.size > 0`.
#### Example - Client disconnect event
```js
import { createServer } from 'http'
import { Client } from 'undici'
import { once } from 'events'
const server = createServer((request, response) => {
response.destroy()
}).listen()
await once(server, 'listening')
const client = new Client(`http://localhost:${server.address().port}`)
client.on('disconnect', (origin) => {
console.log(`Disconnected from ${origin}`)
})
try {
await client.request({
path: '/',
method: 'GET'
})
} catch (error) {
console.error(error.message)
client.close()
server.close()
}
```
### Event: `'drain'`
Emitted when pipeline is no longer busy.
See [Dispatcher Event: `'drain'`](/docs/docs/api/Dispatcher.md#event-drain).
#### Example - Client drain event
```js
import { createServer } from 'http'
import { Client } from 'undici'
import { once } from 'events'
const server = createServer((request, response) => {
response.end('Hello, World!')
}).listen()
await once(server, 'listening')
const client = new Client(`http://localhost:${server.address().port}`)
client.on('drain', () => {
console.log('drain event')
client.close()
server.close()
})
const requests = [
client.request({ path: '/', method: 'GET' }),
client.request({ path: '/', method: 'GET' }),
client.request({ path: '/', method: 'GET' })
]
await Promise.all(requests)
console.log('requests completed')
```
### Event: `'error'`
Emitted for user errors, such as throwing in the `onError` handler.

27
node_modules/undici/docs/docs/api/ClientStats.md generated vendored Normal file

@@ -0,0 +1,27 @@
# Class: ClientStats
Stats for a [Client](/docs/docs/api/Client.md).
## `new ClientStats(client)`
Arguments:
* **client** `Client` - Client from which to return stats.
## Instance Properties
### `ClientStats.connected`
Whether this client currently has an open socket connection.
### `ClientStats.pending`
Number of pending requests of this client.
### `ClientStats.running`
Number of currently active requests across this client.
### `ClientStats.size`
Number of active, pending, or queued requests of this client.

115
node_modules/undici/docs/docs/api/Connector.md generated vendored Normal file

@@ -0,0 +1,115 @@
# Connector
Undici creates the underlying socket via the connector builder.
Normally, this happens automatically and you don't need to care about this,
but if you need to perform some additional check over the currently used socket,
this is the right place.
If you want to create a custom connector, you must import the `buildConnector` utility.
#### Parameter: `buildConnector.BuildOptions`
Every Tls option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback).
Furthermore, the following options can be passed:
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: `100`.
* **timeout** `number | null` (optional) - In milliseconds. Default `10e3`.
* **servername** `string | null` (optional)
Once you call `buildConnector`, it will return a connector function, which takes the following parameters.
#### Parameter: `connector.Options`
* **hostname** `string` (required)
* **host** `string` (optional)
* **protocol** `string` (required)
* **port** `string` (required)
* **servername** `string` (optional)
* **localAddress** `string | null` (optional) Local address the socket should connect from.
* **httpSocket** `Socket` (optional) Establish secure connection on a given socket rather than creating a new socket. It can only be sent on TLS update.
### Basic example
```js
'use strict'
import { Client, buildConnector } from 'undici'
const connector = buildConnector({ rejectUnauthorized: false })
const client = new Client('https://localhost:3000', {
connect (opts, cb) {
connector(opts, (err, socket) => {
if (err) {
cb(err)
} else if (/* assertion */) {
socket.destroy()
cb(new Error('kaboom'))
} else {
cb(null, socket)
}
})
}
})
```
### Example: validate the CA fingerprint
```js
'use strict'
import { Client, buildConnector } from 'undici'
const caFingerprint = 'FO:OB:AR'
const connector = buildConnector({ rejectUnauthorized: false })
const client = new Client('https://localhost:3000', {
connect (opts, cb) {
connector(opts, (err, socket) => {
if (err) {
cb(err)
} else if (getIssuerCertificate(socket).fingerprint256 !== caFingerprint) {
socket.destroy()
cb(new Error('Fingerprint does not match or malformed certificate'))
} else {
cb(null, socket)
}
})
}
})
client.request({
path: '/',
method: 'GET'
}, (err, data) => {
if (err) throw err
const bufs = []
data.body.on('data', (buf) => {
bufs.push(buf)
})
data.body.on('end', () => {
console.log(Buffer.concat(bufs).toString('utf8'))
client.close()
})
})
function getIssuerCertificate (socket) {
let certificate = socket.getPeerCertificate(true)
while (certificate && Object.keys(certificate).length > 0) {
// invalid certificate
if (certificate.issuerCertificate == null) {
return null
}
// We have reached the root certificate.
// In case of self-signed certificates, `issuerCertificate` may be a circular reference.
if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) {
break
}
// continue the loop
certificate = certificate.issuerCertificate
}
return certificate
}
```

node_modules/undici/docs/docs/api/ContentType.md generated vendored Normal file

@@ -0,0 +1,57 @@
# MIME Type Parsing
## `MIMEType` interface
* **type** `string`
* **subtype** `string`
* **parameters** `Map<string, string>`
* **essence** `string`
## `parseMIMEType(input)`
Implements [parse a MIME type](https://mimesniff.spec.whatwg.org/#parse-a-mime-type).
Parses a MIME type, returning its type, subtype, and any associated parameters. If the parser can't parse an input it returns the string literal `'failure'`.
```js
import { parseMIMEType } from 'undici'
parseMIMEType('text/html; charset=gbk')
// {
// type: 'text',
// subtype: 'html',
// parameters: Map(1) { 'charset' => 'gbk' },
// essence: 'text/html'
// }
```
Arguments:
* **input** `string`
Returns: `MIMEType|'failure'`
## `serializeAMimeType(input)`
Implements [serialize a MIME type](https://mimesniff.spec.whatwg.org/#serialize-a-mime-type).
Serializes a MIMEType object.
```js
import { serializeAMimeType } from 'undici'
serializeAMimeType({
type: 'text',
subtype: 'html',
parameters: new Map([['charset', 'gbk']]),
essence: 'text/html'
})
// text/html;charset=gbk
```
Arguments:
* **mimeType** `MIMEType`
Returns: `string`

node_modules/undici/docs/docs/api/Cookies.md generated vendored Normal file

@@ -0,0 +1,101 @@
# Cookie Handling
## `Cookie` interface
* **name** `string`
* **value** `string`
* **expires** `Date|number` (optional)
* **maxAge** `number` (optional)
* **domain** `string` (optional)
* **path** `string` (optional)
* **secure** `boolean` (optional)
* **httpOnly** `boolean` (optional)
* **sameSite** `'Strict'|'Lax'|'None'` (optional)
* **unparsed** `string[]` (optional) Left over attributes that weren't parsed.
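As a rough sketch (the field values are placeholders), a `Cookie` object using several of the optional attributes above can be appended with `setCookie`, documented further below:
```js
import { setCookie, Headers } from 'undici'

const headers = new Headers()

// A Cookie object exercising several optional attributes from the interface above.
setCookie(headers, {
  name: 'session',
  value: 'abc123',
  path: '/',
  secure: true,
  httpOnly: true,
  sameSite: 'Lax',
  maxAge: 3600
})

console.log(headers.get('set-cookie'))
```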
## `deleteCookie(headers, name[, attributes])`
Sets the expiry time of the cookie to the unix epoch, causing browsers to delete it when received.
```js
import { deleteCookie, Headers } from 'undici'
const headers = new Headers()
deleteCookie(headers, 'name')
console.log(headers.get('set-cookie')) // name=; Expires=Thu, 01 Jan 1970 00:00:00 GMT
```
Arguments:
* **headers** `Headers`
* **name** `string`
* **attributes** `{ path?: string, domain?: string }` (optional)
Returns: `void`
## `getCookies(headers)`
Parses the `Cookie` header and returns a list of attributes and values.
```js
import { getCookies, Headers } from 'undici'
const headers = new Headers({
cookie: 'get=cookies; and=attributes'
})
console.log(getCookies(headers)) // { get: 'cookies', and: 'attributes' }
```
Arguments:
* **headers** `Headers`
Returns: `Record<string, string>`
## `getSetCookies(headers)`
Parses all `Set-Cookie` headers.
```js
import { getSetCookies, Headers } from 'undici'
const headers = new Headers({ 'set-cookie': 'undici=getSetCookies; Secure' })
console.log(getSetCookies(headers))
// [
// {
// name: 'undici',
// value: 'getSetCookies',
// secure: true
// }
// ]
```
Arguments:
* **headers** `Headers`
Returns: `Cookie[]`
## `setCookie(headers, cookie)`
Appends a cookie to the `Set-Cookie` header.
```js
import { setCookie, Headers } from 'undici'
const headers = new Headers()
setCookie(headers, { name: 'undici', value: 'setCookie' })
console.log(headers.get('Set-Cookie')) // undici=setCookie
```
Arguments:
* **headers** `Headers`
* **cookie** `Cookie`
Returns: `void`

node_modules/undici/docs/docs/api/Debug.md generated vendored Normal file

@@ -0,0 +1,62 @@
# Debug
Undici (and subsequently `fetch` and `websocket`) exposes debug statements that can be enabled by setting `NODE_DEBUG` within the environment.
The flags available are:
## `undici`
This flag enables debug statements for the core undici library.
```sh
NODE_DEBUG=undici node script.js
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: connected to nodejs.org using https:h1
UNDICI 16241: sending request to GET https://nodejs.org/
UNDICI 16241: received response to GET https://nodejs.org/ - HTTP 307
UNDICI 16241: connecting to nodejs.org using https:h1
UNDICI 16241: trailers received from GET https://nodejs.org/
UNDICI 16241: connected to nodejs.org using https:h1
UNDICI 16241: sending request to GET https://nodejs.org/en
UNDICI 16241: received response to GET https://nodejs.org/en - HTTP 200
UNDICI 16241: trailers received from GET https://nodejs.org/en
```
## `fetch`
This flag enables debug statements for the `fetch` API.
> **Note**: statements are pretty similar to the ones in the `undici` flag, but scoped to `fetch`
```sh
NODE_DEBUG=fetch node script.js
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: connected to nodejs.org using https:h1
FETCH 16241: sending request to GET https://nodejs.org/
FETCH 16241: received response to GET https://nodejs.org/ - HTTP 307
FETCH 16241: connecting to nodejs.org using https:h1
FETCH 16241: trailers received from GET https://nodejs.org/
FETCH 16241: connected to nodejs.org using https:h1
FETCH 16241: sending request to GET https://nodejs.org/en
FETCH 16241: received response to GET https://nodejs.org/en - HTTP 200
FETCH 16241: trailers received from GET https://nodejs.org/en
```
## `websocket`
This flag enables debug statements for the `WebSocket` API.
> **Note**: statements can overlap with `UNDICI` ones if `undici` or `fetch` flag has been enabled as well.
```sh
NODE_DEBUG=websocket node script.js
WEBSOCKET 18309: connecting to echo.websocket.org using https:h1
WEBSOCKET 18309: connected to echo.websocket.org using https:h1
WEBSOCKET 18309: sending request to GET https://echo.websocket.org/
WEBSOCKET 18309: connection opened <ip_address>
```

node_modules/undici/docs/docs/api/DiagnosticsChannel.md generated vendored Normal file

@@ -0,0 +1,256 @@
# Diagnostics Channel Support
Stability: Experimental.
Undici supports the [`diagnostics_channel`](https://nodejs.org/api/diagnostics_channel.html) (currently available only on Node.js v16+).
It is the preferred way to instrument Undici and retrieve internal information.
The channels available are the following.
## `undici:request:create`
This message is published when a new outgoing request is created.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => {
console.log('origin', request.origin)
console.log('completed', request.completed)
console.log('method', request.method)
console.log('path', request.path)
console.log('headers', request.headers) // array of strings, e.g: ['foo', 'bar']
request.addHeader('hello', 'world')
console.log('headers', request.headers) // e.g. ['foo', 'bar', 'hello', 'world']
})
```
Note: a request is only loosely coupled to a given socket.
## `undici:request:bodyChunkSent`
This message is published when a chunk of the request body is being sent.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:bodyChunkSent').subscribe(({ request, chunk }) => {
// request is the same object undici:request:create
})
```
## `undici:request:bodySent`
This message is published after the request body has been fully sent.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:bodySent').subscribe(({ request }) => {
// request is the same object undici:request:create
})
```
## `undici:request:headers`
This message is published after the response headers have been received.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => {
// request is the same object undici:request:create
console.log('statusCode', response.statusCode)
console.log(response.statusText)
// response.headers are buffers.
console.log(response.headers.map((x) => x.toString()))
})
```
## `undici:request:bodyChunkReceived`
This message is published after a chunk of the response body has been received.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:bodyChunkReceived').subscribe(({ request, chunk }) => {
// request is the same object undici:request:create
})
```
## `undici:request:trailers`
This message is published after the response body and trailers have been received, i.e. the response has been completed.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => {
// request is the same object undici:request:create
console.log('completed', request.completed)
// trailers are buffers.
console.log(trailers.map((x) => x.toString()))
})
```
## `undici:request:error`
This message is published if the request is going to error, but it has not errored yet.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:request:error').subscribe(({ request, error }) => {
// request is the same object undici:request:create
})
```
## `undici:client:sendHeaders`
This message is published right before the first byte of the request is written to the socket.
*Note*: It will publish the exact headers that will be sent to the server in raw format.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => {
// request is the same object undici:request:create
console.log(`Full headers list ${headers.split('\r\n')}`);
})
```
## `undici:client:beforeConnect`
This message is published before creating a new connection for **any** request.
You can not assume that this event is related to any specific request.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => {
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
})
```
## `undici:client:connected`
This message is published after a connection is established.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:connected').subscribe(({ socket, connectParams, connector }) => {
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
})
```
## `undici:client:connectError`
This message is published if creating a new connection fails.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, socket, connectParams, connector }) => {
// const { host, hostname, protocol, port, servername, version } = connectParams
// connector is a function that creates the socket
console.log(`Connect failed with ${error.message}`)
})
```
## `undici:websocket:open`
This message is published after the client has successfully connected to a server.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:open').subscribe(({
address, // { address: string, family: string, port: number }
protocol, // string - negotiated subprotocol
extensions, // string - negotiated extensions
websocket, // WebSocket - the WebSocket instance
handshakeResponse // object - HTTP response that upgraded the connection
}) => {
console.log(address) // address, family, and port
console.log(protocol) // negotiated subprotocols
console.log(extensions) // negotiated extensions
console.log(websocket) // the WebSocket instance
// Handshake response details
console.log(handshakeResponse.status) // 101 for successful WebSocket upgrade
console.log(handshakeResponse.statusText) // 'Switching Protocols'
console.log(handshakeResponse.headers) // Object containing response headers
})
```
### Handshake Response Object
The `handshakeResponse` object contains the HTTP response that upgraded the connection to WebSocket:
- `status` (number): The HTTP status code (101 for successful WebSocket upgrade)
- `statusText` (string): The HTTP status message ('Switching Protocols' for successful upgrade)
- `headers` (object): The HTTP response headers from the server, including:
- `upgrade: 'websocket'`
- `connection: 'upgrade'`
- `sec-websocket-accept` and other WebSocket-related headers
This information is particularly useful for debugging and monitoring WebSocket connections, as it provides access to the initial HTTP handshake response that established the WebSocket connection.
## `undici:websocket:close`
This message is published after the connection has closed.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:close').subscribe(({ websocket, code, reason }) => {
console.log(websocket) // the WebSocket instance
console.log(code) // the closing status code
console.log(reason) // the closing reason
})
```
## `undici:websocket:socket_error`
This message is published if the socket experiences an error.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:socket_error').subscribe((error) => {
console.log(error)
})
```
## `undici:websocket:ping`
This message is published after the client receives a ping frame, if the connection is not closing.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:ping').subscribe(({ payload, websocket }) => {
// a Buffer or undefined, containing the optional application data of the frame
console.log(payload)
console.log(websocket) // the WebSocket instance
})
```
## `undici:websocket:pong`
This message is published after the client receives a pong frame.
```js
import diagnosticsChannel from 'diagnostics_channel'
diagnosticsChannel.channel('undici:websocket:pong').subscribe(({ payload, websocket }) => {
// a Buffer or undefined, containing the optional application data of the frame
console.log(payload)
console.log(websocket) // the WebSocket instance
})
```

node_modules/undici/docs/docs/api/Dispatcher.md generated vendored Normal file
File diff suppressed because it is too large

node_modules/undici/docs/docs/api/EnvHttpProxyAgent.md generated vendored Normal file

@@ -0,0 +1,159 @@
# Class: EnvHttpProxyAgent
Extends: `undici.Dispatcher`
EnvHttpProxyAgent automatically reads the proxy configuration from the environment variables `http_proxy`, `https_proxy`, and `no_proxy` and sets up the proxy agents accordingly. When `http_proxy` and `https_proxy` are set, `http_proxy` is used for HTTP requests and `https_proxy` is used for HTTPS requests. If only `http_proxy` is set, `http_proxy` is used for both HTTP and HTTPS requests. If only `https_proxy` is set, it is only used for HTTPS requests.
`no_proxy` is a comma or space-separated list of hostnames that should not be proxied. The list may contain leading wildcard characters (`*`). If `no_proxy` is set, the EnvHttpProxyAgent will bypass the proxy for requests to hosts that match the list. If `no_proxy` is set to `"*"`, the EnvHttpProxyAgent will bypass the proxy for all requests.
Uppercase environment variables are also supported: `HTTP_PROXY`, `HTTPS_PROXY`, and `NO_PROXY`. However, if both the lowercase and uppercase environment variables are set, the uppercase environment variables will be ignored.
## `new EnvHttpProxyAgent([options])`
Arguments:
* **options** `EnvHttpProxyAgentOptions` (optional) - extends the `Agent` options.
Returns: `EnvHttpProxyAgent`
### Parameter: `EnvHttpProxyAgentOptions`
Extends: [`AgentOptions`](/docs/docs/api/Agent.md#parameter-agentoptions)
* **httpProxy** `string` (optional) - When set, it will override the `HTTP_PROXY` environment variable.
* **httpsProxy** `string` (optional) - When set, it will override the `HTTPS_PROXY` environment variable.
* **noProxy** `string` (optional) - When set, it will override the `NO_PROXY` environment variable.
Examples:
```js
import { EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
// or
const envHttpProxyAgent = new EnvHttpProxyAgent({ httpProxy: 'my.proxy.server:8080', httpsProxy: 'my.proxy.server:8443', noProxy: 'localhost' })
```
#### Example - EnvHttpProxyAgent instantiation
This will instantiate the EnvHttpProxyAgent. It will not do anything until registered as the agent to use with requests.
```js
import { EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
```
#### Example - Basic Proxy Fetch with global agent dispatcher
```js
import { setGlobalDispatcher, fetch, EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
setGlobalDispatcher(envHttpProxyAgent)
const response = await fetch('http://localhost:3000/foo')
console.log('response received', response.status) // response received 200
const data = await response.json() // data { foo: "bar" }
```
#### Example - Basic Proxy Request with global agent dispatcher
```js
import { setGlobalDispatcher, request, EnvHttpProxyAgent } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
setGlobalDispatcher(envHttpProxyAgent)
const { statusCode, body } = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Request with local agent dispatcher
```js
import { EnvHttpProxyAgent, request } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: envHttpProxyAgent })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Fetch with local agent dispatcher
```js
import { EnvHttpProxyAgent, fetch } from 'undici'
const envHttpProxyAgent = new EnvHttpProxyAgent()
const response = await fetch('http://localhost:3000/foo', { dispatcher: envHttpProxyAgent })
console.log('response received', response.status) // response received 200
const data = await response.json() // data { foo: "bar" }
```
## Instance Methods
### `EnvHttpProxyAgent.close([callback])`
Implements [`Dispatcher.close([callback])`](/docs/docs/api/Dispatcher.md#dispatcherclosecallback-promise).
### `EnvHttpProxyAgent.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](/docs/docs/api/Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `EnvHttpProxyAgent.dispatch(options, handler: AgentDispatchOptions)`
Implements [`Dispatcher.dispatch(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
#### Parameter: `AgentDispatchOptions`
Extends: [`DispatchOptions`](/docs/docs/api/Dispatcher.md#parameter-dispatchoptions)
* **origin** `string | URL`
### `EnvHttpProxyAgent.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback).
### `EnvHttpProxyAgent.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler).
### `EnvHttpProxyAgent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
### `EnvHttpProxyAgent.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `EnvHttpProxyAgent.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback).

node_modules/undici/docs/docs/api/Errors.md generated vendored Normal file

@@ -0,0 +1,48 @@
# Errors
Undici exposes a variety of error objects that you can use to enhance your error handling.
You can find all the error objects inside the `errors` key.
```js
import { errors } from 'undici'
```
| Error | Error Codes | Description |
| ------------------------------------ | ------------------------------------- | ------------------------------------------------------------------------- |
| `UndiciError` | `UND_ERR` | all errors below are extended from `UndiciError`. |
| `ConnectTimeoutError` | `UND_ERR_CONNECT_TIMEOUT` | socket is destroyed due to connect timeout. |
| `HeadersTimeoutError` | `UND_ERR_HEADERS_TIMEOUT` | socket is destroyed due to headers timeout. |
| `HeadersOverflowError` | `UND_ERR_HEADERS_OVERFLOW` | socket is destroyed due to headers' max size being exceeded. |
| `BodyTimeoutError` | `UND_ERR_BODY_TIMEOUT` | socket is destroyed due to body timeout. |
| `InvalidArgumentError` | `UND_ERR_INVALID_ARG` | passed an invalid argument. |
| `InvalidReturnValueError` | `UND_ERR_INVALID_RETURN_VALUE` | returned an invalid value. |
| `RequestAbortedError` | `UND_ERR_ABORTED` | the request has been aborted by the user |
| `ClientDestroyedError` | `UND_ERR_DESTROYED` | trying to use a destroyed client. |
| `ClientClosedError` | `UND_ERR_CLOSED` | trying to use a closed client. |
| `SocketError` | `UND_ERR_SOCKET` | there is an error with the socket. |
| `NotSupportedError` | `UND_ERR_NOT_SUPPORTED` | encountered unsupported functionality. |
| `RequestContentLengthMismatchError` | `UND_ERR_REQ_CONTENT_LENGTH_MISMATCH` | request body does not match content-length header |
| `ResponseContentLengthMismatchError` | `UND_ERR_RES_CONTENT_LENGTH_MISMATCH` | response body does not match content-length header |
| `InformationalError` | `UND_ERR_INFO` | expected error with reason |
| `ResponseExceededMaxSizeError` | `UND_ERR_RES_EXCEEDED_MAX_SIZE` | response body exceed the max size allowed |
| `SecureProxyConnectionError` | `UND_ERR_PRX_TLS` | tls connection to a proxy failed |
Be aware of the possible difference between the global dispatcher version and the actual undici version you might be using. We recommend avoiding `instanceof errors.UndiciError` checks and checking `error.code === '<error_code>'` instead to avoid inconsistencies.
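For example, a minimal sketch of the recommended `error.code` check (the URL is a placeholder):
```js
import { request } from 'undici'

try {
  await request('http://localhost:3000/foo')
} catch (error) {
  // Prefer the code check over `instanceof errors.UndiciError`,
  // which can be unreliable across mixed undici versions.
  if (error.code === 'UND_ERR_CONNECT_TIMEOUT') {
    console.log('connection timed out')
  } else {
    throw error
  }
}
```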
### `SocketError`
The `SocketError` has a `.socket` property which holds socket metadata:
```ts
interface SocketInfo {
localAddress?: string
localPort?: number
remoteAddress?: string
remotePort?: number
remoteFamily?: string
timeout?: number
bytesWritten?: number
bytesRead?: number
}
```
Be aware that in some cases the `.socket` property can be `null`.
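A brief sketch of inspecting the socket metadata after a failed request (placeholder URL; remember `.socket` can be `null`):
```js
import { request } from 'undici'

try {
  await request('http://localhost:3000/foo')
} catch (error) {
  if (error.code === 'UND_ERR_SOCKET') {
    // `socket` holds the SocketInfo metadata described above, or null.
    console.log('socket error from', error.socket?.remoteAddress, error.socket?.remotePort)
  }
}
```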

node_modules/undici/docs/docs/api/EventSource.md generated vendored Normal file

@@ -0,0 +1,45 @@
# EventSource
> ⚠️ Warning: the EventSource API is experimental.
Undici exposes a WHATWG spec-compliant implementation of [EventSource](https://developer.mozilla.org/en-US/docs/Web/API/EventSource)
for [Server-Sent Events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events).
## Instantiating EventSource
Undici exports an EventSource class. You can instantiate the EventSource as
follows:
```mjs
import { EventSource } from 'undici'
const eventSource = new EventSource('http://localhost:3000')
eventSource.onmessage = (event) => {
console.log(event.data)
}
```
## Using a custom Dispatcher
Undici allows you to set your own Dispatcher in the EventSource constructor.
An example that modifies the request headers:
```mjs
import { EventSource, Agent } from 'undici'
class CustomHeaderAgent extends Agent {
dispatch (opts) {
opts.headers['x-custom-header'] = 'hello world'
return super.dispatch(...arguments)
}
}
const eventSource = new EventSource('http://localhost:3000', {
dispatcher: new CustomHeaderAgent()
})
```
More information about the EventSource API can be found on
[MDN](https://developer.mozilla.org/en-US/docs/Web/API/EventSource).

node_modules/undici/docs/docs/api/Fetch.md generated vendored Normal file

@@ -0,0 +1,52 @@
# Fetch
Undici exposes a fetch() method that starts the process of fetching a resource from the network.
Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch).
## FormData
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData).
Passing any parameter other than `undefined` to the FormData constructor will throw an error.
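As a rough usage sketch (the URL and field names are placeholders, not from the original docs):
```js
import { fetch, FormData } from 'undici'

const form = new FormData()
form.append('username', 'alice')
form.append('comment', 'hello from undici')

// fetch serializes the FormData body as multipart/form-data.
const response = await fetch('http://localhost:3000/upload', {
  method: 'POST',
  body: form
})
console.log(response.status)
```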
## Response
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response)
## Request
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request)
## Headers
This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
# Body Mixins
`Response` and `Request` bodies inherit the body mixin methods; a short usage sketch follows the list. These methods include:
- [`.arrayBuffer()`](https://fetch.spec.whatwg.org/#dom-body-arraybuffer)
- [`.blob()`](https://fetch.spec.whatwg.org/#dom-body-blob)
- [`.bytes()`](https://fetch.spec.whatwg.org/#dom-body-bytes)
- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata)
- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json)
- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text)
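A minimal usage sketch (placeholder URL; note a body can only be consumed once):
```js
import { fetch } from 'undici'

const response = await fetch('http://localhost:3000/data')

// Pick exactly one mixin per response; the body can only be read once.
if (response.headers.get('content-type')?.includes('application/json')) {
  console.log(await response.json())
} else {
  console.log(await response.text())
}
```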
There is an ongoing discussion regarding `.formData()` and its usefulness and performance in server environments. It is recommended to use a dedicated library for parsing `multipart/form-data` bodies, such as [Busboy](https://www.npmjs.com/package/busboy) or [@fastify/busboy](https://www.npmjs.com/package/@fastify/busboy).
These libraries can be interfaced with fetch with the following example code:
```mjs
import { Busboy } from '@fastify/busboy'
import { Readable } from 'node:stream'
const response = await fetch('...')
const busboy = new Busboy({
headers: {
'content-type': response.headers.get('content-type')
}
})
Readable.fromWeb(response.body).pipe(busboy)
```


@@ -0,0 +1,91 @@
# Global Installation
Undici provides an `install()` function to add all WHATWG fetch classes to `globalThis`, making them available globally without requiring imports.
## `install()`
Install all WHATWG fetch classes globally on `globalThis`.
**Example:**
```js
import { install } from 'undici'
// Install all WHATWG fetch classes globally
install()
// Now you can use fetch classes globally without importing
const response = await fetch('https://api.example.com/data')
const data = await response.json()
// All classes are available globally:
const headers = new Headers([['content-type', 'application/json']])
const request = new Request('https://example.com')
const formData = new FormData()
const ws = new WebSocket('wss://example.com')
const eventSource = new EventSource('https://example.com/events')
```
## Installed Classes
The `install()` function adds the following classes to `globalThis`:
| Class | Description |
|-------|-------------|
| `fetch` | The fetch function for making HTTP requests |
| `Headers` | HTTP headers management |
| `Response` | HTTP response representation |
| `Request` | HTTP request representation |
| `FormData` | Form data handling |
| `WebSocket` | WebSocket client |
| `CloseEvent` | WebSocket close event |
| `ErrorEvent` | WebSocket error event |
| `MessageEvent` | WebSocket message event |
| `EventSource` | Server-sent events client |
## Use Cases
Global installation is useful for:
- **Polyfilling environments** that don't have native fetch support
- **Ensuring consistent behavior** across different Node.js versions
- **Library compatibility** when third-party libraries expect global fetch
- **Migration scenarios** where you want to replace built-in implementations
- **Testing environments** where you need predictable fetch behavior
## Example: Polyfilling an Environment
```js
import { install } from 'undici'
// Check if fetch is available and install if needed
if (typeof globalThis.fetch === 'undefined') {
install()
console.log('Undici fetch installed globally')
}
// Now fetch is guaranteed to be available
const response = await fetch('https://api.example.com')
```
## Example: Testing Environment
```js
import { install } from 'undici'
// In test setup, ensure consistent fetch behavior
install()
// Now all tests use undici's implementations
test('fetch API test', async () => {
const response = await fetch('https://example.com')
expect(response).toBeInstanceOf(Response)
})
```
## Notes
- The `install()` function overwrites any existing global implementations
- Classes installed are undici's implementations, not Node.js built-ins
- This provides access to undici's latest features and performance improvements
- The global installation persists for the lifetime of the process

node_modules/undici/docs/docs/api/H2CClient.md generated vendored Normal file

@@ -0,0 +1,262 @@
# Class: H2CClient
Extends: `undici.Dispatcher`
A basic H2C client.
**Example**
```js
const { createServer } = require('node:http2')
const { once } = require('node:events')
const { H2CClient } = require('undici')
const server = createServer((req, res) => {
res.writeHead(200)
res.end('Hello, world!')
})
server.listen()
once(server, 'listening').then(async () => {
  const client = new H2CClient(`http://localhost:${server.address().port}/`)
  const response = await client.request({ path: '/', method: 'GET' })
  console.log(response.statusCode) // 200
  const text = await response.body.text()
  console.log(text) // Hello, world!
})
```
## `new H2CClient(url[, options])`
Arguments:
- **url** `URL | string` - Should only include the **protocol, hostname, and port**. It only supports `http` protocol.
- **options** `H2CClientOptions` (optional)
Returns: `H2CClient`
### Parameter: `H2CClientOptions`
- **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout, in milliseconds, after which a request will time out. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds. Please note the timeout is reset every time data is written to the socket.
- **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
- **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by _keep-alive_ hints from the server. Defaults to 10 minutes.
- **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by _keep-alive_ hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
- **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `2e3` - A number of milliseconds subtracted from server _keep-alive_ hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 2 seconds.
- **maxHeaderSize** `number | null` (optional) - Default: `--max-http-header-size` or `16384` - The maximum length of request headers in bytes. Defaults to Node.js' --max-http-header-size or 16KiB.
- **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable.
- **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame.
- **pipelining** `number | null` (optional) - Defaults to `maxConcurrentStreams` - The number of concurrent requests sent over a single HTTP/2 session, in accordance with the [RFC-7540](https://httpwg.org/specs/rfc7540.html#StreamsLayer) Streams specification. Streams can be closed by the remote server at any time.
- **connect** `ConnectOptions | null` (optional) - Default: `null`.
- **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. If true, an error is thrown when the request content-length header doesn't match the length of the request body.
- **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
- **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
#### Parameter: `H2CConnectOptions`
- **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
- **timeout** `number | null` (optional) - In milliseconds, Default `10e3`.
- **servername** `string | null` (optional)
- **keepAlive** `boolean | null` (optional) - Default: `true` - TCP keep-alive enabled
- **keepAliveInitialDelay** `number | null` (optional) - Default: `60000` - TCP keep-alive interval for the socket in milliseconds
### Example - Basic Client instantiation
This will instantiate the undici H2CClient, but it will not connect to the origin until something is queued. Consider using `client.connect` to prematurely connect to the origin, or just call `client.request`.
```js
"use strict";
import { H2CClient } from "undici";
const client = new H2CClient("http://localhost:3000");
```
## Instance Methods
### `H2CClient.close([callback])`
Implements [`Dispatcher.close([callback])`](/docs/docs/api/Dispatcher.md#dispatcherclosecallback-promise).
### `H2CClient.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](/docs/docs/api/Dispatcher.md#dispatcherdestroyerror-callback-promise).
Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided).
### `H2CClient.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback).
### `H2CClient.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
### `H2CClient.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler).
### `H2CClient.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
### `H2CClient.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `H2CClient.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback).
## Instance Properties
### `H2CClient.closed`
- `boolean`
`true` after `H2CClient.close()` has been called.
### `H2CClient.destroyed`
- `boolean`
`true` after `client.destroy()` has been called or `client.close()` has been called and the client shutdown has completed.
### `H2CClient.pipelining`
- `number`
Property to get and set the pipelining factor.
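A tiny sketch of reading and adjusting the factor (placeholder origin):
```js
import { H2CClient } from 'undici'

const client = new H2CClient('http://localhost:3000')
console.log(client.pipelining) // current factor, defaults to maxConcurrentStreams
client.pipelining = 10 // allow up to 10 concurrent streams on the session
```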
## Instance Events
### Event: `'connect'`
See [Dispatcher Event: `'connect'`](/docs/docs/api/Dispatcher.md#event-connect).
Parameters:
- **origin** `URL`
- **targets** `Array<Dispatcher>`
Emitted when a socket has been created and connected. The client will connect once `client.size > 0`.
#### Example - Client connect event
```js
import { createServer } from "node:http2";
import { H2CClient } from "undici";
import { once } from "events";
const server = createServer((request, response) => {
response.end("Hello, World!");
}).listen();
await once(server, "listening");
const client = new H2CClient(`http://localhost:${server.address().port}`);
client.on("connect", (origin) => {
console.log(`Connected to ${origin}`); // should print before the request body statement
});
try {
const { body } = await client.request({
path: "/",
method: "GET",
});
body.setEncoding("utf-8");
body.on("data", console.log);
client.close();
server.close();
} catch (error) {
console.error(error);
client.close();
server.close();
}
```
### Event: `'disconnect'`
See [Dispatcher Event: `'disconnect'`](/docs/docs/api/Dispatcher.md#event-disconnect).
Parameters:
- **origin** `URL`
- **targets** `Array<Dispatcher>`
- **error** `Error`
Emitted when socket has disconnected. The error argument of the event is the error which caused the socket to disconnect. The client will reconnect if or once `client.size > 0`.
#### Example - Client disconnect event
```js
import { createServer } from "node:http2";
import { H2CClient } from "undici";
import { once } from "events";
const server = createServer((request, response) => {
response.destroy();
}).listen();
await once(server, "listening");
const client = new H2CClient(`http://localhost:${server.address().port}`);
client.on("disconnect", (origin) => {
console.log(`Disconnected from ${origin}`);
});
try {
await client.request({
path: "/",
method: "GET",
});
} catch (error) {
console.error(error.message);
client.close();
server.close();
}
```
### Event: `'drain'`
Emitted when pipeline is no longer busy.
See [Dispatcher Event: `'drain'`](/docs/docs/api/Dispatcher.md#event-drain).
#### Example - Client drain event
```js
import { createServer } from "node:http2";
import { H2CClient } from "undici";
import { once } from "events";
const server = createServer((request, response) => {
response.end("Hello, World!");
}).listen();
await once(server, "listening");
const client = new H2CClient(`http://localhost:${server.address().port}`);
client.on("drain", () => {
console.log("drain event");
client.close();
server.close();
});
const requests = [
client.request({ path: "/", method: "GET" }),
client.request({ path: "/", method: "GET" }),
client.request({ path: "/", method: "GET" }),
];
await Promise.all(requests);
console.log("requests completed");
```
### Event: `'error'`
Invoked for user errors, such as throwing in the `onError` handler.

node_modules/undici/docs/docs/api/MockAgent.md generated vendored Normal file

@@ -0,0 +1,603 @@
# Class: MockAgent
Extends: `undici.Dispatcher`
A mocked Agent class that implements the Agent API. It allows one to intercept HTTP requests made through undici and return mocked responses instead.
## `new MockAgent([options])`
Arguments:
* **options** `MockAgentOptions` (optional) - It extends the `Agent` options.
Returns: `MockAgent`
### Parameter: `MockAgentOptions`
Extends: [`AgentOptions`](/docs/docs/api/Agent.md#parameter-agentoptions)
* **agent** `Agent` (optional) - Default: `new Agent([options])` - a custom agent encapsulated by the MockAgent.
* **ignoreTrailingSlash** `boolean` (optional) - Default: `false` - set the default value for `ignoreTrailingSlash` for interceptors.
* **acceptNonStandardSearchParameters** `boolean` (optional) - Default: `false` - set to `true` if the matcher should also accept non-standard search parameters, such as multi-value items specified with `[]` (e.g. `param[]=1&param[]=2&param[]=3`) and multi-value items whose values are comma-separated (e.g. `param=1,2,3`).
### Example - Basic MockAgent instantiation
This will instantiate the MockAgent. It will not do anything until registered as the agent to use with requests and mock interceptions are added.
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
```
### Example - Basic MockAgent instantiation with custom agent
```js
import { Agent, MockAgent } from 'undici'
const agent = new Agent()
const mockAgent = new MockAgent({ agent })
```
## Instance Methods
### `MockAgent.get(origin)`
This method creates and retrieves MockPool or MockClient instances which can then be used to intercept HTTP requests. If the number of connections on the mock agent is set to 1, a MockClient instance is returned. Otherwise a MockPool instance is returned.
For subsequent `MockAgent.get` calls on the same origin, the same mock instance will be returned.
Arguments:
* **origin** `string | RegExp | (value) => boolean` - a matcher for the pool origin to be retrieved from the MockAgent.
| Matcher type | Condition to pass |
|:------------:| -------------------------- |
| `string` | Exact match against string |
| `RegExp` | Regex must pass |
| `Function` | Function must return true |
Returns: `MockClient | MockPool`.
| `MockAgentOptions` | Mock instance returned |
| -------------------- | ---------------------- |
| `connections === 1` | `MockClient` |
| `connections` > `1` | `MockPool` |
#### Example - Basic Mocked Request
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const { statusCode, body } = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked Request with local mock agent dispatcher
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: mockAgent })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked Request with local mock pool dispatcher
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: mockPool })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked Request with local mock client dispatcher
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent({ connections: 1 })
const mockClient = mockAgent.get('http://localhost:3000')
mockClient.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: mockClient })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Mocked requests with multiple intercepts
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
mockPool.intercept({ path: '/hello'}).reply(200, 'hello')
const result1 = await request('http://localhost:3000/foo')
console.log('response received', result1.statusCode) // response received 200
for await (const data of result1.body) {
console.log('data', data.toString('utf8')) // data foo
}
const result2 = await request('http://localhost:3000/hello')
console.log('response received', result2.statusCode) // response received 200
for await (const data of result2.body) {
console.log('data', data.toString('utf8')) // data hello
}
```
#### Example - Mock different requests within the same file
```js
const { MockAgent, setGlobalDispatcher } = require('undici');
const agent = new MockAgent();
agent.disableNetConnect();
setGlobalDispatcher(agent);
describe('Test', () => {
it('200', async () => {
const mockAgent = agent.get('http://test.com');
// your test
});
it('200', async () => {
const mockAgent = agent.get('http://testing.com');
// your test
});
});
```
#### Example - Mocked request with query body, headers and trailers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo?hello=there&see=ya',
method: 'POST',
body: 'form1=data1&form2=data2'
}).reply(200, { foo: 'bar' }, {
headers: { 'content-type': 'application/json' },
trailers: { 'Content-MD5': 'test' }
})
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo?hello=there&see=ya', {
method: 'POST',
body: 'form1=data1&form2=data2'
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // '{"foo":"bar"}'
}
console.log('trailers', trailers) // { 'content-md5': 'test' }
```
#### Example - Mocked request with origin regex
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get(new RegExp('http://localhost:3000'))
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Mocked request with origin function
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get((origin) => origin === 'http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
### `MockAgent.close()`
Closes the mock agent and waits for registered mock pools and clients to also close before resolving.
Returns: `Promise<void>`
#### Example - clean up after tests are complete
```js
import { MockAgent, setGlobalDispatcher } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
await mockAgent.close()
```
### `MockAgent.dispatch(options, handlers)`
Implements [`Agent.dispatch(options, handlers)`](/docs/docs/api/Agent.md#parameter-agentdispatchoptions).
### `MockAgent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
#### Example - MockAgent request
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await mockAgent.request({
origin: 'http://localhost:3000',
path: '/foo',
method: 'GET'
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
### `MockAgent.deactivate()`
This method disables mocking in MockAgent.
Returns: `void`
#### Example - Deactivate Mocking
```js
import { MockAgent, setGlobalDispatcher } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.deactivate()
```
### `MockAgent.activate()`
This method enables mocking in a MockAgent instance. When instantiated, a MockAgent is automatically activated. Therefore, this method is only effective after `MockAgent.deactivate` has been called.
Returns: `void`
#### Example - Activate Mocking
```js
import { MockAgent, setGlobalDispatcher } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.deactivate()
// No mocking will occur
// Later
mockAgent.activate()
```
### `MockAgent.enableNetConnect([host])`
When requests are not matched in a MockAgent intercept, a real HTTP request is attempted. We can control this further through the use of `enableNetConnect`. This is achieved by defining host matchers so only matching requests will be attempted.
When using a string, it should only include the **hostname and optionally, the port**. In addition, calling this method multiple times with a string will allow all HTTP requests that match these values.
Arguments:
* **host** `string | RegExp | (value) => boolean` - (optional)
Returns: `void`
#### Example - Allow all non-matching urls to be dispatched in a real HTTP request
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect()
await request('http://example.com')
// A real request is made
```
#### Example - Allow requests matching a host string to make real requests
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect('example-1.com')
mockAgent.enableNetConnect('example-2.com:8080')
await request('http://example-1.com')
// A real request is made
await request('http://example-2.com:8080')
// A real request is made
await request('http://example-3.com')
// Will throw
```
#### Example - Allow requests matching a host regex to make real requests
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect(new RegExp('example.com'))
await request('http://example.com')
// A real request is made
```
#### Example - Allow requests matching a host function to make real requests
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
mockAgent.enableNetConnect((value) => value === 'example.com')
await request('http://example.com')
// A real request is made
```
### `MockAgent.disableNetConnect()`
This method causes all requests to throw when requests are not matched in a MockAgent intercept.
Returns: `void`
#### Example - Disable all non-matching requests by throwing an error for each
```js
import { MockAgent, request } from 'undici'
const mockAgent = new MockAgent()
mockAgent.disableNetConnect()
await request('http://example.com')
// Will throw
```
### `MockAgent.pendingInterceptors()`
This method returns any pending interceptors registered on a mock agent. A pending interceptor meets one of the following criteria:
- Is registered with neither `.times(<number>)` nor `.persist()`, and has not been invoked;
- Is persistent (i.e., registered with `.persist()`) and has not been invoked;
- Is registered with `.times(<number>)` and has not been invoked `<number>` of times.
Returns: `PendingInterceptor[]` (where `PendingInterceptor` is a `MockDispatch` with an additional `origin: string`)
#### Example - List all pending interceptors
```js
const agent = new MockAgent()
agent.disableNetConnect()
agent
.get('https://example.com')
.intercept({ method: 'GET', path: '/' })
.reply(200)
const pendingInterceptors = agent.pendingInterceptors()
// Returns [
// {
// timesInvoked: 0,
// times: 1,
// persist: false,
// consumed: false,
// pending: true,
// path: '/',
// method: 'GET',
// body: undefined,
// headers: undefined,
// data: {
// error: null,
// statusCode: 200,
// data: '',
// headers: {},
// trailers: {}
// },
// origin: 'https://example.com'
// }
// ]
```
### `MockAgent.assertNoPendingInterceptors([options])`
This method throws if the mock agent has any pending interceptors. A pending interceptor meets one of the following criteria:
- Is registered with neither `.times(<number>)` nor `.persist()`, and has not been invoked;
- Is persistent (i.e., registered with `.persist()`) and has not been invoked;
- Is registered with `.times(<number>)` and has not been invoked `<number>` of times.
#### Example - Check that there are no pending interceptors
```js
const agent = new MockAgent()
agent.disableNetConnect()
agent
.get('https://example.com')
.intercept({ method: 'GET', path: '/' })
.reply(200)
agent.assertNoPendingInterceptors()
// Throws an UndiciError with the following message:
//
// 1 interceptor is pending:
//
// ┌─────────┬────────┬───────────────────────┬──────┬─────────────┬────────────┬─────────────┬───────────┐
// │ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │
// ├─────────┼────────┼───────────────────────┼──────┼─────────────┼────────────┼─────────────┼───────────┤
// │ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ '❌' │ 0 │ 1 │
// └─────────┴────────┴───────────────────────┴──────┴─────────────┴────────────┴─────────────┴───────────┘
```
#### Example - access call history on MockAgent
You can register every call made within a MockAgent to be able to retrieve the body, headers and so on.
This is not enabled by default.
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent({ enableCallHistory: true })
setGlobalDispatcher(mockAgent)
await request('http://example.com', { query: { item: 1 }})
mockAgent.getCallHistory()?.firstCall()
// Returns
// MockCallHistoryLog {
// body: undefined,
// headers: undefined,
// method: 'GET',
// origin: 'http://example.com',
// fullUrl: 'http://example.com/?item=1',
// path: '/',
// searchParams: { item: '1' },
// protocol: 'http:',
// host: 'example.com',
// port: ''
// }
```
#### Example - clear call history
```js
const mockAgent = new MockAgent()
mockAgent.clearAllCallHistory()
```
#### Example - call history instance class method
```js
const mockAgent = new MockAgent()
const mockAgentHistory = mockAgent.getCallHistory()
mockAgentHistory?.calls() // returns an array of MockCallHistoryLogs
mockAgentHistory?.firstCall() // returns the first MockCallHistoryLogs or undefined
mockAgentHistory?.lastCall() // returns the last MockCallHistoryLogs or undefined
mockAgentHistory?.nthCall(3) // returns the third MockCallHistoryLogs or undefined
mockAgentHistory?.filterCalls({ path: '/endpoint', hash: '#hash-value' }) // returns an Array of MockCallHistoryLogs WHERE path === /endpoint OR hash === #hash-value
mockAgentHistory?.filterCalls({ path: '/endpoint', hash: '#hash-value' }, { operator: 'AND' }) // returns an Array of MockCallHistoryLogs WHERE path === /endpoint AND hash === #hash-value
mockAgentHistory?.filterCalls(/"data": "{}"/) // returns an Array of MockCallHistoryLogs where any value match regexp
mockAgentHistory?.filterCalls('application/json') // returns an Array of MockCallHistoryLogs where any value === 'application/json'
mockAgentHistory?.filterCalls((log) => log.path === '/endpoint') // returns an Array of MockCallHistoryLogs when given function returns true
mockAgentHistory?.clear() // clear the history
```

node_modules/undici/docs/docs/api/MockCallHistory.md generated vendored Normal file

@@ -0,0 +1,197 @@
# Class: MockCallHistory
Access an instance with:
```js
const mockAgent = new MockAgent({ enableCallHistory: true })
mockAgent.getCallHistory()
// or
const mockAgent = new MockAgent()
mockAgent.enableMockHistory()
mockAgent.getCallHistory()
```
A MockCallHistory instance implements **Symbol.iterator**, letting you iterate over registered logs:
```ts
for (const log of mockAgent.getCallHistory()) {
//...
}
const array: Array<MockCallHistoryLog> = [...mockAgent.getCallHistory()]
const set: Set<MockCallHistoryLog> = new Set(mockAgent.getCallHistory())
```
## class methods
### clear
Clears all registered MockCallHistoryLog entries. This is done automatically when calling `mockAgent.close()`.
```js
mockAgent.clearCallHistory()
// same as
mockAgent.getCallHistory()?.clear()
```
### calls
Get all MockCallHistoryLog registered as an array
```js
mockAgent.getCallHistory()?.calls()
```
### firstCall
Get the first MockCallHistoryLog registered or undefined
```js
mockAgent.getCallHistory()?.firstCall()
```
### lastCall
Get the last MockCallHistoryLog registered or undefined
```js
mockAgent.getCallHistory()?.lastCall()
```
### nthCall
Get the nth MockCallHistoryLog registered or undefined
```js
mockAgent.getCallHistory()?.nthCall(3) // the third MockCallHistoryLog registered
```
### filterCallsByProtocol
Filter MockCallHistoryLog by protocol.
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByProtocol(/https/)
mockAgent.getCallHistory()?.filterCallsByProtocol('https:')
```
### filterCallsByHost
Filter MockCallHistoryLog by host.
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByHost(/localhost/)
mockAgent.getCallHistory()?.filterCallsByHost('localhost:3000')
```
### filterCallsByPort
Filter MockCallHistoryLog by port.
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByPort(/3000/)
mockAgent.getCallHistory()?.filterCallsByPort('3000')
mockAgent.getCallHistory()?.filterCallsByPort('')
```
### filterCallsByOrigin
Filter MockCallHistoryLog by origin.
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByOrigin(/http:\/\/localhost:3000/)
mockAgent.getCallHistory()?.filterCallsByOrigin('http://localhost:3000')
```
### filterCallsByPath
Filter MockCallHistoryLog by path.
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByPath(/api\/v1\/graphql/)
mockAgent.getCallHistory()?.filterCallsByPath('/api/v1/graphql')
```
### filterCallsByHash
Filter MockCallHistoryLog by hash.
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByHash(/hash/)
mockAgent.getCallHistory()?.filterCallsByHash('#hash')
```
### filterCallsByFullUrl
Filter MockCallHistoryLog by fullUrl. fullUrl contains protocol, host, port, path, hash, and query params
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByFullUrl(/https:\/\/localhost:3000\/\?query=value#hash/)
mockAgent.getCallHistory()?.filterCallsByFullUrl('https://localhost:3000/?query=value#hash')
```
### filterCallsByMethod
Filter MockCallHistoryLog by method.
> more details for the first parameter can be found [here](/docs/docs/api/MockCallHistory.md#filter-parameter)
```js
mockAgent.getCallHistory()?.filterCallsByMethod(/POST/)
mockAgent.getCallHistory()?.filterCallsByMethod('POST')
```
### filterCalls
This class method is a convenience alias for applying complex filtering in a single call.
Parameters:
- criteria: the first parameter. A function, RegExp, or object.
  - function: a MockCallHistoryLog is kept only if the function returns `true` for it
  - regexp: a MockCallHistoryLog is kept only if the RegExp matches `MockCallHistoryLog.toString()` ([see](./MockCallHistoryLog.md#to-string))
  - object: an object with MockCallHistoryLog properties as keys, used to apply multiple filters at once. Each value is a [filter parameter](/docs/docs/api/MockCallHistory.md#filter-parameter)
- options: the second parameter. An object.
  - options.operator: `'AND'` or `'OR'` (default `'OR'`). Used only if criteria is an object. See below.
```js
mockAgent.getCallHistory()?.filterCalls((log) => log.hash === value && log.headers?.['authorization'] !== undefined)
mockAgent.getCallHistory()?.filterCalls(/"data": "{ "errors": "wrong body" }"/)
// returns an Array of MockCallHistoryLog which all have
// - a hash containing my-hash
// - OR
// - a path equal to /endpoint
mockAgent.getCallHistory()?.filterCalls({ hash: /my-hash/, path: '/endpoint' })
// returns an Array of MockCallHistoryLog which all have
// - a hash containing my-hash
// - AND
// - a path equal to /endpoint
mockAgent.getCallHistory()?.filterCalls({ hash: /my-hash/, path: '/endpoint' }, { operator: 'AND' })
```
## filter parameter
A filter parameter can be:
- a string - the MockCallHistoryLog is filtered out if `value !== parameterValue`
- `null` - the MockCallHistoryLog is filtered out if `value !== parameterValue`
- `undefined` - the MockCallHistoryLog is filtered out if `value !== parameterValue`
- a RegExp - the MockCallHistoryLog is filtered out if `!parameterValue.test(value)`
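For illustration, a minimal sketch of how a string and a RegExp behave as filter parameters (the origin and interceptor below are hypothetical):
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'

const mockAgent = new MockAgent({ enableCallHistory: true })
mockAgent.disableNetConnect()
setGlobalDispatcher(mockAgent)

mockAgent.get('http://localhost:3000').intercept({ path: '/' }).reply(200, 'ok').persist()
await request('http://localhost:3000/')

// string: a log is kept only on strict equality with the property value
mockAgent.getCallHistory()?.filterCallsByHost('localhost:3000') // 1 log
mockAgent.getCallHistory()?.filterCallsByHost('localhost')      // 0 logs, not an exact match

// RegExp: a log is kept only when the pattern tests true against the property value
mockAgent.getCallHistory()?.filterCallsByHost(/^localhost/)     // 1 log
```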

node_modules/undici/docs/docs/api/MockCallHistoryLog.md generated vendored Normal file

@@ -0,0 +1,43 @@
# Class: MockCallHistoryLog
Access an instance with:
```js
const mockAgent = new MockAgent({ enableCallHistory: true })
mockAgent.getCallHistory()?.firstCall()
```
## class properties
- body `mockAgent.getCallHistory()?.firstCall()?.body`
- headers `mockAgent.getCallHistory()?.firstCall()?.headers` an object
- method `mockAgent.getCallHistory()?.firstCall()?.method` a string
- fullUrl `mockAgent.getCallHistory()?.firstCall()?.fullUrl` a string containing the protocol, origin, path, query and hash
- origin `mockAgent.getCallHistory()?.firstCall()?.origin` a string containing the protocol and the host
- path `mockAgent.getCallHistory()?.firstCall()?.path` a string always starting with `/`
- searchParams `mockAgent.getCallHistory()?.firstCall()?.searchParams` an object
- protocol `mockAgent.getCallHistory()?.firstCall()?.protocol` a string (`https:`)
- host `mockAgent.getCallHistory()?.firstCall()?.host` a string
- port `mockAgent.getCallHistory()?.firstCall()?.port` an empty string or a string containing numbers
- hash `mockAgent.getCallHistory()?.firstCall()?.hash` an empty string or a string starting with `#`
## class methods
### toMap
Returns a Map instance
```js
mockAgent.getCallHistory()?.firstCall()?.toMap()?.get('hash')
// #hash
```
### toString
Returns a string composed of every class property name and value pair
```js
mockAgent.getCallHistory()?.firstCall()?.toString()
// protocol->https:|host->localhost:4000|port->4000|origin->https://localhost:4000|path->/endpoint|hash->#here|searchParams->{"query":"value"}|fullUrl->https://localhost:4000/endpoint?query=value#here|method->PUT|body->"{ "data": "hello" }"|headers->{"content-type":"application/json"}
```

node_modules/undici/docs/docs/api/MockClient.md generated vendored Normal file

@@ -0,0 +1,81 @@
# Class: MockClient
Extends: `undici.Client`
A mock client class that implements the same api as [MockPool](/docs/docs/api/MockPool.md).
## `new MockClient(origin, [options])`
Arguments:
* **origin** `string` - It should only include the **protocol, hostname, and port**.
* **options** `MockClientOptions` - It extends the `Client` options.
Returns: `MockClient`
### Parameter: `MockClientOptions`
Extends: `ClientOptions`
* **agent** `Agent` - the agent to associate this MockClient with.
### Example - Basic MockClient instantiation
We can use MockAgent to instantiate a MockClient ready to be used to intercept specified requests. It will not do anything until it is registered as the agent to use and mock requests are registered.
```js
import { MockAgent } from 'undici'
// Connections must be set to 1 to return a MockClient instance
const mockAgent = new MockAgent({ connections: 1 })
const mockClient = mockAgent.get('http://localhost:3000')
```
## Instance Methods
### `MockClient.intercept(options)`
Implements: [`MockPool.intercept(options)`](/docs/docs/api/MockPool.md#mockpoolinterceptoptions)
### `MockClient.cleanMocks()`
Implements: [`MockPool.cleanMocks()`](/docs/docs/api/MockPool.md#mockpoolcleanmocks)
### `MockClient.close()`
Implements: [`MockPool.close()`](/docs/docs/api/MockPool.md#mockpoolclose)
### `MockClient.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
### `MockClient.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
#### Example - MockClient request
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent({ connections: 1 })
const mockClient = mockAgent.get('http://localhost:3000')
mockClient.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await mockClient.request({
origin: 'http://localhost:3000',
path: '/foo',
method: 'GET'
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```

node_modules/undici/docs/docs/api/MockErrors.md generated vendored Normal file

@@ -0,0 +1,12 @@
# MockErrors
Undici exposes a variety of mock error objects that you can use to enhance your mock error handling.
You can find all the mock error objects inside the `mockErrors` key.
```js
import { mockErrors } from 'undici'
```
| Mock Error | Mock Error Codes | Description |
| --------------------- | ------------------------------- | ---------------------------------------------------------- |
| `MockNotMatchedError` | `UND_MOCK_ERR_MOCK_NOT_MATCHED` | The request does not match any registered mock dispatches. |
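As a hedged sketch of how this error can surface (assuming net connect is disabled and no interceptor matches the request; the origin is hypothetical):
```js
import { MockAgent, setGlobalDispatcher, request, mockErrors } from 'undici'

const mockAgent = new MockAgent()
mockAgent.disableNetConnect()
setGlobalDispatcher(mockAgent)

try {
  // No interceptor is registered for this path, so the mock agent cannot match it
  await request('http://localhost:3000/unmatched')
} catch (err) {
  if (err instanceof mockErrors.MockNotMatchedError) {
    console.log(err.code) // UND_MOCK_ERR_MOCK_NOT_MATCHED
  }
}
```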

node_modules/undici/docs/docs/api/MockPool.md generated vendored Normal file

@@ -0,0 +1,554 @@
# Class: MockPool
Extends: `undici.Pool`
A mock Pool class that implements the Pool API and is used by MockAgent to intercept real requests and return mocked responses.
## `new MockPool(origin, [options])`
Arguments:
* **origin** `string` - It should only include the **protocol, hostname, and port**.
* **options** `MockPoolOptions` - It extends the `Pool` options.
Returns: `MockPool`
### Parameter: `MockPoolOptions`
Extends: `PoolOptions`
* **agent** `Agent` - the agent to associate this MockPool with.
### Example - Basic MockPool instantiation
We can use MockAgent to instantiate a MockPool ready to be used to intercept specified requests. It will not do anything until it is registered as the agent to use and mock requests are registered.
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
```
## Instance Methods
### `MockPool.intercept(options)`
This method defines the interception rules for matching against requests for a MockPool or MockClient. We can intercept multiple times on a single instance, but each intercept is only used once. For example, if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()`, you will get a `MockNotMatchedError` on the second request if you only call `intercept()` once.
When defining interception rules, all the rules must pass for a request to be intercepted. If a request is not intercepted, a real request will be attempted.
| Matcher type | Condition to pass |
|:------------:| -------------------------- |
| `string` | Exact match against string |
| `RegExp` | Regex must pass |
| `Function` | Function must return true |
Arguments:
* **options** `MockPoolInterceptOptions` - Interception options.
Returns: `MockInterceptor` corresponding to the input options.
### Parameter: `MockPoolInterceptOptions`
* **path** `string | RegExp | (path: string) => boolean` - a matcher for the HTTP request path. When a `RegExp` or callback is used, it will match against the request path including all query parameters in alphabetical order. When a `string` is provided, the query parameters can be conveniently specified through the `MockPoolInterceptOptions.query` setting.
* **method** `string | RegExp | (method: string) => boolean` - (optional) - a matcher for the HTTP request method. Defaults to `GET`.
* **body** `string | RegExp | (body: string) => boolean` - (optional) - a matcher for the HTTP request body.
* **headers** `Record<string, string | RegExp | (body: string) => boolean>` - (optional) - a matcher for the HTTP request headers. To be intercepted, a request must match all defined headers. Extra headers not defined here may (or may not) be included in the request and do not affect the interception in any way.
* **query** `Record<string, any> | null` - (optional) - a matcher for the HTTP request query string params. Only applies when a `string` was provided for `MockPoolInterceptOptions.path`.
* **ignoreTrailingSlash** `boolean` - (optional) - set to `true` if the matcher should also match by ignoring potential trailing slashes in `MockPoolInterceptOptions.path`.
### Return: `MockInterceptor`
We can define the behaviour of an intercepted request with the following options.
* **reply** `(statusCode: number, replyData: string | Buffer | object | MockInterceptor.MockResponseDataHandler, responseOptions?: MockResponseOptions) => MockScope` - define a reply for a matching request. You can define the replyData as a callback to read incoming request data. Default for `responseOptions` is `{}`.
* **reply** `(callback: MockInterceptor.MockReplyOptionsCallback) => MockScope` - define a reply for a matching request, allowing dynamic mocking of all reply options rather than just the data.
* **replyWithError** `(error: Error) => MockScope` - define an error for a matching request to throw.
* **defaultReplyHeaders** `(headers: Record<string, string>) => MockInterceptor` - define default headers to be included in subsequent replies. These are in addition to headers on a specific reply.
* **defaultReplyTrailers** `(trailers: Record<string, string>) => MockInterceptor` - define default trailers to be included in subsequent replies. These are in addition to trailers on a specific reply.
* **replyContentLength** `() => MockInterceptor` - define automatically calculated `content-length` headers to be included in subsequent replies.
The reply data of an intercepted request may either be a string, buffer, or JavaScript object. Objects are converted to JSON while strings and buffers are sent as-is.
By default, `reply` and `replyWithError` define the behaviour for the first matching request only. Subsequent requests will not be affected (this can be changed using the returned `MockScope`).
### Parameter: `MockResponseOptions`
* **headers** `Record<string, string>` - headers to be included on the mocked reply.
* **trailers** `Record<string, string>` - trailers to be included on the mocked reply.
### Return: `MockScope`
A `MockScope` is associated with a single `MockInterceptor`. With this, we can configure the default behaviour of an intercepted reply.
* **delay** `(waitInMs: number) => MockScope` - delay the associated reply by a set amount in ms.
* **persist** `() => MockScope` - any matching request will always reply with the defined response indefinitely.
* **times** `(repeatTimes: number) => MockScope` - any matching request will reply with the defined response a fixed amount of times. This is overridden by **persist**.
#### Example - Basic Mocked Request
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
// MockPool
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/foo' }).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Mocked request using reply data callbacks
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/echo',
method: 'GET',
headers: {
'User-Agent': 'undici',
Host: 'example.com'
}
}).reply(200, ({ headers }) => ({ message: headers.get('message') }))
const { statusCode, body, headers } = await request('http://localhost:3000', {
headers: {
message: 'hello world!'
}
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // { "message":"hello world!" }
}
```
#### Example - Mocked request using reply options callback
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/echo',
method: 'GET',
headers: {
'User-Agent': 'undici',
Host: 'example.com'
}
}).reply(({ headers }) => ({ statusCode: 200, data: { message: headers.get('message') } }))
const { statusCode, body, headers } = await request('http://localhost:3000', {
headers: {
message: 'hello world!'
}
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // { "message":"hello world!" }
}
```
#### Example - Basic Mocked requests with multiple intercepts
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).reply(200, 'foo')
mockPool.intercept({
path: '/hello',
method: 'GET',
}).reply(200, 'hello')
const result1 = await request('http://localhost:3000/foo')
console.log('response received', result1.statusCode) // response received 200
for await (const data of result1.body) {
console.log('data', data.toString('utf8')) // data foo
}
const result2 = await request('http://localhost:3000/hello')
console.log('response received', result2.statusCode) // response received 200
for await (const data of result2.body) {
console.log('data', data.toString('utf8')) // data hello
}
```
#### Example - Mocked request with query body, request headers and response headers and trailers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo?hello=there&see=ya',
method: 'POST',
body: 'form1=data1&form2=data2',
headers: {
'User-Agent': 'undici',
Host: 'example.com'
}
}).reply(200, { foo: 'bar' }, {
headers: { 'content-type': 'application/json' },
trailers: { 'Content-MD5': 'test' }
})
const {
statusCode,
headers,
trailers,
body
} = await request('http://localhost:3000/foo?hello=there&see=ya', {
method: 'POST',
body: 'form1=data1&form2=data2',
headers: {
foo: 'bar',
'User-Agent': 'undici',
Host: 'example.com'
}
})
console.log('response received', statusCode) // response received 200
console.log('headers', headers) // { 'content-type': 'application/json' }
for await (const data of body) {
console.log('data', data.toString('utf8')) // '{"foo":"bar"}'
}
console.log('trailers', trailers) // { 'content-md5': 'test' }
```
#### Example - Mocked request using different matchers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: /^GET$/,
body: (value) => value === 'form=data',
headers: {
'User-Agent': 'undici',
Host: /^example.com$/
}
}).reply(200, 'foo')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', {
method: 'GET',
body: 'form=data',
headers: {
foo: 'bar',
'User-Agent': 'undici',
Host: 'example.com'
}
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Mocked request with reply with a defined error
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).replyWithError(new Error('kaboom'))
try {
await request('http://localhost:3000/foo', {
method: 'GET'
})
} catch (error) {
console.error(error) // Error: kaboom
}
```
#### Example - Mocked request with defaultReplyHeaders
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).defaultReplyHeaders({ foo: 'bar' })
.reply(200, 'foo')
const { headers } = await request('http://localhost:3000/foo')
console.log('headers', headers) // headers { foo: 'bar' }
```
#### Example - Mocked request with defaultReplyTrailers
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).defaultReplyTrailers({ foo: 'bar' })
.reply(200, 'foo')
const { trailers } = await request('http://localhost:3000/foo')
console.log('trailers', trailers) // trailers { foo: 'bar' }
```
#### Example - Mocked request with automatic content-length calculation
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).replyContentLength().reply(200, 'foo')
const { headers } = await request('http://localhost:3000/foo')
console.log('headers', headers) // headers { 'content-length': '3' }
```
#### Example - Mocked request with automatic content-length calculation on an object
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).replyContentLength().reply(200, { foo: 'bar' })
const { headers } = await request('http://localhost:3000/foo')
console.log('headers', headers) // headers { 'content-length': '13' }
```
#### Example - Mocked request with persist enabled
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).reply(200, 'foo').persist()
const result1 = await request('http://localhost:3000/foo')
// Will match and return mocked data
const result2 = await request('http://localhost:3000/foo')
// Will match and return mocked data
// Etc
```
#### Example - Mocked request with times enabled
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET'
}).reply(200, 'foo').times(2)
const result1 = await request('http://localhost:3000/foo')
// Will match and return mocked data
const result2 = await request('http://localhost:3000/foo')
// Will match and return mocked data
const result3 = await request('http://localhost:3000/foo')
// Will not match and will attempt a real request
```
#### Example - Mocked request with path callback
```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'
import querystring from 'querystring'
const mockAgent = new MockAgent()
setGlobalDispatcher(mockAgent)
const mockPool = mockAgent.get('http://localhost:3000')
const matchPath = requestPath => {
const [pathname, search] = requestPath.split('?')
const requestQuery = querystring.parse(search)
if (!pathname.startsWith('/foo')) {
return false
}
if (!Object.keys(requestQuery).includes('foo') || requestQuery.foo !== 'bar') {
return false
}
return true
}
mockPool.intercept({
path: matchPath,
method: 'GET'
}).reply(200, 'foo')
const result = await request('http://localhost:3000/foo?foo=bar')
// Will match and return mocked data
```
### `MockPool.close()`
Closes the mock pool and de-registers from associated MockAgent.
Returns: `Promise<void>`
#### Example - clean up after tests are complete
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
await mockPool.close()
```
### `MockPool.dispatch(options, handlers)`
Implements [`Dispatcher.dispatch(options, handlers)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
### `MockPool.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
#### Example - MockPool request
```js
import { MockAgent } from 'undici'
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({
path: '/foo',
method: 'GET',
}).reply(200, 'foo')
const {
statusCode,
body
} = await mockPool.request({
origin: 'http://localhost:3000',
path: '/foo',
method: 'GET'
})
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
### `MockPool.cleanMocks()`
This method cleans up all the prepared mocks.
Returns: `void`
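For instance, a minimal sketch (the origin and interceptor are hypothetical):
```js
import { MockAgent } from 'undici'

const mockAgent = new MockAgent()
const mockPool = mockAgent.get('http://localhost:3000')

mockPool.intercept({ path: '/foo' }).reply(200, 'foo')

// Removes every interceptor prepared on this MockPool
mockPool.cleanMocks()
```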

node_modules/undici/docs/docs/api/Pool.md generated vendored Normal file

@@ -0,0 +1,84 @@
# Class: Pool
Extends: `undici.Dispatcher`
A pool of [Client](/docs/docs/api/Client.md) instances connected to the same upstream target.
Requests are not guaranteed to be dispatched in order of invocation.
## `new Pool(url[, options])`
Arguments:
* **url** `URL | string` - It should only include the **protocol, hostname, and port**.
* **options** `PoolOptions` (optional)
### Parameter: `PoolOptions`
Extends: [`ClientOptions`](/docs/docs/api/Client.md#parameter-clientoptions)
* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Client(origin, opts)`
* **connections** `number | null` (optional) - Default: `null` - The number of `Client` instances to create. When set to `null`, the `Pool` instance will create an unlimited amount of `Client` instances.
* **clientTtl** `number | null` (optional) - Default: `null` - The amount of time before a `Client` instance is removed from the `Pool` and closed. When set to `null`, `Client` instances will not be removed or closed based on age.
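For example, a minimal sketch (the upstream origin and path are hypothetical) of a pool capped at 10 `Client` instances:
```js
import { Pool } from 'undici'

// Up to 10 Client instances against a single upstream origin
const pool = new Pool('http://localhost:3000', { connections: 10 })

const { statusCode, body } = await pool.request({ path: '/foo', method: 'GET' })
console.log('status', statusCode)
console.log(await body.text())

await pool.close()
```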
## Instance Properties
### `Pool.closed`
Implements [Client.closed](/docs/docs/api/Client.md#clientclosed)
### `Pool.destroyed`
Implements [Client.destroyed](/docs/docs/api/Client.md#clientdestroyed)
### `Pool.stats`
Returns [`PoolStats`](PoolStats.md) instance for this pool.
## Instance Methods
### `Pool.close([callback])`
Implements [`Dispatcher.close([callback])`](/docs/docs/api/Dispatcher.md#dispatcherclosecallback-promise).
### `Pool.destroy([error, callback])`
Implements [`Dispatcher.destroy([error, callback])`](/docs/docs/api/Dispatcher.md#dispatcherdestroyerror-callback-promise).
### `Pool.connect(options[, callback])`
See [`Dispatcher.connect(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherconnectoptions-callback).
### `Pool.dispatch(options, handler)`
Implements [`Dispatcher.dispatch(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler).
### `Pool.pipeline(options, handler)`
See [`Dispatcher.pipeline(options, handler)`](/docs/docs/api/Dispatcher.md#dispatcherpipelineoptions-handler).
### `Pool.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
### `Pool.stream(options, factory[, callback])`
See [`Dispatcher.stream(options, factory[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback).
### `Pool.upgrade(options[, callback])`
See [`Dispatcher.upgrade(options[, callback])`](/docs/docs/api/Dispatcher.md#dispatcherupgradeoptions-callback).
## Instance Events
### Event: `'connect'`
See [Dispatcher Event: `'connect'`](/docs/docs/api/Dispatcher.md#event-connect).
### Event: `'disconnect'`
See [Dispatcher Event: `'disconnect'`](/docs/docs/api/Dispatcher.md#event-disconnect).
### Event: `'drain'`
See [Dispatcher Event: `'drain'`](/docs/docs/api/Dispatcher.md#event-drain).
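A short sketch of subscribing to these events (the origin is hypothetical and the handler bodies are illustrative only):
```js
import { Pool } from 'undici'

const pool = new Pool('http://localhost:3000')

pool.on('connect', (origin) => {
  console.log('socket connected to', origin.href)
})

pool.on('disconnect', (origin, targets, error) => {
  console.log('socket disconnected from', origin.href, error?.code)
})

pool.on('drain', (origin) => {
  console.log('pool drained for', origin.href)
})
```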

node_modules/undici/docs/docs/api/PoolStats.md generated vendored Normal file

@@ -0,0 +1,35 @@
# Class: PoolStats
Aggregate stats for a [Pool](/docs/docs/api/Pool.md) or [BalancedPool](/docs/docs/api/BalancedPool.md).
## `new PoolStats(pool)`
Arguments:
* **pool** `Pool` - Pool or BalancedPool from which to return stats.
## Instance Properties
### `PoolStats.connected`
Number of open socket connections in this pool.
### `PoolStats.free`
Number of open socket connections in this pool that do not have an active request.
### `PoolStats.pending`
Number of pending requests across all clients in this pool.
### `PoolStats.queued`
Number of queued requests across all clients in this pool.
### `PoolStats.running`
Number of currently active requests across all clients in this pool.
### `PoolStats.size`
Number of active, pending, or queued requests across all clients in this pool.
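A minimal sketch of reading these counters through `Pool.stats` (the origin is hypothetical):
```js
import { Pool } from 'undici'

const pool = new Pool('http://localhost:3000', { connections: 10 })

// `pool.stats` returns a PoolStats instance describing the current state of the pool
const { connected, free, pending, queued, running, size } = pool.stats
console.log({ connected, free, pending, queued, running, size })
```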

node_modules/undici/docs/docs/api/ProxyAgent.md generated vendored Normal file

@@ -0,0 +1,229 @@
# Class: ProxyAgent
Extends: `undici.Dispatcher`
A Proxy Agent class that implements the Agent API. It allows the connection through proxy in a simple way.
## `new ProxyAgent([options])`
Arguments:
* **options** `ProxyAgentOptions` (required) - It extends the `Agent` options.
Returns: `ProxyAgent`
### Parameter: `ProxyAgentOptions`
Extends: [`AgentOptions`](/docs/docs/api/Agent.md#parameter-agentoptions)
> It omits `AgentOptions#connect`.
> **Note:** When `AgentOptions#connections` is set, and different from `0`, the non-standard [`proxy-connection` header](https://udger.com/resources/http-request-headers-detail?header=Proxy-Connection) will be set to `keep-alive` in the request.
* **uri** `string | URL` (required) - The URI of the proxy server. This can be provided as a string, as an instance of the URL class, or as an object with a `uri` property of type string.
If the `uri` is provided as a string or `uri` is an object with an `uri` property of type string, then it will be parsed into a `URL` object according to the [WHATWG URL Specification](https://url.spec.whatwg.org).
For detailed information on the parsing process and potential validation errors, please refer to the ["Writing" section](https://url.spec.whatwg.org/#writing) of the WHATWG URL Specification.
* **token** `string` (optional) - A token string to be used for authentication with the proxy.
* **auth** `string` (**deprecated**) - Use token.
* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)`
* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. It extends from [`Client#ConnectOptions`](/docs/docs/api/Client.md#parameter-connectoptions).
* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. It extends from [`Client#ConnectOptions`](/docs/docs/api/Client.md#parameter-connectoptions).
* **proxyTunnel** `boolean` (optional) - For connections involving secure protocols, Undici will always establish a tunnel via the HTTP2 CONNECT extension. If proxyTunnel is set to true, this will occur for unsecured proxy/endpoint connections as well. Currently, there is no way to facilitate HTTP1 IP tunneling as described in https://www.rfc-editor.org/rfc/rfc9484.html#name-http-11-request. If proxyTunnel is set to false (the default), ProxyAgent connections where both the Proxy and Endpoint are unsecured will issue all requests to the Proxy, and prefix the endpoint request path with the endpoint origin address.
Examples:
```js
import { ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
// or
const proxyAgent = new ProxyAgent(new URL('my.proxy.server'))
// or
const proxyAgent = new ProxyAgent({ uri: 'my.proxy.server' })
// or
const proxyAgent = new ProxyAgent({
uri: new URL('my.proxy.server'),
proxyTls: {
signal: AbortSignal.timeout(1000)
}
})
```
#### Example - Basic ProxyAgent instantiation
This will instantiate the ProxyAgent. It will not do anything until registered as the agent to use with requests.
```js
import { ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
```
#### Example - Basic Proxy Request with global agent dispatcher
```js
import { setGlobalDispatcher, request, ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
setGlobalDispatcher(proxyAgent)
const { statusCode, body } = await request('http://localhost:3000/foo')
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Request with local agent dispatcher
```js
import { ProxyAgent, request } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
const {
statusCode,
body
} = await request('http://localhost:3000/foo', { dispatcher: proxyAgent })
console.log('response received', statusCode) // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')) // data foo
}
```
#### Example - Basic Proxy Request with authentication
```js
import { setGlobalDispatcher, request, ProxyAgent } from 'undici';
const proxyAgent = new ProxyAgent({
uri: 'my.proxy.server',
// token: 'Bearer xxxx'
token: `Basic ${Buffer.from('username:password').toString('base64')}`
});
setGlobalDispatcher(proxyAgent);
const { statusCode, body } = await request('http://localhost:3000/foo');
console.log('response received', statusCode); // response received 200
for await (const data of body) {
console.log('data', data.toString('utf8')); // data foo
}
```
### `ProxyAgent.close()`
Closes the proxy agent and waits for registered pools and clients to also close before resolving.
Returns: `Promise<void>`
#### Example - clean up after tests are complete
```js
import { ProxyAgent, setGlobalDispatcher } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
setGlobalDispatcher(proxyAgent)
await proxyAgent.close()
```
### `ProxyAgent.dispatch(options, handlers)`
Implements [`Agent.dispatch(options, handlers)`](/docs/docs/api/Agent.md#parameter-agentdispatchoptions).
### `ProxyAgent.request(options[, callback])`
See [`Dispatcher.request(options [, callback])`](/docs/docs/api/Dispatcher.md#dispatcherrequestoptions-callback).
#### Example - ProxyAgent with Fetch
This example demonstrates how to use `fetch` with a proxy via `ProxyAgent`. It is particularly useful for scenarios requiring proxy tunneling.
```javascript
import { ProxyAgent, fetch } from 'undici';
// Define the ProxyAgent
const proxyAgent = new ProxyAgent('http://localhost:8000');
// Make a GET request through the proxy
const response = await fetch('http://localhost:3000/foo', {
dispatcher: proxyAgent,
method: 'GET',
});
console.log('Response status:', response.status);
console.log('Response data:', await response.text());
```
---
#### Example - ProxyAgent with a Custom Proxy Server
This example shows how to create a custom proxy server and use it with `ProxyAgent`.
```javascript
import * as http from 'node:http';
import { createProxy } from 'proxy';
import { ProxyAgent, fetch } from 'undici';
// Create a proxy server
const proxyServer = createProxy(http.createServer());
proxyServer.listen(8000, () => {
console.log('Proxy server running on port 8000');
});
// Define and use the ProxyAgent
const proxyAgent = new ProxyAgent('http://localhost:8000');
const response = await fetch('http://example.com', {
dispatcher: proxyAgent,
method: 'GET',
});
console.log('Response status:', response.status);
console.log('Response data:', await response.text());
```
---
#### Example - ProxyAgent with HTTPS Tunneling
This example demonstrates how to perform HTTPS tunneling using a proxy.
```javascript
import { ProxyAgent, fetch } from 'undici';
// Define a ProxyAgent for HTTPS proxy
const proxyAgent = new ProxyAgent('https://secure.proxy.server');
// Make a request to an HTTPS endpoint via the proxy
const response = await fetch('https://secure.endpoint.com/api/data', {
dispatcher: proxyAgent,
method: 'GET',
});
console.log('Response status:', response.status);
console.log('Response data:', await response.json());
```
#### Example - ProxyAgent as a Global Dispatcher
`ProxyAgent` can be configured as a global dispatcher, making it available for all requests without explicitly passing it. This simplifies code and is useful when a single proxy configuration applies to all requests.
```javascript
import { ProxyAgent, setGlobalDispatcher, fetch } from 'undici';
// Define and configure the ProxyAgent
const proxyAgent = new ProxyAgent('http://localhost:8000');
setGlobalDispatcher(proxyAgent);
// Make requests without specifying the dispatcher
const response = await fetch('http://example.com');
console.log('Response status:', response.status);
console.log('Response data:', await response.text());
```

node_modules/undici/docs/docs/api/RedirectHandler.md generated vendored Normal file

@@ -0,0 +1,96 @@
# Class: RedirectHandler
A class that handles redirection logic for HTTP requests.
## `new RedirectHandler(dispatch, maxRedirections, opts, handler, redirectionLimitReached)`
Arguments:
- **dispatch** `function` - The dispatch function to be called after every redirection.
- **maxRedirections** `number` - Maximum number of redirections allowed.
- **opts** `object` - Options for handling redirection.
- **handler** `object` - An object containing handlers for different stages of the request lifecycle.
- **redirectionLimitReached** `boolean` (default: `false`) - A flag that the implementer can provide to enable or disable the feature. If set to `false`, it indicates that the caller doesn't want to use the feature and prefers the old behavior.
Returns: `RedirectHandler`
### Parameters
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandler) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every redirection.
- **maxRedirections** `number` (required) - Maximum number of redirections allowed.
- **opts** `object` (required) - Options for handling redirection.
- **handler** `object` (required) - Handlers for different stages of the request lifecycle.
- **redirectionLimitReached** `boolean` (default: `false`) - A flag that the implementer can provide to enable or disable the feature. If set to `false`, it indicates that the caller doesn't want to use the feature and prefers the old behavior.
### Properties
- **location** `string` - The current redirection location.
- **abort** `function` - The abort function.
- **opts** `object` - The options for handling redirection.
- **maxRedirections** `number` - Maximum number of redirections allowed.
- **handler** `object` - Handlers for different stages of the request lifecycle.
- **history** `Array` - An array representing the history of URLs during redirection.
- **redirectionLimitReached** `boolean` - Indicates whether the redirection limit has been reached.
### Methods
#### `onConnect(abort)`
Called when the connection is established.
Parameters:
- **abort** `function` - The abort function.
#### `onUpgrade(statusCode, headers, socket)`
Called when an upgrade is requested.
Parameters:
- **statusCode** `number` - The HTTP status code.
- **headers** `object` - The headers received in the response.
- **socket** `object` - The socket object.
#### `onError(error)`
Called when an error occurs.
Parameters:
- **error** `Error` - The error that occurred.
#### `onHeaders(statusCode, headers, resume, statusText)`
Called when headers are received.
Parameters:
- **statusCode** `number` - The HTTP status code.
- **headers** `object` - The headers received in the response.
- **resume** `function` - The resume function.
- **statusText** `string` - The status text.
#### `onData(chunk)`
Called when data is received.
Parameters:
- **chunk** `Buffer` - The data chunk received.
#### `onComplete(trailers)`
Called when the request is complete.
Parameters:
- **trailers** `object` - The trailers received.
#### `onBodySent(chunk)`
Called when the request body is sent.
Parameters:
- **chunk** `Buffer` - The chunk of the request body sent.
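A rough sketch of wiring a `RedirectHandler` around a plain `Client` dispatch, based only on the signature above; the origin, path, and terminal handler are hypothetical and a real integration may differ:
```js
import { Client, RedirectHandler } from 'undici'

const client = new Client('http://localhost:3000')
const opts = { path: '/old-location', method: 'GET' }

// Hypothetical terminal handler using the lifecycle hooks described above
const handler = {
  onConnect (abort) {},
  onHeaders (statusCode, headers, resume, statusText) {
    console.log('final status', statusCode)
    return true
  },
  onData (chunk) { return true },
  onComplete (trailers) { console.log('done') },
  onError (err) { console.error(err) }
}

// Follow up to 3 redirections by re-dispatching through the client
const redirectHandler = new RedirectHandler(
  client.dispatch.bind(client), // dispatch used for each followed redirect
  3,                            // maxRedirections
  opts,
  handler
)

client.dispatch(opts, redirectHandler)
```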

node_modules/undici/docs/docs/api/RetryAgent.md generated vendored Normal file

@@ -0,0 +1,50 @@
# Class: RetryAgent
Extends: `undici.Dispatcher`
An `undici.Dispatcher` that automatically retries a request.
It wraps an `undici.RetryHandler`.
## `new RetryAgent(dispatcher, [options])`
Arguments:
* **dispatcher** `undici.Dispatcher` (required) - the dispatcher to wrap
* **options** `RetryHandlerOptions` (optional) - the options
Returns: `RetryAgent`
### Parameter: `RetryHandlerOptions`
- **throwOnError** `boolean` (optional) - Set to `false` to prevent throwing an error on the last retry attempt; useful if you need the response body on server errors or have a custom error handler. Default: `true`
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
**`RetryContext`**
- `state`: `RetryState` - Current retry state. It can be mutated.
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
Example:
```js
import { Agent, RetryAgent } from 'undici'
const agent = new RetryAgent(new Agent())
const res = await agent.request({
method: 'GET',
origin: 'http://example.com',
path: '/',
})
console.log(res.statusCode)
console.log(await res.body.text())
```

node_modules/undici/docs/docs/api/RetryHandler.md generated vendored Normal file

@@ -0,0 +1,118 @@
# Class: RetryHandler
Extends: `undici.DispatcherHandlers`
A handler class that implements the retry logic for a request.
## `new RetryHandler(dispatchOptions, retryHandlers, [retryOptions])`
Arguments:
- **options** `Dispatch.DispatchOptions & RetryOptions` (required) - It is an intersection of `Dispatcher.DispatchOptions` and `RetryOptions`.
- **retryHandlers** `RetryHandlers` (required) - Object containing the `dispatch` to be used on every retry, and `handler` for handling the `dispatch` lifecycle.
Returns: `RetryHandler`
### Parameter: `Dispatch.DispatchOptions & RetryOptions`
Extends: [`Dispatch.DispatchOptions`](/docs/docs/api/Dispatcher.md#parameter-dispatchoptions).
#### `RetryOptions`
- **throwOnError** `boolean` (optional) - Set to `false` to prevent throwing an error on the last retry attempt; useful if you need the response body on server errors or have a custom error handler.
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => number | null` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', 'UND_ERR_SOCKET']`
**`RetryContext`**
- `state`: `RetryState` - Current retry state. It can be mutated.
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
**`RetryState`**
It represents the retry state for a given request.
- `counter`: `number` - Current retry attempt.
### Parameter `RetryHandlers`
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandler) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every retry.
- **handler** Extends [`Dispatch.DispatchHandler`](/docs/docs/api/Dispatcher.md#dispatcherdispatchoptions-handler) (required) - Handler function to be called after the request is successful or the retries are exhausted.
>__Note__: The `RetryHandler` does not retry over stateful bodies (e.g. streams, AsyncIterable) as those, once consumed, cannot be reused. In these situations the `RetryHandler` will identify
>the body as stateful and will not retry the request, rejecting with the error `UND_ERR_REQ_RETRY`.
Examples:
```js
const client = new Client(`http://localhost:${server.address().port}`);
const chunks = [];
const handler = new RetryHandler(
{
...dispatchOptions,
retryOptions: {
// custom retry function
retry: function (err, state, callback) {
counter++;
if (err.code && err.code === "UND_ERR_DESTROYED") {
callback(err);
return;
}
if (err.statusCode === 206) {
callback(err);
return;
}
setTimeout(() => callback(null), 1000);
},
},
},
{
dispatch: (...args) => {
return client.dispatch(...args);
},
handler: {
onConnect() {},
onBodySent() {},
onHeaders(status, _rawHeaders, resume, _statusMessage) {
// do something with headers
},
onData(chunk) {
chunks.push(chunk);
return true;
},
onComplete() {},
onError() {
// handle error properly
},
},
}
);
```
#### Example - Basic RetryHandler with defaults
```js
const client = new Client(`http://localhost:${server.address().port}`);
const handler = new RetryHandler(dispatchOptions, {
dispatch: client.dispatch.bind(client),
handler: {
onConnect() {},
onBodySent() {},
onHeaders(status, _rawHeaders, resume, _statusMessage) {},
onData(chunk) {},
onComplete() {},
onError(err) {},
},
});
```

node_modules/undici/docs/docs/api/SnapshotAgent.md generated vendored Normal file

@@ -0,0 +1,616 @@
# SnapshotAgent
The `SnapshotAgent` provides a powerful way to record and replay HTTP requests for testing purposes. It extends `MockAgent` to enable automatic snapshot testing, eliminating the need to manually define mock responses.
## Use Cases
- **Integration Testing**: Record real API interactions and replay them in tests
- **Offline Development**: Work with APIs without network connectivity
- **Consistent Test Data**: Ensure tests use the same responses across runs
- **API Contract Testing**: Capture and validate API behavior over time
## Constructor
```javascript
new SnapshotAgent([options])
```
### Parameters
- **options** `Object` (optional)
- **mode** `String` - The snapshot mode: `'record'`, `'playback'`, or `'update'`. Default: `'record'`
- **snapshotPath** `String` - Path to the snapshot file for loading/saving
- **maxSnapshots** `Number` - Maximum number of snapshots to keep in memory. Default: `Infinity`
- **autoFlush** `Boolean` - Whether to automatically save snapshots to disk. Default: `false`
- **flushInterval** `Number` - Interval in milliseconds for auto-flush. Default: `30000`
- **matchHeaders** `Array<String>` - Specific headers to include in request matching. Default: all headers
- **ignoreHeaders** `Array<String>` - Headers to ignore during request matching
- **excludeHeaders** `Array<String>` - Headers to exclude from snapshots (for security)
- **matchBody** `Boolean` - Whether to include request body in matching. Default: `true`
- **matchQuery** `Boolean` - Whether to include query parameters in matching. Default: `true`
- **caseSensitive** `Boolean` - Whether header matching is case-sensitive. Default: `false`
- **shouldRecord** `Function` - Callback to determine if a request should be recorded
- **shouldPlayback** `Function` - Callback to determine if a request should be played back
- **excludeUrls** `Array` - URL patterns (strings or RegExp) to exclude from recording/playback
- All other options from `MockAgent` are supported
### Modes
#### Record Mode (`'record'`)
Makes real HTTP requests and saves the responses to snapshots.
```javascript
import { SnapshotAgent, setGlobalDispatcher } from 'undici'
const agent = new SnapshotAgent({
mode: 'record',
snapshotPath: './test/snapshots/api-calls.json'
})
setGlobalDispatcher(agent)
// Makes real requests and records them
const response = await fetch('https://api.example.com/users')
const users = await response.json()
// Save recorded snapshots
await agent.saveSnapshots()
```
#### Playback Mode (`'playback'`)
Replays recorded responses without making real HTTP requests.
```javascript
import { SnapshotAgent, setGlobalDispatcher } from 'undici'
const agent = new SnapshotAgent({
mode: 'playback',
snapshotPath: './test/snapshots/api-calls.json'
})
setGlobalDispatcher(agent)
// Uses recorded response instead of real request
const response = await fetch('https://api.example.com/users')
```
#### Update Mode (`'update'`)
Uses existing snapshots when available, but records new ones for missing requests.
```javascript
import { SnapshotAgent, setGlobalDispatcher } from 'undici'
const agent = new SnapshotAgent({
mode: 'update',
snapshotPath: './test/snapshots/api-calls.json'
})
setGlobalDispatcher(agent)
// Uses snapshot if exists, otherwise makes real request and records it
const response = await fetch('https://api.example.com/new-endpoint')
```
## Instance Methods
### `agent.saveSnapshots([filePath])`
Saves all recorded snapshots to a file.
#### Parameters
- **filePath** `String` (optional) - Path to save snapshots. Uses constructor `snapshotPath` if not provided.
#### Returns
`Promise<void>`
```javascript
await agent.saveSnapshots('./custom-snapshots.json')
```
## Advanced Configuration
### Header Filtering
Control which headers are used for request matching and what gets stored in snapshots:
```javascript
const agent = new SnapshotAgent({
mode: 'record',
snapshotPath: './snapshots.json',
// Only match these specific headers
matchHeaders: ['content-type', 'accept'],
// Ignore these headers during matching (but still store them)
ignoreHeaders: ['user-agent', 'date'],
// Exclude sensitive headers from snapshots entirely
excludeHeaders: ['authorization', 'x-api-key', 'cookie']
})
```
### Custom Request/Response Filtering
Use callback functions to determine what gets recorded or played back:
```javascript
const agent = new SnapshotAgent({
mode: 'record',
snapshotPath: './snapshots.json',
// Only record GET requests to specific endpoints
shouldRecord: (requestOpts) => {
const url = new URL(requestOpts.path, requestOpts.origin)
return requestOpts.method === 'GET' && url.pathname.startsWith('/api/v1/')
},
// Skip authentication endpoints during playback
shouldPlayback: (requestOpts) => {
const url = new URL(requestOpts.path, requestOpts.origin)
return !url.pathname.includes('/auth/')
}
})
```
### URL Pattern Exclusion
Exclude specific URLs from recording/playback using patterns:
```javascript
const agent = new SnapshotAgent({
mode: 'record',
snapshotPath: './snapshots.json',
excludeUrls: [
'https://analytics.example.com', // String match
/\/api\/v\d+\/health/, // Regex pattern
'telemetry' // Substring match
]
})
```
### Memory Management
Configure automatic memory and disk management:
```javascript
const agent = new SnapshotAgent({
mode: 'record',
snapshotPath: './snapshots.json',
// Keep only 1000 snapshots in memory
maxSnapshots: 1000,
// Automatically save to disk every 30 seconds
autoFlush: true,
flushInterval: 30000
})
```
### Sequential Response Handling
Handle multiple responses for the same request (similar to nock):
```javascript
// In record mode, multiple identical requests get recorded as separate responses
const agent = new SnapshotAgent({ mode: 'record', snapshotPath: './sequential.json' })
// First call returns response A
await fetch('https://api.example.com/random')
// Second call returns response B
await fetch('https://api.example.com/random')
await agent.saveSnapshots()
// In playback mode, calls return responses in sequence
const playbackAgent = new SnapshotAgent({ mode: 'playback', snapshotPath: './sequential.json' })
// Returns response A
const first = await fetch('https://api.example.com/random')
// Returns response B
const second = await fetch('https://api.example.com/random')
// Third call repeats the last response (B)
const third = await fetch('https://api.example.com/random')
```
## Managing Snapshots
### Replacing Existing Snapshots
```javascript
// Load existing snapshots
await agent.loadSnapshots('./old-snapshots.json')
// Get snapshot data
const recorder = agent.getRecorder()
const snapshots = recorder.getSnapshots()
// Modify or filter snapshots
const filteredSnapshots = snapshots.filter(s =>
!s.request.url.includes('deprecated')
)
// Replace all snapshots
agent.replaceSnapshots(filteredSnapshots.map((snapshot, index) => ({
hash: `new-hash-${index}`,
snapshot
})))
// Save updated snapshots
await agent.saveSnapshots('./updated-snapshots.json')
```
### `agent.loadSnapshots([filePath])`
Loads snapshots from a file.
#### Parameters
- **filePath** `String` (optional) - Path to load snapshots from. Uses constructor `snapshotPath` if not provided.
#### Returns
`Promise<void>`
```javascript
await agent.loadSnapshots('./existing-snapshots.json')
```
### `agent.getRecorder()`
Gets the underlying `SnapshotRecorder` instance.
#### Returns
`SnapshotRecorder`
```javascript
const recorder = agent.getRecorder()
console.log(`Recorded ${recorder.size()} interactions`)
```
### `agent.getMode()`
Gets the current snapshot mode.
#### Returns
`String` - The current mode (`'record'`, `'playback'`, or `'update'`)
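For example (snapshot path is hypothetical):
```javascript
import { SnapshotAgent } from 'undici'

const agent = new SnapshotAgent({ mode: 'playback', snapshotPath: './snapshots.json' })
console.log(agent.getMode()) // 'playback'
```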
### `agent.clearSnapshots()`
Clears all recorded snapshots from memory.
```javascript
agent.clearSnapshots()
```
## Working with Different Request Types
### GET Requests
```javascript
// Record mode
const agent = new SnapshotAgent({ mode: 'record', snapshotPath: './get-snapshots.json' })
setGlobalDispatcher(agent)
const response = await fetch('https://jsonplaceholder.typicode.com/posts/1')
const post = await response.json()
await agent.saveSnapshots()
```
### POST Requests with Body
```javascript
// Record mode
const agent = new SnapshotAgent({ mode: 'record', snapshotPath: './post-snapshots.json' })
setGlobalDispatcher(agent)
const response = await fetch('https://jsonplaceholder.typicode.com/posts', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ title: 'Test Post', body: 'Content' })
})
await agent.saveSnapshots()
```
### Using with `undici.request`
SnapshotAgent works with all undici APIs, not just fetch:
```javascript
import { SnapshotAgent, request, setGlobalDispatcher } from 'undici'
const agent = new SnapshotAgent({ mode: 'record', snapshotPath: './request-snapshots.json' })
setGlobalDispatcher(agent)
const { statusCode, headers, body } = await request('https://api.example.com/data')
const data = await body.json()
await agent.saveSnapshots()
```
## Test Integration
### Basic Test Setup
```javascript
import { test } from 'node:test'
import { SnapshotAgent, setGlobalDispatcher, getGlobalDispatcher } from 'undici'
test('API integration test', async (t) => {
const originalDispatcher = getGlobalDispatcher()
const agent = new SnapshotAgent({
mode: 'playback',
snapshotPath: './test/snapshots/api-test.json'
})
setGlobalDispatcher(agent)
t.after(() => setGlobalDispatcher(originalDispatcher))
// This will use recorded data
const response = await fetch('https://api.example.com/users')
const users = await response.json()
assert(Array.isArray(users))
assert(users.length > 0)
})
```
### Environment-Based Mode Selection
```javascript
const mode = process.env.SNAPSHOT_MODE || 'playback'
const agent = new SnapshotAgent({
mode,
snapshotPath: './test/snapshots/integration.json'
})
// Run with: SNAPSHOT_MODE=record npm test (to record)
// Run with: npm test (to playback)
```
### Test Helper Function
```javascript
function createSnapshotAgent(testName, mode = 'playback') {
return new SnapshotAgent({
mode,
snapshotPath: `./test/snapshots/${testName}.json`
})
}
test('user API test', async (t) => {
const agent = createSnapshotAgent('user-api')
setGlobalDispatcher(agent)
// Test implementation...
})
```
## Snapshot File Format
Snapshots are stored as JSON with the following structure:
```json
[
{
"hash": "dGVzdC1oYXNo...",
"snapshot": {
"request": {
"method": "GET",
"url": "https://api.example.com/users",
"headers": {
"authorization": "Bearer token"
},
"body": undefined
},
"response": {
"statusCode": 200,
"headers": {
"content-type": "application/json"
},
"body": "eyJkYXRhIjoidGVzdCJ9", // base64 encoded
"trailers": {}
},
"timestamp": "2024-01-01T00:00:00.000Z"
}
}
]
```
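As a small sketch based on the structure above, a recorded response body can be decoded for offline inspection (the file name is illustrative):
```javascript
import { readFile } from 'node:fs/promises'

const snapshots = JSON.parse(await readFile('./snapshots.json', 'utf8'))
const { response } = snapshots[0].snapshot

// Response bodies are stored base64-encoded
const text = Buffer.from(response.body, 'base64').toString('utf8')
console.log(text) // e.g. '{"data":"test"}'
```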
## Security Considerations
### Sensitive Data in Snapshots
By default, SnapshotAgent records all headers and request/response data. For production use, always exclude sensitive information:
```javascript
const agent = new SnapshotAgent({
mode: 'record',
snapshotPath: './snapshots.json',
// Exclude sensitive headers from snapshots
excludeHeaders: [
'authorization',
'x-api-key',
'cookie',
'set-cookie',
'x-auth-token',
'x-csrf-token'
],
// Filter out requests with sensitive data
shouldRecord: (requestOpts) => {
const url = new URL(requestOpts.path, requestOpts.origin)
// Don't record authentication endpoints
if (url.pathname.includes('/auth/') || url.pathname.includes('/login')) {
return false
}
// Don't record if request contains sensitive body data
if (requestOpts.body && typeof requestOpts.body === 'string') {
const body = requestOpts.body.toLowerCase()
if (body.includes('password') || body.includes('secret')) {
return false
}
}
return true
}
})
```
### Snapshot File Security
**Important**: Snapshot files may contain sensitive data. Handle them securely:
- ✅ Add snapshot files to `.gitignore` if they contain real API data
- ✅ Use environment-specific snapshots (dev/staging/prod)
- ✅ Regularly review snapshot contents for sensitive information
- ✅ Use the `excludeHeaders` option for production snapshots
- ❌ Never commit snapshots with real authentication tokens
- ❌ Don't share snapshot files containing personal data
```gitignore
# Exclude snapshots with real data
/test/snapshots/production-*.json
/test/snapshots/*-real-data.json
# Include sanitized test snapshots
!/test/snapshots/mock-*.json
```
## Error Handling
### Missing Snapshots in Playback Mode
```javascript
try {
const response = await fetch('https://api.example.com/nonexistent')
} catch (error) {
if (error.message.includes('No snapshot found')) {
// Handle missing snapshot
console.log('Snapshot not found for this request')
}
}
```
### Handling Network Errors in Record Mode
```javascript
const agent = new SnapshotAgent({ mode: 'record', snapshotPath: './snapshots.json' })
try {
const response = await fetch('https://nonexistent-api.example.com/data')
} catch (error) {
// Network errors are not recorded as snapshots
console.log('Network error:', error.message)
}
```
## Best Practices
### 1. Organize Snapshots by Test Suite
```javascript
// Use descriptive snapshot file names
const agent = new SnapshotAgent({
mode: 'playback',
snapshotPath: `./test/snapshots/${testSuiteName}-${testName}.json`
})
```
### 2. Version Control Snapshots
Add snapshot files to version control to ensure consistent test behavior across environments:
```gitignore
# Include snapshots in version control
!/test/snapshots/*.json
```
### 3. Clean Up Test Data
```javascript
test('API test', async (t) => {
const agent = new SnapshotAgent({
mode: 'playback',
snapshotPath: './test/snapshots/temp-test.json'
})
// Clean up after test
t.after(() => {
agent.clearSnapshots()
})
})
```
### 4. Snapshot Validation
```javascript
test('validate snapshot contents', async (t) => {
const agent = new SnapshotAgent({
mode: 'playback',
snapshotPath: './test/snapshots/validation.json'
})
const recorder = agent.getRecorder()
const snapshots = recorder.getSnapshots()
// Validate snapshot structure
assert(snapshots.length > 0, 'Should have recorded snapshots')
assert(snapshots[0].request.url.startsWith('https://'), 'Should use HTTPS')
})
```
## Comparison with Other Tools
### vs Manual MockAgent Setup
**Manual MockAgent:**
```javascript
const mockAgent = new MockAgent()
const mockPool = mockAgent.get('https://api.example.com')
mockPool.intercept({
path: '/users',
method: 'GET'
}).reply(200, [
{ id: 1, name: 'User 1' },
{ id: 2, name: 'User 2' }
])
```
**SnapshotAgent:**
```javascript
// Record once
const agent = new SnapshotAgent({ mode: 'record', snapshotPath: './snapshots.json' })
// Real API call gets recorded automatically
// Use in tests
const playbackAgent = new SnapshotAgent({ mode: 'playback', snapshotPath: './snapshots.json' })
// Automatically replays recorded response
```
### vs nock
SnapshotAgent provides similar functionality to nock but is specifically designed for undici:
- ✅ Works with all undici APIs (`request`, `stream`, `pipeline`, etc.)
- ✅ Supports undici-specific features (RetryAgent, connection pooling); see the composition sketch after this list
- ✅ Better TypeScript integration
- ✅ More efficient for high-performance scenarios
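A hedged sketch of that composition (assuming `SnapshotAgent` can be wrapped like any other dispatcher; the URL is illustrative):
```javascript
import { SnapshotAgent, RetryAgent, setGlobalDispatcher } from 'undici'

// Wrap the snapshot dispatcher in a RetryAgent so retried requests
// are also served from (or recorded into) the snapshot store.
const snapshotAgent = new SnapshotAgent({ mode: 'playback', snapshotPath: './snapshots.json' })
const agent = new RetryAgent(snapshotAgent)
setGlobalDispatcher(agent)

const response = await fetch('https://api.example.com/users')
```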
## See Also
- [MockAgent](./MockAgent.md) - Manual mocking for more control
- [MockCallHistory](./MockCallHistory.md) - Inspecting request history
- [Testing Best Practices](../best-practices/writing-tests.md) - General testing guidance

node_modules/undici/docs/docs/api/Util.md generated vendored Normal file

@@ -0,0 +1,25 @@
# Util
Utility API for third-party implementations of the dispatcher API.
## `parseHeaders(headers, [obj])`
Receives a header object and returns the parsed value.
Arguments:
- **headers** `(Buffer | string | (Buffer | string)[])[]` (required) - Header object.
- **obj** `Record<string, string | string[]>` (optional) - Target object that the parsed values are assigned to. If **headers** is already an object, **obj** is not used.
Returns: `Record<string, string | string[]>`. If **obj** is specified, the returned object is **obj** itself.
## `headerNameToString(value)`
Retrieves a header name and returns its lowercase value.
Arguments:
- **value** `string | Buffer` (required) - Header name.
Returns: `string`
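A minimal usage sketch (assuming raw headers arrive as the usual flat list of alternating name/value `Buffer`s, as a dispatcher emits them):
```js
import { util } from 'undici'

// Raw headers: alternating name/value entries
const raw = [Buffer.from('Content-Type'), Buffer.from('application/json')]

console.log(util.parseHeaders(raw)) // { 'content-type': 'application/json' }
console.log(util.headerNameToString(Buffer.from('Content-Type'))) // 'content-type'
```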

node_modules/undici/docs/docs/api/WebSocket.md generated vendored Normal file

@@ -0,0 +1,112 @@
# Class: WebSocket
Extends: [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget)
The WebSocket object provides a way to manage a WebSocket connection to a server, allowing bidirectional communication. The API follows the [WebSocket spec](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) and [RFC 6455](https://datatracker.ietf.org/doc/html/rfc6455).
## `new WebSocket(url[, protocol])`
Arguments:
* **url** `URL | string`
* **protocol** `string | string[] | WebSocketInit` (optional) - Subprotocol(s) to request the server use, or a [`Dispatcher`](/docs/docs/api/Dispatcher.md).
### Example:
This example will not work in browsers or other platforms that don't allow passing an object.
```mjs
import { WebSocket, ProxyAgent } from 'undici'
const proxyAgent = new ProxyAgent('my.proxy.server')
const ws = new WebSocket('wss://echo.websocket.events', {
dispatcher: proxyAgent,
protocols: ['echo', 'chat']
})
```
If you do not need a custom Dispatcher, it's recommended to use the following pattern:
```mjs
import { WebSocket } from 'undici'
const ws = new WebSocket('wss://echo.websocket.events', ['echo', 'chat'])
```
# Class: WebSocketStream
> ⚠️ Warning: the WebSocketStream API has not been finalized and is likely to change.
See [MDN](https://developer.mozilla.org/en-US/docs/Web/API/WebSocketStream) for more information.
## `new WebSocketStream(url[, options])`
Arguments:
* **url** `URL | string`
* **options** `WebSocketStreamOptions` (optional)
### WebSocketStream Example
```js
const stream = new WebSocketStream('https://echo.websocket.org/')
const { readable, writable } = await stream.opened
async function read () {
/** @type {ReadableStreamReader} */
const reader = readable.getReader()
while (true) {
const { done, value } = await reader.read()
if (done) break
// do something with value
}
}
async function write () {
/** @type {WritableStreamDefaultWriter} */
const writer = writable.getWriter()
writer.write('Hello, world!')
writer.releaseLock()
}
read()
setInterval(() => write(), 5000)
```
## ping(websocket, payload)
Arguments:
* **websocket** `WebSocket` - The WebSocket instance to send the ping frame on
* **payload** `Buffer|undefined` (optional) - Optional payload data to include with the ping frame. Must not exceed 125 bytes.
Sends a ping frame to the WebSocket server. The server must respond with a pong frame containing the same payload data. This can be used for keepalive purposes or to verify that the connection is still active.
### Example:
```js
import { WebSocket, ping } from 'undici'
const ws = new WebSocket('wss://echo.websocket.events')
ws.addEventListener('open', () => {
// Send ping with no payload
ping(ws)
// Send ping with payload
const payload = Buffer.from('hello')
ping(ws, payload)
})
```
**Note**: A ping frame cannot have a payload larger than 125 bytes. The ping will only be sent if the WebSocket connection is in the OPEN state.
## Read More
- [MDN - WebSocket](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket)
- [The WebSocket Specification](https://www.rfc-editor.org/rfc/rfc6455)
- [The WHATWG WebSocket Specification](https://websockets.spec.whatwg.org/)

node_modules/undici/docs/docs/api/api-lifecycle.md generated vendored Normal file

@@ -0,0 +1,91 @@
# Client Lifecycle
An Undici [Client](/docs/docs/api/Client.md) can be best described as a state machine. The following list is a summary of the various state transitions the `Client` will go through in its lifecycle. This document also contains detailed breakdowns of each state.
> This diagram is not a perfect representation of the undici Client. Since the Client class is not actually implemented as a state-machine, actual execution may deviate slightly from what is described below. Consider this as a general resource for understanding the inner workings of the Undici client rather than some kind of formal specification.
## State Transition Overview
* A `Client` begins in the **idle** state with no socket connection and no requests in queue.
* The *connect* event transitions the `Client` to the **pending** state where requests can be queued prior to processing.
* The *close* and *destroy* events transition the `Client` to the **destroyed** state. Since there are no requests in the queue, the *close* event immediately transitions to the **destroyed** state.
* The **pending** state indicates the underlying socket connection has been successfully established and requests are queueing.
* The *process* event transitions the `Client` to the **processing** state where requests are processed.
* If requests are queued, the *close* event transitions to the **processing** state; otherwise, it transitions to the **destroyed** state.
* The *destroy* event transitions to the **destroyed** state.
* The **processing** state initializes to the **processing.running** state.
* If the current request requires draining, the *needDrain* event transitions the `Client` into the **processing.busy** state which will return to the **processing.running** state with the *drainComplete* event.
* After all queued requests are completed, the *keepalive* event transitions the `Client` back to the **pending** state. If no requests are queued during the timeout, the **close** event transitions the `Client` to the **destroyed** state.
* If the *close* event is fired while the `Client` still has queued requests, the `Client` transitions to the **process.closing** state where it will complete all existing requests before firing the *done* event.
* The *done* event gracefully transitions the `Client` to the **destroyed** state.
* At any point in time, the *destroy* event will transition the `Client` from the **processing** state to the **destroyed** state, destroying any queued requests.
* The **destroyed** state is a final state and the `Client` is no longer functional.
A state diagram representing an Undici Client instance:
```mermaid
stateDiagram-v2
[*] --> idle
idle --> pending : connect
idle --> destroyed : destroy/close
pending --> idle : timeout
pending --> destroyed : destroy
state close_fork <<fork>>
pending --> close_fork : close
close_fork --> processing
close_fork --> destroyed
pending --> processing : process
processing --> pending : keepalive
processing --> destroyed : done
processing --> destroyed : destroy
destroyed --> [*]
state processing {
[*] --> running
running --> closing : close
running --> busy : needDrain
busy --> running : drainComplete
running --> [*] : keepalive
closing --> [*] : done
}
```
## State details
### idle
The **idle** state is the initial state of a `Client` instance. While an `origin` is required for instantiating a `Client` instance, the underlying socket connection will not be established until a request is queued using [`Client.dispatch()`](/docs/docs/api/Client.md#clientdispatchoptions-handlers). By calling `Client.dispatch()` directly or using one of the multiple implementations ([`Client.connect()`](Client.md#clientconnectoptions-callback), [`Client.pipeline()`](Client.md#clientpipelineoptions-handler), [`Client.request()`](Client.md#clientrequestoptions-callback), [`Client.stream()`](Client.md#clientstreamoptions-factory-callback), and [`Client.upgrade()`](/docs/docs/api/Client.md#clientupgradeoptions-callback)), the `Client` instance will transition from **idle** to [**pending**](/docs/docs/api/Client.md#pending) and then most likely directly to [**processing**](/docs/docs/api/Client.md#processing).
Calling [`Client.close()`](/docs/docs/api/Client.md#clientclosecallback) or [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](/docs/docs/api/Client.md#destroyed) state since the `Client` instance will have no queued requests in this state.
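As a small illustration of those transitions (a sketch; the origin is a placeholder):
```js
import { Client } from 'undici'

const client = new Client('http://localhost:3000') // idle: no socket yet

// Queuing the first request triggers the connect event: idle -> pending -> processing
const { body } = await client.request({ path: '/', method: 'GET' })
await body.text()

// close() lets queued requests finish, then ends in the destroyed state
await client.close()
```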
### pending
The **pending** state signifies a non-processing `Client`. Upon entering this state, the `Client` establishes a socket connection and emits the [`'connect'`](/docs/docs/api/Client.md#event-connect) event signalling a connection was successfully established with the `origin` provided during `Client` instantiation. The internal queue is initially empty, and requests can start queueing.
Calling [`Client.close()`](/docs/docs/api/Client.md#clientclosecallback) with queued requests, transitions the `Client` to the [**processing**](/docs/docs/api/Client.md#processing) state. Without queued requests, it transitions to the [**destroyed**](/docs/docs/api/Client.md#destroyed) state.
Calling [`Client.destroy()`](/docs/docs/api/Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](/docs/docs/api/Client.md#destroyed) state regardless of existing requests.
### processing
The **processing** state is a state machine within itself. It initializes to the [**processing.running**](/docs/docs/api/Client.md#running) state. The [`Client.dispatch()`](/docs/docs/api/Client.md#clientdispatchoptions-handlers), [`Client.close()`](Client.md#clientclosecallback), and [`Client.destroy()`](Client.md#clientdestroyerror-callback) methods can be called at any time while the `Client` is in this state. `Client.dispatch()` will add more requests to the queue while existing requests continue to be processed. `Client.close()` will transition to the [**processing.closing**](/docs/docs/api/Client.md#closing) state, and `Client.destroy()` will transition to [**destroyed**](/docs/docs/api/Client.md#destroyed).
#### running
In the **processing.running** sub-state, queued requests are being processed in a FIFO order. If a request body requires draining, the *needDrain* event transitions to the [**processing.busy**](/docs/docs/api/Client.md#busy) sub-state. The *close* event transitions the Client to the [**process.closing**](/docs/docs/api/Client.md#closing) sub-state. If all queued requests are processed and neither [`Client.close()`](/docs/docs/api/Client.md#clientclosecallback) nor [`Client.destroy()`](Client.md#clientdestroyerror-callback) are called, then the [**processing**](/docs/docs/api/Client.md#processing) machine will trigger a *keepalive* event transitioning the `Client` back to the [**pending**](/docs/docs/api/Client.md#pending) state. During this time, the `Client` is waiting for the socket connection to timeout, and once it does, it triggers the *timeout* event and transitions to the [**idle**](/docs/docs/api/Client.md#idle) state.
#### busy
This sub-state is only entered when a request body is an instance of [Stream](https://nodejs.org/api/stream.html) and requires draining. The `Client` cannot process additional requests while in this state and must wait until the currently processing request body is completely drained before transitioning back to [**processing.running**](/docs/docs/api/Client.md#running).
#### closing
This sub-state is only entered when a `Client` instance has queued requests and the [`Client.close()`](/docs/docs/api/Client.md#clientclosecallback) method is called. In this state, the `Client` instance continues to process requests as usual, with the one exception that no additional requests can be queued. Once all of the queued requests are processed, the `Client` will trigger the *done* event gracefully entering the [**destroyed**](/docs/docs/api/Client.md#destroyed) state without an error.
### destroyed
The **destroyed** state is a final state for the `Client` instance. Once in this state, a `Client` is nonfunctional. Calling any other `Client` methods will result in an `ClientDestroyedError`.
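For instance (a sketch; the rejection surfaces as the `ClientDestroyedError` mentioned above):
```js
import { Client, errors } from 'undici'

const client = new Client('http://localhost:3000')
await client.destroy()

try {
  await client.request({ path: '/', method: 'GET' })
} catch (err) {
  console.log(err instanceof errors.ClientDestroyedError) // true
}
```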


@@ -0,0 +1,64 @@
# Client certificate
Client certificate authentication can be configured with the `Client`, the required options are passed along through the `connect` option.
The client certificates must be signed by a trusted CA. The Node.js default is to trust the well-known CAs curated by Mozilla.
Setting the server option `requestCert: true` tells the server to request the client certificate.
The server option `rejectUnauthorized: false` allows us to handle any invalid certificate errors in client code. The `authorized` property on the socket of the incoming request will show if the client certificate was valid. The `authorizationError` property will give the reason if the certificate was not valid.
### Client Certificate Authentication
```js
const { readFileSync } = require('node:fs')
const { join } = require('node:path')
const { createServer } = require('node:https')
const { Client } = require('undici')
const serverOptions = {
ca: [
readFileSync(join(__dirname, 'client-ca-crt.pem'), 'utf8')
],
key: readFileSync(join(__dirname, 'server-key.pem'), 'utf8'),
cert: readFileSync(join(__dirname, 'server-crt.pem'), 'utf8'),
requestCert: true,
rejectUnauthorized: false
}
const server = createServer(serverOptions, (req, res) => {
// true if client cert is valid
if(req.client.authorized === true) {
console.log('valid')
} else {
console.error(req.client.authorizationError)
}
res.end()
})
server.listen(0, function () {
const tls = {
ca: [
readFileSync(join(__dirname, 'server-ca-crt.pem'), 'utf8')
],
key: readFileSync(join(__dirname, 'client-key.pem'), 'utf8'),
cert: readFileSync(join(__dirname, 'client-crt.pem'), 'utf8'),
rejectUnauthorized: false,
servername: 'agent1'
}
const client = new Client(`https://localhost:${server.address().port}`, {
connect: tls
})
client.request({
path: '/',
method: 'GET'
}, (err, { body }) => {
body.on('data', (buf) => {})
body.on('end', () => {
client.close()
server.close()
})
})
})
```


@@ -0,0 +1,190 @@
# Mocking Request
Undici has its own mocking [utility](/docs/docs/api/MockAgent.md). It allows us to intercept undici HTTP requests and return mocked values instead, which is useful for testing.
Example:
```js
// bank.mjs
import { request } from 'undici'
export async function bankTransfer(recipient, amount) {
const { body } = await request('http://localhost:3000/bank-transfer',
{
method: 'POST',
headers: {
'X-TOKEN-SECRET': 'SuperSecretToken',
},
body: JSON.stringify({
recipient,
amount
})
}
)
return await body.json()
}
```
And this is what the test file looks like:
```js
// index.test.mjs
import { strict as assert } from 'node:assert'
import { MockAgent, setGlobalDispatcher, } from 'undici'
import { bankTransfer } from './bank.mjs'
const mockAgent = new MockAgent();
setGlobalDispatcher(mockAgent);
// Provide the base url to the request
const mockPool = mockAgent.get('http://localhost:3000');
// intercept the request
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
headers: {
'X-TOKEN-SECRET': 'SuperSecretToken',
},
body: JSON.stringify({
recipient: '1234567890',
amount: '100'
})
}).reply(200, {
message: 'transaction processed'
})
const success = await bankTransfer('1234567890', '100')
assert.deepEqual(success, { message: 'transaction processed' })
// if you don't want to check whether the body or the headers contain the same value,
// just remove them from the interceptor
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
}).reply(400, {
message: 'bank account not found'
})
const badRequest = await bankTransfer('1234567890', '100')
assert.deepEqual(badRequest, { message: 'bank account not found' })
```
Explore other MockAgent functionality [here](/docs/docs/api/MockAgent.md)
## Access agent call history
Using a MockAgent also allows you to make assertions on the configuration used to make your request in your application.
Here is an example:
```js
// index.test.mjs
import { strict as assert } from 'node:assert'
import { MockAgent, setGlobalDispatcher, fetch } from 'undici'
import { app } from './app.mjs'
// given an application server running on http://localhost:3000
await app.start()
// enable call history at instantiation
const mockAgent = new MockAgent({ enableCallHistory: true })
// or after instantiation
mockAgent.enableCallHistory()
setGlobalDispatcher(mockAgent)
// this call is made (not intercepted)
await fetch(`http://localhost:3000/endpoint?query='hello'`, {
method: 'POST',
headers: { 'content-type': 'application/json' },
body: JSON.stringify({ data: '' })
})
// access the call history of the MockAgent (which registers every call made, intercepted or not)
assert.ok(mockAgent.getCallHistory()?.calls().length === 1)
assert.strictEqual(mockAgent.getCallHistory()?.firstCall()?.fullUrl, `http://localhost:3000/endpoint?query='hello'`)
assert.strictEqual(mockAgent.getCallHistory()?.firstCall()?.body, JSON.stringify({ data: '' }))
assert.deepStrictEqual(mockAgent.getCallHistory()?.firstCall()?.searchParams, { query: 'hello' })
assert.strictEqual(mockAgent.getCallHistory()?.firstCall()?.port, '3000')
assert.strictEqual(mockAgent.getCallHistory()?.firstCall()?.host, 'localhost:3000')
assert.strictEqual(mockAgent.getCallHistory()?.firstCall()?.method, 'POST')
assert.strictEqual(mockAgent.getCallHistory()?.firstCall()?.path, '/endpoint')
assert.deepStrictEqual(mockAgent.getCallHistory()?.firstCall()?.headers, { 'content-type': 'application/json' })
// clear all call history logs
mockAgent.clearCallHistory()
assert.ok(mockAgent.getCallHistory()?.calls().length === 0)
```
Calling `mockAgent.close()` will automatically clear and delete every call history for you.
Explore other MockAgent functionality [here](/docs/docs/api/MockAgent.md)
Explore other MockCallHistory functionality [here](/docs/docs/api/MockCallHistory.md)
Explore other MockCallHistoryLog functionality [here](/docs/docs/api/MockCallHistoryLog.md)
## Debug Mock Value
When the interceptor and the request options are not the same, undici will automatically make a real HTTP request. To prevent real requests from being made, use `mockAgent.disableNetConnect()`:
```js
const mockAgent = new MockAgent();
setGlobalDispatcher(mockAgent);
mockAgent.disableNetConnect()
// Provide the base url to the request
const mockPool = mockAgent.get('http://localhost:3000');
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
}).reply(200, {
message: 'transaction processed'
})
const badRequest = await bankTransfer('1234567890', '100')
// Will throw an error
// MockNotMatchedError: Mock dispatch not matched for path '/bank-transfer':
// subsequent request to origin http://localhost:3000 was not allowed (net.connect disabled)
```
## Reply with data based on request
If the mocked response needs to be dynamically derived from the request parameters, you can provide a function instead of an object to `reply`:
```js
mockPool.intercept({
path: '/bank-transfer',
method: 'POST',
headers: {
'X-TOKEN-SECRET': 'SuperSecretToken',
},
body: JSON.stringify({
recipient: '1234567890',
amount: '100'
})
}).reply(200, (opts) => {
// do something with opts
return { message: 'transaction processed' }
})
```
In this case, `opts` will be:
```
{
method: 'POST',
headers: { 'X-TOKEN-SECRET': 'SuperSecretToken' },
body: '{"recipient":"1234567890","amount":"100"}',
origin: 'http://localhost:3000',
path: '/bank-transfer'
}
```

node_modules/undici/docs/docs/best-practices/proxy.md generated vendored Normal file

@@ -0,0 +1,127 @@
# Connecting through a proxy
Connecting through a proxy is possible by:
- Using [ProxyAgent](/docs/docs/api/ProxyAgent.md).
- Configuring `Client` or `Pool` constructor.
The proxy URL should be passed to the `Client` or `Pool` constructor, while the upstream server URL should be added to every request call in the `path`.
For instance, if you need to send a request to the `/hello` route of your upstream server,
the `path` should be `path: 'http://upstream.server:port/hello?foo=bar'`.
If your proxy requires basic authentication, you can send the credentials via the `proxy-authorization` header.
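For the `ProxyAgent` route, a minimal sketch (proxy and upstream URLs are placeholders):
```js
import { ProxyAgent, request } from 'undici'

const dispatcher = new ProxyAgent('http://localhost:8000')

// The dispatcher routes the request through the proxy transparently
const { statusCode, body } = await request('http://upstream.example.com/hello?foo=bar', { dispatcher })
console.log(statusCode, await body.json())
```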
### Connect without authentication
```js
import { Client } from 'undici'
import { createServer } from 'http'
import { createProxy } from 'proxy'
const server = await buildServer()
const proxyServer = await buildProxy()
const serverUrl = `http://localhost:${server.address().port}`
const proxyUrl = `http://localhost:${proxyServer.address().port}`
server.on('request', (req, res) => {
console.log(req.url) // '/hello?foo=bar'
res.setHeader('content-type', 'application/json')
res.end(JSON.stringify({ hello: 'world' }))
})
const client = new Client(proxyUrl)
const response = await client.request({
method: 'GET',
path: serverUrl + '/hello?foo=bar'
})
response.body.setEncoding('utf8')
let data = ''
for await (const chunk of response.body) {
data += chunk
}
console.log(response.statusCode) // 200
console.log(JSON.parse(data)) // { hello: 'world' }
server.close()
proxyServer.close()
client.close()
function buildServer () {
return new Promise((resolve, reject) => {
const server = createServer()
server.listen(0, () => resolve(server))
})
}
function buildProxy () {
return new Promise((resolve, reject) => {
const server = createProxy(createServer())
server.listen(0, () => resolve(server))
})
}
```
### Connect with authentication
```js
import { Client } from 'undici'
import { createServer } from 'http'
import { createProxy } from 'proxy'
const server = await buildServer()
const proxyServer = await buildProxy()
const serverUrl = `http://localhost:${server.address().port}`
const proxyUrl = `http://localhost:${proxyServer.address().port}`
proxyServer.authenticate = function (req) {
return req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`
}
server.on('request', (req, res) => {
console.log(req.url) // '/hello?foo=bar'
res.setHeader('content-type', 'application/json')
res.end(JSON.stringify({ hello: 'world' }))
})
const client = new Client(proxyUrl)
const response = await client.request({
method: 'GET',
path: serverUrl + '/hello?foo=bar',
headers: {
'proxy-authorization': `Basic ${Buffer.from('user:pass').toString('base64')}`
}
})
response.body.setEncoding('utf8')
let data = ''
for await (const chunk of response.body) {
data += chunk
}
console.log(response.statusCode) // 200
console.log(JSON.parse(data)) // { hello: 'world' }
server.close()
proxyServer.close()
client.close()
function buildServer () {
return new Promise((resolve, reject) => {
const server = createServer()
server.listen(0, () => resolve(server))
})
}
function buildProxy () {
return new Promise((resolve, reject) => {
const server = createProxy(createServer())
server.listen(0, () => resolve(server))
})
}
```


@@ -0,0 +1,20 @@
# Writing tests
Undici is tuned for production use: by default it keeps a socket open for a few seconds after an HTTP request completes, to avoid the overhead of opening a new socket. The settings that make Undici shine in production are not a good fit for automated tests, as they result in longer execution times.
The following are good defaults that will keep the socket open for only 10ms:
```js
import { request, setGlobalDispatcher, Agent } from 'undici'
const agent = new Agent({
keepAliveTimeout: 10, // milliseconds
keepAliveMaxTimeout: 10 // milliseconds
})
setGlobalDispatcher(agent)
```

node_modules/undici/index-fetch.js generated vendored Normal file

@@ -0,0 +1,35 @@
'use strict'
const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
const EnvHttpProxyAgent = require('./lib/dispatcher/env-http-proxy-agent')
const fetchImpl = require('./lib/web/fetch').fetch
module.exports.fetch = function fetch (init, options = undefined) {
return fetchImpl(init, options).catch(err => {
if (err && typeof err === 'object') {
Error.captureStackTrace(err)
}
throw err
})
}
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
module.exports.Headers = require('./lib/web/fetch/headers').Headers
module.exports.Response = require('./lib/web/fetch/response').Response
module.exports.Request = require('./lib/web/fetch/request').Request
const { CloseEvent, ErrorEvent, MessageEvent, createFastMessageEvent } = require('./lib/web/websocket/events')
module.exports.WebSocket = require('./lib/web/websocket/websocket').WebSocket
module.exports.CloseEvent = CloseEvent
module.exports.ErrorEvent = ErrorEvent
module.exports.MessageEvent = MessageEvent
module.exports.createFastMessageEvent = createFastMessageEvent
module.exports.EventSource = require('./lib/web/eventsource/eventsource').EventSource
const api = require('./lib/api')
const Dispatcher = require('./lib/dispatcher/dispatcher')
Object.assign(Dispatcher.prototype, api)
// Expose the fetch implementation to be enabled in Node.js core via a flag
module.exports.EnvHttpProxyAgent = EnvHttpProxyAgent
module.exports.getGlobalDispatcher = getGlobalDispatcher
module.exports.setGlobalDispatcher = setGlobalDispatcher

node_modules/undici/index.d.ts generated vendored Normal file

@@ -0,0 +1,3 @@
import Undici from './types/index'
export default Undici
export * from './types/index'

node_modules/undici/index.js generated vendored Normal file

@@ -0,0 +1,199 @@
'use strict'
const Client = require('./lib/dispatcher/client')
const Dispatcher = require('./lib/dispatcher/dispatcher')
const Pool = require('./lib/dispatcher/pool')
const BalancedPool = require('./lib/dispatcher/balanced-pool')
const Agent = require('./lib/dispatcher/agent')
const ProxyAgent = require('./lib/dispatcher/proxy-agent')
const EnvHttpProxyAgent = require('./lib/dispatcher/env-http-proxy-agent')
const RetryAgent = require('./lib/dispatcher/retry-agent')
const H2CClient = require('./lib/dispatcher/h2c-client')
const errors = require('./lib/core/errors')
const util = require('./lib/core/util')
const { InvalidArgumentError } = errors
const api = require('./lib/api')
const buildConnector = require('./lib/core/connect')
const MockClient = require('./lib/mock/mock-client')
const { MockCallHistory, MockCallHistoryLog } = require('./lib/mock/mock-call-history')
const MockAgent = require('./lib/mock/mock-agent')
const MockPool = require('./lib/mock/mock-pool')
const SnapshotAgent = require('./lib/mock/snapshot-agent')
const mockErrors = require('./lib/mock/mock-errors')
const RetryHandler = require('./lib/handler/retry-handler')
const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
const DecoratorHandler = require('./lib/handler/decorator-handler')
const RedirectHandler = require('./lib/handler/redirect-handler')
Object.assign(Dispatcher.prototype, api)
module.exports.Dispatcher = Dispatcher
module.exports.Client = Client
module.exports.Pool = Pool
module.exports.BalancedPool = BalancedPool
module.exports.Agent = Agent
module.exports.ProxyAgent = ProxyAgent
module.exports.EnvHttpProxyAgent = EnvHttpProxyAgent
module.exports.RetryAgent = RetryAgent
module.exports.H2CClient = H2CClient
module.exports.RetryHandler = RetryHandler
module.exports.DecoratorHandler = DecoratorHandler
module.exports.RedirectHandler = RedirectHandler
module.exports.interceptors = {
redirect: require('./lib/interceptor/redirect'),
responseError: require('./lib/interceptor/response-error'),
retry: require('./lib/interceptor/retry'),
dump: require('./lib/interceptor/dump'),
dns: require('./lib/interceptor/dns'),
cache: require('./lib/interceptor/cache'),
decompress: require('./lib/interceptor/decompress')
}
module.exports.cacheStores = {
MemoryCacheStore: require('./lib/cache/memory-cache-store')
}
const SqliteCacheStore = require('./lib/cache/sqlite-cache-store')
module.exports.cacheStores.SqliteCacheStore = SqliteCacheStore
module.exports.buildConnector = buildConnector
module.exports.errors = errors
module.exports.util = {
parseHeaders: util.parseHeaders,
headerNameToString: util.headerNameToString
}
function makeDispatcher (fn) {
return (url, opts, handler) => {
if (typeof opts === 'function') {
handler = opts
opts = null
}
if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {
throw new InvalidArgumentError('invalid url')
}
if (opts != null && typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (opts && opts.path != null) {
if (typeof opts.path !== 'string') {
throw new InvalidArgumentError('invalid opts.path')
}
let path = opts.path
if (!opts.path.startsWith('/')) {
path = `/${path}`
}
url = new URL(util.parseOrigin(url).origin + path)
} else {
if (!opts) {
opts = typeof url === 'object' ? url : {}
}
url = util.parseURL(url)
}
const { agent, dispatcher = getGlobalDispatcher() } = opts
if (agent) {
throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')
}
return fn.call(dispatcher, {
...opts,
origin: url.origin,
path: url.search ? `${url.pathname}${url.search}` : url.pathname,
method: opts.method || (opts.body ? 'PUT' : 'GET')
}, handler)
}
}
module.exports.setGlobalDispatcher = setGlobalDispatcher
module.exports.getGlobalDispatcher = getGlobalDispatcher
const fetchImpl = require('./lib/web/fetch').fetch
module.exports.fetch = function fetch (init, options = undefined) {
return fetchImpl(init, options).catch(err => {
if (err && typeof err === 'object') {
Error.captureStackTrace(err)
}
throw err
})
}
module.exports.Headers = require('./lib/web/fetch/headers').Headers
module.exports.Response = require('./lib/web/fetch/response').Response
module.exports.Request = require('./lib/web/fetch/request').Request
module.exports.FormData = require('./lib/web/fetch/formdata').FormData
const { setGlobalOrigin, getGlobalOrigin } = require('./lib/web/fetch/global')
module.exports.setGlobalOrigin = setGlobalOrigin
module.exports.getGlobalOrigin = getGlobalOrigin
const { CacheStorage } = require('./lib/web/cache/cachestorage')
const { kConstruct } = require('./lib/core/symbols')
module.exports.caches = new CacheStorage(kConstruct)
const { deleteCookie, getCookies, getSetCookies, setCookie, parseCookie } = require('./lib/web/cookies')
module.exports.deleteCookie = deleteCookie
module.exports.getCookies = getCookies
module.exports.getSetCookies = getSetCookies
module.exports.setCookie = setCookie
module.exports.parseCookie = parseCookie
const { parseMIMEType, serializeAMimeType } = require('./lib/web/fetch/data-url')
module.exports.parseMIMEType = parseMIMEType
module.exports.serializeAMimeType = serializeAMimeType
const { CloseEvent, ErrorEvent, MessageEvent } = require('./lib/web/websocket/events')
const { WebSocket, ping } = require('./lib/web/websocket/websocket')
module.exports.WebSocket = WebSocket
module.exports.CloseEvent = CloseEvent
module.exports.ErrorEvent = ErrorEvent
module.exports.MessageEvent = MessageEvent
module.exports.ping = ping
module.exports.WebSocketStream = require('./lib/web/websocket/stream/websocketstream').WebSocketStream
module.exports.WebSocketError = require('./lib/web/websocket/stream/websocketerror').WebSocketError
module.exports.request = makeDispatcher(api.request)
module.exports.stream = makeDispatcher(api.stream)
module.exports.pipeline = makeDispatcher(api.pipeline)
module.exports.connect = makeDispatcher(api.connect)
module.exports.upgrade = makeDispatcher(api.upgrade)
module.exports.MockClient = MockClient
module.exports.MockCallHistory = MockCallHistory
module.exports.MockCallHistoryLog = MockCallHistoryLog
module.exports.MockPool = MockPool
module.exports.MockAgent = MockAgent
module.exports.SnapshotAgent = SnapshotAgent
module.exports.mockErrors = mockErrors
const { EventSource } = require('./lib/web/eventsource/eventsource')
module.exports.EventSource = EventSource
function install () {
globalThis.fetch = module.exports.fetch
globalThis.Headers = module.exports.Headers
globalThis.Response = module.exports.Response
globalThis.Request = module.exports.Request
globalThis.FormData = module.exports.FormData
globalThis.WebSocket = module.exports.WebSocket
globalThis.CloseEvent = module.exports.CloseEvent
globalThis.ErrorEvent = module.exports.ErrorEvent
globalThis.MessageEvent = module.exports.MessageEvent
globalThis.EventSource = module.exports.EventSource
}
module.exports.install = install

node_modules/undici/lib/api/abort-signal.js generated vendored Normal file

@@ -0,0 +1,59 @@
'use strict'
const { addAbortListener } = require('../core/util')
const { RequestAbortedError } = require('../core/errors')
const kListener = Symbol('kListener')
const kSignal = Symbol('kSignal')
function abort (self) {
if (self.abort) {
self.abort(self[kSignal]?.reason)
} else {
self.reason = self[kSignal]?.reason ?? new RequestAbortedError()
}
removeSignal(self)
}
function addSignal (self, signal) {
self.reason = null
self[kSignal] = null
self[kListener] = null
if (!signal) {
return
}
if (signal.aborted) {
abort(self)
return
}
self[kSignal] = signal
self[kListener] = () => {
abort(self)
}
addAbortListener(self[kSignal], self[kListener])
}
function removeSignal (self) {
if (!self[kSignal]) {
return
}
if ('removeEventListener' in self[kSignal]) {
self[kSignal].removeEventListener('abort', self[kListener])
} else {
self[kSignal].removeListener('abort', self[kListener])
}
self[kSignal] = null
self[kListener] = null
}
module.exports = {
addSignal,
removeSignal
}

node_modules/undici/lib/api/api-connect.js generated vendored Normal file

@@ -0,0 +1,110 @@
'use strict'
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const { InvalidArgumentError, SocketError } = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
class ConnectHandler extends AsyncResource {
constructor (opts, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
const { signal, opaque, responseHeaders } = opts
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
super('UNDICI_CONNECT')
this.opaque = opaque || null
this.responseHeaders = responseHeaders || null
this.callback = callback
this.abort = null
addSignal(this, signal)
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
onHeaders () {
throw new SocketError('bad connect', null)
}
onUpgrade (statusCode, rawHeaders, socket) {
const { callback, opaque, context } = this
removeSignal(this)
this.callback = null
let headers = rawHeaders
// Indicates is an HTTP2Session
if (headers != null) {
headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
}
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
socket,
opaque,
context
})
}
onError (err) {
const { callback, opaque } = this
removeSignal(this)
if (callback) {
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
}
}
function connect (opts, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
connect.call(this, opts, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
const connectHandler = new ConnectHandler(opts, callback)
const connectOptions = { ...opts, method: 'CONNECT' }
this.dispatch(connectOptions, connectHandler)
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = connect

node_modules/undici/lib/api/api-pipeline.js generated vendored Normal file

@@ -0,0 +1,252 @@
'use strict'
const {
Readable,
Duplex,
PassThrough
} = require('node:stream')
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const {
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError
} = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
function noop () {}
const kResume = Symbol('resume')
class PipelineRequest extends Readable {
constructor () {
super({ autoDestroy: true })
this[kResume] = null
}
_read () {
const { [kResume]: resume } = this
if (resume) {
this[kResume] = null
resume()
}
}
_destroy (err, callback) {
this._read()
callback(err)
}
}
class PipelineResponse extends Readable {
constructor (resume) {
super({ autoDestroy: true })
this[kResume] = resume
}
_read () {
this[kResume]()
}
_destroy (err, callback) {
if (!err && !this._readableState.endEmitted) {
err = new RequestAbortedError()
}
callback(err)
}
}
class PipelineHandler extends AsyncResource {
constructor (opts, handler) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (typeof handler !== 'function') {
throw new InvalidArgumentError('invalid handler')
}
const { signal, method, opaque, onInfo, responseHeaders } = opts
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
if (method === 'CONNECT') {
throw new InvalidArgumentError('invalid method')
}
if (onInfo && typeof onInfo !== 'function') {
throw new InvalidArgumentError('invalid onInfo callback')
}
super('UNDICI_PIPELINE')
this.opaque = opaque || null
this.responseHeaders = responseHeaders || null
this.handler = handler
this.abort = null
this.context = null
this.onInfo = onInfo || null
this.req = new PipelineRequest().on('error', noop)
this.ret = new Duplex({
readableObjectMode: opts.objectMode,
autoDestroy: true,
read: () => {
const { body } = this
if (body?.resume) {
body.resume()
}
},
write: (chunk, encoding, callback) => {
const { req } = this
if (req.push(chunk, encoding) || req._readableState.destroyed) {
callback()
} else {
req[kResume] = callback
}
},
destroy: (err, callback) => {
const { body, req, res, ret, abort } = this
if (!err && !ret._readableState.endEmitted) {
err = new RequestAbortedError()
}
if (abort && err) {
abort()
}
util.destroy(body, err)
util.destroy(req, err)
util.destroy(res, err)
removeSignal(this)
callback(err)
}
}).on('prefinish', () => {
const { req } = this
// Node < 15 does not call _final in same tick.
req.push(null)
})
this.res = null
addSignal(this, signal)
}
onConnect (abort, context) {
const { res } = this
if (this.reason) {
abort(this.reason)
return
}
assert(!res, 'pipeline cannot be retried')
this.abort = abort
this.context = context
}
onHeaders (statusCode, rawHeaders, resume) {
const { opaque, handler, context } = this
if (statusCode < 200) {
if (this.onInfo) {
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
this.onInfo({ statusCode, headers })
}
return
}
this.res = new PipelineResponse(resume)
let body
try {
this.handler = null
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
body = this.runInAsyncScope(handler, null, {
statusCode,
headers,
opaque,
body: this.res,
context
})
} catch (err) {
this.res.on('error', noop)
throw err
}
if (!body || typeof body.on !== 'function') {
throw new InvalidReturnValueError('expected Readable')
}
body
.on('data', (chunk) => {
const { ret, body } = this
if (!ret.push(chunk) && body.pause) {
body.pause()
}
})
.on('error', (err) => {
const { ret } = this
util.destroy(ret, err)
})
.on('end', () => {
const { ret } = this
ret.push(null)
})
.on('close', () => {
const { ret } = this
if (!ret._readableState.ended) {
util.destroy(ret, new RequestAbortedError())
}
})
this.body = body
}
onData (chunk) {
const { res } = this
return res.push(chunk)
}
onComplete (trailers) {
const { res } = this
res.push(null)
}
onError (err) {
const { ret } = this
this.handler = null
util.destroy(ret, err)
}
}
function pipeline (opts, handler) {
try {
const pipelineHandler = new PipelineHandler(opts, handler)
this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
return pipelineHandler.ret
} catch (err) {
return new PassThrough().destroy(err)
}
}
module.exports = pipeline

node_modules/undici/lib/api/api-request.js generated vendored Normal file

@@ -0,0 +1,213 @@
'use strict'
const assert = require('node:assert')
const { AsyncResource } = require('node:async_hooks')
const { Readable } = require('./readable')
const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
const util = require('../core/util')
function noop () {}
class RequestHandler extends AsyncResource {
constructor (opts, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
const { signal, method, opaque, body, onInfo, responseHeaders, highWaterMark } = opts
try {
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
throw new InvalidArgumentError('invalid highWaterMark')
}
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
if (method === 'CONNECT') {
throw new InvalidArgumentError('invalid method')
}
if (onInfo && typeof onInfo !== 'function') {
throw new InvalidArgumentError('invalid onInfo callback')
}
super('UNDICI_REQUEST')
} catch (err) {
if (util.isStream(body)) {
util.destroy(body.on('error', noop), err)
}
throw err
}
this.method = method
this.responseHeaders = responseHeaders || null
this.opaque = opaque || null
this.callback = callback
this.res = null
this.abort = null
this.body = body
this.trailers = {}
this.context = null
this.onInfo = onInfo || null
this.highWaterMark = highWaterMark
this.reason = null
this.removeAbortListener = null
if (signal?.aborted) {
this.reason = signal.reason ?? new RequestAbortedError()
} else if (signal) {
this.removeAbortListener = util.addAbortListener(signal, () => {
this.reason = signal.reason ?? new RequestAbortedError()
if (this.res) {
util.destroy(this.res.on('error', noop), this.reason)
} else if (this.abort) {
this.abort(this.reason)
}
})
}
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
if (statusCode < 200) {
if (this.onInfo) {
this.onInfo({ statusCode, headers })
}
return
}
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
const contentType = parsedHeaders['content-type']
const contentLength = parsedHeaders['content-length']
const res = new Readable({
resume,
abort,
contentType,
contentLength: this.method !== 'HEAD' && contentLength
? Number(contentLength)
: null,
highWaterMark
})
if (this.removeAbortListener) {
res.on('close', this.removeAbortListener)
this.removeAbortListener = null
}
this.callback = null
this.res = res
if (callback !== null) {
try {
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
trailers: this.trailers,
opaque,
body: res,
context
})
} catch (err) {
// If the callback throws synchronously, we need to handle it
// Remove reference to res to allow res being garbage collected
this.res = null
// Destroy the response stream
util.destroy(res.on('error', noop), err)
// Use queueMicrotask to re-throw the error so it reaches uncaughtException
queueMicrotask(() => {
throw err
})
}
}
}
onData (chunk) {
return this.res.push(chunk)
}
onComplete (trailers) {
util.parseHeaders(trailers, this.trailers)
this.res.push(null)
}
onError (err) {
const { res, callback, body, opaque } = this
if (callback) {
// TODO: Does this need queueMicrotask?
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
if (res) {
this.res = null
// Ensure all queued handlers are invoked before destroying res.
queueMicrotask(() => {
util.destroy(res.on('error', noop), err)
})
}
if (body) {
this.body = null
if (util.isStream(body)) {
body.on('error', noop)
util.destroy(body, err)
}
}
if (this.removeAbortListener) {
this.removeAbortListener()
this.removeAbortListener = null
}
}
}
function request (opts, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
request.call(this, opts, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
const handler = new RequestHandler(opts, callback)
this.dispatch(opts, handler)
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = request
module.exports.RequestHandler = RequestHandler

node_modules/undici/lib/api/api-stream.js generated vendored Normal file

@@ -0,0 +1,209 @@
'use strict'
const assert = require('node:assert')
const { finished } = require('node:stream')
const { AsyncResource } = require('node:async_hooks')
const { InvalidArgumentError, InvalidReturnValueError } = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
function noop () {}
class StreamHandler extends AsyncResource {
constructor (opts, factory, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
const { signal, method, opaque, body, onInfo, responseHeaders } = opts
try {
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (typeof factory !== 'function') {
throw new InvalidArgumentError('invalid factory')
}
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
if (method === 'CONNECT') {
throw new InvalidArgumentError('invalid method')
}
if (onInfo && typeof onInfo !== 'function') {
throw new InvalidArgumentError('invalid onInfo callback')
}
super('UNDICI_STREAM')
} catch (err) {
if (util.isStream(body)) {
util.destroy(body.on('error', noop), err)
}
throw err
}
this.responseHeaders = responseHeaders || null
this.opaque = opaque || null
this.factory = factory
this.callback = callback
this.res = null
this.abort = null
this.context = null
this.trailers = null
this.body = body
this.onInfo = onInfo || null
if (util.isStream(body)) {
body.on('error', (err) => {
this.onError(err)
})
}
addSignal(this, signal)
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = context
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const { factory, opaque, context, responseHeaders } = this
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
if (statusCode < 200) {
if (this.onInfo) {
this.onInfo({ statusCode, headers })
}
return
}
this.factory = null
if (factory === null) {
return
}
const res = this.runInAsyncScope(factory, null, {
statusCode,
headers,
opaque,
context
})
if (
!res ||
typeof res.write !== 'function' ||
typeof res.end !== 'function' ||
typeof res.on !== 'function'
) {
throw new InvalidReturnValueError('expected Writable')
}
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
finished(res, { readable: false }, (err) => {
const { callback, res, opaque, trailers, abort } = this
this.res = null
if (err || !res?.readable) {
util.destroy(res, err)
}
this.callback = null
this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
if (err) {
abort()
}
})
res.on('drain', resume)
this.res = res
const needDrain = res.writableNeedDrain !== undefined
? res.writableNeedDrain
: res._writableState?.needDrain
return needDrain !== true
}
onData (chunk) {
const { res } = this
return res ? res.write(chunk) : true
}
onComplete (trailers) {
const { res } = this
removeSignal(this)
if (!res) {
return
}
this.trailers = util.parseHeaders(trailers)
res.end()
}
onError (err) {
const { res, callback, opaque, body } = this
removeSignal(this)
this.factory = null
if (res) {
this.res = null
util.destroy(res, err)
} else if (callback) {
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
if (body) {
this.body = null
util.destroy(body, err)
}
}
}
function stream (opts, factory, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
stream.call(this, opts, factory, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
const handler = new StreamHandler(opts, factory, callback)
this.dispatch(opts, handler)
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = stream

node_modules/undici/lib/api/api-upgrade.js generated vendored Normal file

@@ -0,0 +1,110 @@
'use strict'
const { InvalidArgumentError, SocketError } = require('../core/errors')
const { AsyncResource } = require('node:async_hooks')
const assert = require('node:assert')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
class UpgradeHandler extends AsyncResource {
constructor (opts, callback) {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('invalid opts')
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
const { signal, opaque, responseHeaders } = opts
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
super('UNDICI_UPGRADE')
this.responseHeaders = responseHeaders || null
this.opaque = opaque || null
this.callback = callback
this.abort = null
this.context = null
addSignal(this, signal)
}
onConnect (abort, context) {
if (this.reason) {
abort(this.reason)
return
}
assert(this.callback)
this.abort = abort
this.context = null
}
onHeaders () {
throw new SocketError('bad upgrade', null)
}
onUpgrade (statusCode, rawHeaders, socket) {
assert(statusCode === 101)
const { callback, opaque, context } = this
removeSignal(this)
this.callback = null
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
this.runInAsyncScope(callback, null, null, {
headers,
socket,
opaque,
context
})
}
onError (err) {
const { callback, opaque } = this
removeSignal(this)
if (callback) {
this.callback = null
queueMicrotask(() => {
this.runInAsyncScope(callback, null, err, { opaque })
})
}
}
}
function upgrade (opts, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
upgrade.call(this, opts, (err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
try {
const upgradeHandler = new UpgradeHandler(opts, callback)
const upgradeOpts = {
...opts,
method: opts.method || 'GET',
upgrade: opts.protocol || 'Websocket'
}
this.dispatch(upgradeOpts, upgradeHandler)
} catch (err) {
if (typeof callback !== 'function') {
throw err
}
const opaque = opts?.opaque
queueMicrotask(() => callback(err, { opaque }))
}
}
module.exports = upgrade
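A small, hedged example of the upgrade API via the top-level undici export; the endpoint below is hypothetical:
'use strict'
const { upgrade } = require('undici')
// Resolves once the server answers 101 Switching Protocols, handing back the raw socket.
upgrade('http://localhost:3000/ws', { protocol: 'websocket' })
  .then(({ headers, socket }) => {
    console.log('upgraded, response headers:', headers)
    socket.on('data', (chunk) => console.log('received', chunk.length, 'bytes'))
    socket.end()
  })
  .catch(console.error)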

7
node_modules/undici/lib/api/index.js generated vendored Normal file

@@ -0,0 +1,7 @@
'use strict'
module.exports.request = require('./api-request')
module.exports.stream = require('./api-stream')
module.exports.pipeline = require('./api-pipeline')
module.exports.upgrade = require('./api-upgrade')
module.exports.connect = require('./api-connect')

580
node_modules/undici/lib/api/readable.js generated vendored Normal file

@@ -0,0 +1,580 @@
'use strict'
const assert = require('node:assert')
const { Readable } = require('node:stream')
const { RequestAbortedError, NotSupportedError, InvalidArgumentError, AbortError } = require('../core/errors')
const util = require('../core/util')
const { ReadableStreamFrom } = require('../core/util')
const kConsume = Symbol('kConsume')
const kReading = Symbol('kReading')
const kBody = Symbol('kBody')
const kAbort = Symbol('kAbort')
const kContentType = Symbol('kContentType')
const kContentLength = Symbol('kContentLength')
const kUsed = Symbol('kUsed')
const kBytesRead = Symbol('kBytesRead')
const noop = () => {}
/**
* @class
* @extends {Readable}
* @see https://fetch.spec.whatwg.org/#body
*/
class BodyReadable extends Readable {
/**
* @param {object} opts
* @param {(this: Readable, size: number) => void} opts.resume
* @param {() => (void | null)} opts.abort
* @param {string} [opts.contentType = '']
* @param {number} [opts.contentLength]
* @param {number} [opts.highWaterMark = 64 * 1024]
*/
constructor ({
resume,
abort,
contentType = '',
contentLength,
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
}) {
super({
autoDestroy: true,
read: resume,
highWaterMark
})
this._readableState.dataEmitted = false
this[kAbort] = abort
/** @type {Consume | null} */
this[kConsume] = null
/** @type {number} */
this[kBytesRead] = 0
/** @type {ReadableStream|null} */
this[kBody] = null
/** @type {boolean} */
this[kUsed] = false
/** @type {string} */
this[kContentType] = contentType
/** @type {number|null} */
this[kContentLength] = Number.isFinite(contentLength) ? contentLength : null
/**
* Is stream being consumed through Readable API?
* This is an optimization so that we avoid checking
* for 'data' and 'readable' listeners in the hot path
* inside push().
*
* @type {boolean}
*/
this[kReading] = false
}
/**
* @param {Error|null} err
* @param {(error:(Error|null)) => void} callback
* @returns {void}
*/
_destroy (err, callback) {
if (!err && !this._readableState.endEmitted) {
err = new RequestAbortedError()
}
if (err) {
this[kAbort]()
}
// Workaround for Node "bug". If the stream is destroyed in same
// tick as it is created, then a user who is waiting for a
// promise (i.e micro tick) for installing an 'error' listener will
// never get a chance and will always encounter an unhandled exception.
if (!this[kUsed]) {
setImmediate(callback, err)
} else {
callback(err)
}
}
/**
* @param {string|symbol} event
* @param {(...args: any[]) => void} listener
* @returns {this}
*/
on (event, listener) {
if (event === 'data' || event === 'readable') {
this[kReading] = true
this[kUsed] = true
}
return super.on(event, listener)
}
/**
* @param {string|symbol} event
* @param {(...args: any[]) => void} listener
* @returns {this}
*/
addListener (event, listener) {
return this.on(event, listener)
}
/**
* @param {string|symbol} event
* @param {(...args: any[]) => void} listener
* @returns {this}
*/
off (event, listener) {
const ret = super.off(event, listener)
if (event === 'data' || event === 'readable') {
this[kReading] = (
this.listenerCount('data') > 0 ||
this.listenerCount('readable') > 0
)
}
return ret
}
/**
* @param {string|symbol} event
* @param {(...args: any[]) => void} listener
* @returns {this}
*/
removeListener (event, listener) {
return this.off(event, listener)
}
/**
* @param {Buffer|null} chunk
* @returns {boolean}
*/
push (chunk) {
if (chunk) {
this[kBytesRead] += chunk.length
if (this[kConsume]) {
consumePush(this[kConsume], chunk)
return this[kReading] ? super.push(chunk) : true
}
}
return super.push(chunk)
}
/**
* Consumes and returns the body as a string.
*
* @see https://fetch.spec.whatwg.org/#dom-body-text
* @returns {Promise<string>}
*/
text () {
return consume(this, 'text')
}
/**
* Consumes and returns the body as a JavaScript Object.
*
* @see https://fetch.spec.whatwg.org/#dom-body-json
* @returns {Promise<unknown>}
*/
json () {
return consume(this, 'json')
}
/**
* Consumes and returns the body as a Blob
*
* @see https://fetch.spec.whatwg.org/#dom-body-blob
* @returns {Promise<Blob>}
*/
blob () {
return consume(this, 'blob')
}
/**
* Consumes and returns the body as an Uint8Array.
*
* @see https://fetch.spec.whatwg.org/#dom-body-bytes
* @returns {Promise<Uint8Array>}
*/
bytes () {
return consume(this, 'bytes')
}
/**
* Consumes and returns the body as an ArrayBuffer.
*
* @see https://fetch.spec.whatwg.org/#dom-body-arraybuffer
* @returns {Promise<ArrayBuffer>}
*/
arrayBuffer () {
return consume(this, 'arrayBuffer')
}
/**
* Not implemented
*
* @see https://fetch.spec.whatwg.org/#dom-body-formdata
* @throws {NotSupportedError}
*/
async formData () {
// TODO: Implement.
throw new NotSupportedError()
}
/**
* Returns true if the body is not null and the body has been consumed.
* Otherwise, returns false.
*
* @see https://fetch.spec.whatwg.org/#dom-body-bodyused
* @readonly
* @returns {boolean}
*/
get bodyUsed () {
return util.isDisturbed(this)
}
/**
* @see https://fetch.spec.whatwg.org/#dom-body-body
* @readonly
* @returns {ReadableStream}
*/
get body () {
if (!this[kBody]) {
this[kBody] = ReadableStreamFrom(this)
if (this[kConsume]) {
// TODO: Is this the best way to force a lock?
this[kBody].getReader() // Ensure stream is locked.
assert(this[kBody].locked)
}
}
return this[kBody]
}
/**
* Dumps the response body by reading `limit` number of bytes.
* @param {object} opts
* @param {number} [opts.limit = 131072] Number of bytes to read.
* @param {AbortSignal} [opts.signal] An AbortSignal to cancel the dump.
* @returns {Promise<null>}
*/
dump (opts) {
const signal = opts?.signal
if (signal != null && (typeof signal !== 'object' || !('aborted' in signal))) {
return Promise.reject(new InvalidArgumentError('signal must be an AbortSignal'))
}
const limit = opts?.limit && Number.isFinite(opts.limit)
? opts.limit
: 128 * 1024
if (signal?.aborted) {
return Promise.reject(signal.reason ?? new AbortError())
}
if (this._readableState.closeEmitted) {
return Promise.resolve(null)
}
return new Promise((resolve, reject) => {
if (
(this[kContentLength] && (this[kContentLength] > limit)) ||
this[kBytesRead] > limit
) {
this.destroy(new AbortError())
}
if (signal) {
const onAbort = () => {
this.destroy(signal.reason ?? new AbortError())
}
signal.addEventListener('abort', onAbort)
this
.on('close', function () {
signal.removeEventListener('abort', onAbort)
if (signal.aborted) {
reject(signal.reason ?? new AbortError())
} else {
resolve(null)
}
})
} else {
this.on('close', resolve)
}
this
.on('error', noop)
.on('data', () => {
if (this[kBytesRead] > limit) {
this.destroy()
}
})
.resume()
})
}
/**
* @param {BufferEncoding} encoding
* @returns {this}
*/
setEncoding (encoding) {
if (Buffer.isEncoding(encoding)) {
this._readableState.encoding = encoding
}
return this
}
}
/**
* @see https://streams.spec.whatwg.org/#readablestream-locked
* @param {BodyReadable} bodyReadable
* @returns {boolean}
*/
function isLocked (bodyReadable) {
// Consume is an implicit lock.
return bodyReadable[kBody]?.locked === true || bodyReadable[kConsume] !== null
}
/**
* @see https://fetch.spec.whatwg.org/#body-unusable
* @param {BodyReadable} bodyReadable
* @returns {boolean}
*/
function isUnusable (bodyReadable) {
return util.isDisturbed(bodyReadable) || isLocked(bodyReadable)
}
/**
* @typedef {'text' | 'json' | 'blob' | 'bytes' | 'arrayBuffer'} ConsumeType
*/
/**
* @template {ConsumeType} T
* @typedef {T extends 'text' ? string :
* T extends 'json' ? unknown :
* T extends 'blob' ? Blob :
* T extends 'arrayBuffer' ? ArrayBuffer :
* T extends 'bytes' ? Uint8Array :
* never
* } ConsumeReturnType
*/
/**
* @typedef {object} Consume
* @property {ConsumeType} type
* @property {BodyReadable} stream
* @property {((value?: any) => void)} resolve
* @property {((err: Error) => void)} reject
* @property {number} length
* @property {Buffer[]} body
*/
/**
* @template {ConsumeType} T
* @param {BodyReadable} stream
* @param {T} type
* @returns {Promise<ConsumeReturnType<T>>}
*/
function consume (stream, type) {
assert(!stream[kConsume])
return new Promise((resolve, reject) => {
if (isUnusable(stream)) {
const rState = stream._readableState
if (rState.destroyed && rState.closeEmitted === false) {
stream
.on('error', reject)
.on('close', () => {
reject(new TypeError('unusable'))
})
} else {
reject(rState.errored ?? new TypeError('unusable'))
}
} else {
queueMicrotask(() => {
stream[kConsume] = {
type,
stream,
resolve,
reject,
length: 0,
body: []
}
stream
.on('error', function (err) {
consumeFinish(this[kConsume], err)
})
.on('close', function () {
if (this[kConsume].body !== null) {
consumeFinish(this[kConsume], new RequestAbortedError())
}
})
consumeStart(stream[kConsume])
})
}
})
}
/**
* @param {Consume} consume
* @returns {void}
*/
function consumeStart (consume) {
if (consume.body === null) {
return
}
const { _readableState: state } = consume.stream
if (state.bufferIndex) {
const start = state.bufferIndex
const end = state.buffer.length
for (let n = start; n < end; n++) {
consumePush(consume, state.buffer[n])
}
} else {
for (const chunk of state.buffer) {
consumePush(consume, chunk)
}
}
if (state.endEmitted) {
consumeEnd(consume, state.encoding) // `this` is undefined in this plain strict-mode call; use the local references
} else {
consume.stream.on('end', function () {
consumeEnd(this[kConsume], this._readableState.encoding)
})
}
consume.stream.resume()
while (consume.stream.read() != null) {
// Loop
}
}
/**
* @param {Buffer[]} chunks
* @param {number} length
* @param {BufferEncoding} [encoding='utf8']
* @returns {string}
*/
function chunksDecode (chunks, length, encoding) {
if (chunks.length === 0 || length === 0) {
return ''
}
const buffer = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks, length)
const bufferLength = buffer.length
// Skip BOM.
const start =
bufferLength > 2 &&
buffer[0] === 0xef &&
buffer[1] === 0xbb &&
buffer[2] === 0xbf
? 3
: 0
if (!encoding || encoding === 'utf8' || encoding === 'utf-8') {
return buffer.utf8Slice(start, bufferLength)
} else {
return buffer.subarray(start, bufferLength).toString(encoding)
}
}
/**
* @param {Buffer[]} chunks
* @param {number} length
* @returns {Uint8Array}
*/
function chunksConcat (chunks, length) {
if (chunks.length === 0 || length === 0) {
return new Uint8Array(0)
}
if (chunks.length === 1) {
// fast-path
return new Uint8Array(chunks[0])
}
const buffer = new Uint8Array(Buffer.allocUnsafeSlow(length).buffer)
let offset = 0
for (let i = 0; i < chunks.length; ++i) {
const chunk = chunks[i]
buffer.set(chunk, offset)
offset += chunk.length
}
return buffer
}
/**
* @param {Consume} consume
* @param {BufferEncoding} encoding
* @returns {void}
*/
function consumeEnd (consume, encoding) {
const { type, body, resolve, stream, length } = consume
try {
if (type === 'text') {
resolve(chunksDecode(body, length, encoding))
} else if (type === 'json') {
resolve(JSON.parse(chunksDecode(body, length, encoding)))
} else if (type === 'arrayBuffer') {
resolve(chunksConcat(body, length).buffer)
} else if (type === 'blob') {
resolve(new Blob(body, { type: stream[kContentType] }))
} else if (type === 'bytes') {
resolve(chunksConcat(body, length))
}
consumeFinish(consume)
} catch (err) {
stream.destroy(err)
}
}
/**
* @param {Consume} consume
* @param {Buffer} chunk
* @returns {void}
*/
function consumePush (consume, chunk) {
consume.length += chunk.length
consume.body.push(chunk)
}
/**
* @param {Consume} consume
* @param {Error} [err]
* @returns {void}
*/
function consumeFinish (consume, err) {
if (consume.body === null) {
return
}
if (err) {
consume.reject(err)
} else {
consume.resolve()
}
// Reset the consume object to allow for garbage collection.
consume.type = null
consume.stream = null
consume.resolve = null
consume.reject = null
consume.length = 0
consume.body = null
}
module.exports = {
Readable: BodyReadable,
chunksDecode
}
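For reference, the BodyReadable mix-in methods above are what back the one-shot body helpers on undici.request() responses; a minimal sketch with a placeholder URL:
'use strict'
const { request } = require('undici')
async function main () {
  const { statusCode, body } = await request('https://example.com/api') // placeholder URL
  if (statusCode !== 200) {
    await body.dump() // drain and discard so the underlying connection can be reused
    return
  }
  const data = await body.json() // the body can only be consumed once
  console.log(data)
}
main().catch(console.error)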

234
node_modules/undici/lib/cache/memory-cache-store.js generated vendored Normal file

@@ -0,0 +1,234 @@
'use strict'
const { Writable } = require('node:stream')
const { EventEmitter } = require('node:events')
const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
/**
* @typedef {import('../../types/cache-interceptor.d.ts').default.CacheKey} CacheKey
* @typedef {import('../../types/cache-interceptor.d.ts').default.CacheValue} CacheValue
* @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
* @typedef {import('../../types/cache-interceptor.d.ts').default.GetResult} GetResult
*/
/**
* @implements {CacheStore}
* @extends {EventEmitter}
*/
class MemoryCacheStore extends EventEmitter {
#maxCount = 1024
#maxSize = 104857600 // 100MB
#maxEntrySize = 5242880 // 5MB
#size = 0
#count = 0
#entries = new Map()
#hasEmittedMaxSizeEvent = false
/**
* @param {import('../../types/cache-interceptor.d.ts').default.MemoryCacheStoreOpts | undefined} [opts]
*/
constructor (opts) {
super()
if (opts) {
if (typeof opts !== 'object') {
throw new TypeError('MemoryCacheStore options must be an object')
}
if (opts.maxCount !== undefined) {
if (
typeof opts.maxCount !== 'number' ||
!Number.isInteger(opts.maxCount) ||
opts.maxCount < 0
) {
throw new TypeError('MemoryCacheStore options.maxCount must be a non-negative integer')
}
this.#maxCount = opts.maxCount
}
if (opts.maxSize !== undefined) {
if (
typeof opts.maxSize !== 'number' ||
!Number.isInteger(opts.maxSize) ||
opts.maxSize < 0
) {
throw new TypeError('MemoryCacheStore options.maxSize must be a non-negative integer')
}
this.#maxSize = opts.maxSize
}
if (opts.maxEntrySize !== undefined) {
if (
typeof opts.maxEntrySize !== 'number' ||
!Number.isInteger(opts.maxEntrySize) ||
opts.maxEntrySize < 0
) {
throw new TypeError('MemoryCacheStore options.maxEntrySize must be a non-negative integer')
}
this.#maxEntrySize = opts.maxEntrySize
}
}
}
/**
* Get the current size of the cache in bytes
* @returns {number} The current size of the cache in bytes
*/
get size () {
return this.#size
}
/**
* Check if the cache is full (either max size or max count reached)
* @returns {boolean} True if the cache is full, false otherwise
*/
isFull () {
return this.#size >= this.#maxSize || this.#count >= this.#maxCount
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} req
* @returns {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined}
*/
get (key) {
assertCacheKey(key)
const topLevelKey = `${key.origin}:${key.path}`
const now = Date.now()
const entries = this.#entries.get(topLevelKey)
const entry = entries ? findEntry(key, entries, now) : null
return entry == null
? undefined
: {
statusMessage: entry.statusMessage,
statusCode: entry.statusCode,
headers: entry.headers,
body: entry.body,
vary: entry.vary ? entry.vary : undefined,
etag: entry.etag,
cacheControlDirectives: entry.cacheControlDirectives,
cachedAt: entry.cachedAt,
staleAt: entry.staleAt,
deleteAt: entry.deleteAt
}
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
* @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} val
* @returns {Writable | undefined}
*/
createWriteStream (key, val) {
assertCacheKey(key)
assertCacheValue(val)
const topLevelKey = `${key.origin}:${key.path}`
const store = this
const entry = { ...key, ...val, body: [], size: 0 }
return new Writable({
write (chunk, encoding, callback) {
if (typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding)
}
entry.size += chunk.byteLength
if (entry.size >= store.#maxEntrySize) {
this.destroy()
} else {
entry.body.push(chunk)
}
callback(null)
},
final (callback) {
let entries = store.#entries.get(topLevelKey)
if (!entries) {
entries = []
store.#entries.set(topLevelKey, entries)
}
const previousEntry = findEntry(key, entries, Date.now())
if (previousEntry) {
const index = entries.indexOf(previousEntry)
entries.splice(index, 1, entry)
store.#size -= previousEntry.size
} else {
entries.push(entry)
store.#count += 1
}
store.#size += entry.size
// Check if cache is full and emit event if needed
if (store.#size > store.#maxSize || store.#count > store.#maxCount) {
// Emit maxSizeExceeded event if we haven't already
if (!store.#hasEmittedMaxSizeEvent) {
store.emit('maxSizeExceeded', {
size: store.#size,
maxSize: store.#maxSize,
count: store.#count,
maxCount: store.#maxCount
})
store.#hasEmittedMaxSizeEvent = true
}
// Perform eviction
for (const [key, entries] of store.#entries) {
for (const entry of entries.splice(0, entries.length / 2)) {
store.#size -= entry.size
store.#count -= 1
}
if (entries.length === 0) {
store.#entries.delete(key)
}
}
// Reset the event flag after eviction
if (store.#size < store.#maxSize && store.#count < store.#maxCount) {
store.#hasEmittedMaxSizeEvent = false
}
}
callback(null)
}
})
}
/**
* @param {CacheKey} key
*/
delete (key) {
if (typeof key !== 'object') {
throw new TypeError(`expected key to be object, got ${typeof key}`)
}
const topLevelKey = `${key.origin}:${key.path}`
for (const entry of this.#entries.get(topLevelKey) ?? []) {
this.#size -= entry.size
this.#count -= 1
}
this.#entries.delete(topLevelKey)
}
}
function findEntry (key, entries, now) {
return entries.find((entry) => (
entry.deleteAt > now &&
entry.method === key.method &&
(entry.vary == null || Object.keys(entry.vary).every(headerName => {
if (entry.vary[headerName] === null) {
return key.headers[headerName] === undefined
}
return entry.vary[headerName] === key.headers[headerName]
}))
))
}
module.exports = MemoryCacheStore
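Assuming this undici version exposes cacheStores and interceptors.cache at its top level (as recent releases do), the in-memory store is typically wired up like this; the size limits and URL are illustrative only:
'use strict'
const { Agent, interceptors, setGlobalDispatcher, request, cacheStores } = require('undici')
const store = new cacheStores.MemoryCacheStore({ maxSize: 10 * 1024 * 1024, maxCount: 256 })
store.on('maxSizeExceeded', ({ size, count }) => {
  console.warn(`cache evicting: ${size} bytes across ${count} entries`)
})
setGlobalDispatcher(new Agent().compose(interceptors.cache({ store })))
// Repeated GETs to the same cacheable URL are now served from memory.
request('https://example.com/') // placeholder URL
  .then(({ body }) => body.dump())
  .catch(console.error)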

461
node_modules/undici/lib/cache/sqlite-cache-store.js generated vendored Normal file

@@ -0,0 +1,461 @@
'use strict'
const { Writable } = require('node:stream')
const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
let DatabaseSync
const VERSION = 3
// 2gb
const MAX_ENTRY_SIZE = 2 * 1000 * 1000 * 1000
/**
* @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
* @implements {CacheStore}
*
* @typedef {{
* id: Readonly<number>,
* body?: Uint8Array
* statusCode: number
* statusMessage: string
* headers?: string
* vary?: string
* etag?: string
* cacheControlDirectives?: string
* cachedAt: number
* staleAt: number
* deleteAt: number
* }} SqliteStoreValue
*/
module.exports = class SqliteCacheStore {
#maxEntrySize = MAX_ENTRY_SIZE
#maxCount = Infinity
/**
* @type {import('node:sqlite').DatabaseSync}
*/
#db
/**
* @type {import('node:sqlite').StatementSync}
*/
#getValuesQuery
/**
* @type {import('node:sqlite').StatementSync}
*/
#updateValueQuery
/**
* @type {import('node:sqlite').StatementSync}
*/
#insertValueQuery
/**
* @type {import('node:sqlite').StatementSync}
*/
#deleteExpiredValuesQuery
/**
* @type {import('node:sqlite').StatementSync}
*/
#deleteByUrlQuery
/**
* @type {import('node:sqlite').StatementSync}
*/
#countEntriesQuery
/**
* @type {import('node:sqlite').StatementSync | null}
*/
#deleteOldValuesQuery
/**
* @param {import('../../types/cache-interceptor.d.ts').default.SqliteCacheStoreOpts | undefined} opts
*/
constructor (opts) {
if (opts) {
if (typeof opts !== 'object') {
throw new TypeError('SqliteCacheStore options must be an object')
}
if (opts.maxEntrySize !== undefined) {
if (
typeof opts.maxEntrySize !== 'number' ||
!Number.isInteger(opts.maxEntrySize) ||
opts.maxEntrySize < 0
) {
throw new TypeError('SqliteCacheStore options.maxEntrySize must be a non-negative integer')
}
if (opts.maxEntrySize > MAX_ENTRY_SIZE) {
throw new TypeError('SqliteCacheStore options.maxEntrySize must be less than 2gb')
}
this.#maxEntrySize = opts.maxEntrySize
}
if (opts.maxCount !== undefined) {
if (
typeof opts.maxCount !== 'number' ||
!Number.isInteger(opts.maxCount) ||
opts.maxCount < 0
) {
throw new TypeError('SqliteCacheStore options.maxCount must be a non-negative integer')
}
this.#maxCount = opts.maxCount
}
}
if (!DatabaseSync) {
DatabaseSync = require('node:sqlite').DatabaseSync
}
this.#db = new DatabaseSync(opts?.location ?? ':memory:')
this.#db.exec(`
PRAGMA journal_mode = WAL;
PRAGMA synchronous = NORMAL;
PRAGMA temp_store = memory;
PRAGMA optimize;
CREATE TABLE IF NOT EXISTS cacheInterceptorV${VERSION} (
-- Data specific to us
id INTEGER PRIMARY KEY AUTOINCREMENT,
url TEXT NOT NULL,
method TEXT NOT NULL,
-- Data returned to the interceptor
body BUF NULL,
deleteAt INTEGER NOT NULL,
statusCode INTEGER NOT NULL,
statusMessage TEXT NOT NULL,
headers TEXT NULL,
cacheControlDirectives TEXT NULL,
etag TEXT NULL,
vary TEXT NULL,
cachedAt INTEGER NOT NULL,
staleAt INTEGER NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_getValuesQuery ON cacheInterceptorV${VERSION}(url, method, deleteAt);
CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_deleteByUrlQuery ON cacheInterceptorV${VERSION}(deleteAt);
`)
this.#getValuesQuery = this.#db.prepare(`
SELECT
id,
body,
deleteAt,
statusCode,
statusMessage,
headers,
etag,
cacheControlDirectives,
vary,
cachedAt,
staleAt
FROM cacheInterceptorV${VERSION}
WHERE
url = ?
AND method = ?
ORDER BY
deleteAt ASC
`)
this.#updateValueQuery = this.#db.prepare(`
UPDATE cacheInterceptorV${VERSION} SET
body = ?,
deleteAt = ?,
statusCode = ?,
statusMessage = ?,
headers = ?,
etag = ?,
cacheControlDirectives = ?,
cachedAt = ?,
staleAt = ?
WHERE
id = ?
`)
this.#insertValueQuery = this.#db.prepare(`
INSERT INTO cacheInterceptorV${VERSION} (
url,
method,
body,
deleteAt,
statusCode,
statusMessage,
headers,
etag,
cacheControlDirectives,
vary,
cachedAt,
staleAt
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`)
this.#deleteByUrlQuery = this.#db.prepare(
`DELETE FROM cacheInterceptorV${VERSION} WHERE url = ?`
)
this.#countEntriesQuery = this.#db.prepare(
`SELECT COUNT(*) AS total FROM cacheInterceptorV${VERSION}`
)
this.#deleteExpiredValuesQuery = this.#db.prepare(
`DELETE FROM cacheInterceptorV${VERSION} WHERE deleteAt <= ?`
)
this.#deleteOldValuesQuery = this.#maxCount === Infinity
? null
: this.#db.prepare(`
DELETE FROM cacheInterceptorV${VERSION}
WHERE id IN (
SELECT
id
FROM cacheInterceptorV${VERSION}
ORDER BY cachedAt DESC
LIMIT ?
)
`)
}
close () {
this.#db.close()
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
* @returns {(import('../../types/cache-interceptor.d.ts').default.GetResult & { body?: Buffer }) | undefined}
*/
get (key) {
assertCacheKey(key)
const value = this.#findValue(key)
return value
? {
body: value.body ? Buffer.from(value.body.buffer, value.body.byteOffset, value.body.byteLength) : undefined,
statusCode: value.statusCode,
statusMessage: value.statusMessage,
headers: value.headers ? JSON.parse(value.headers) : undefined,
etag: value.etag ? value.etag : undefined,
vary: value.vary ? JSON.parse(value.vary) : undefined,
cacheControlDirectives: value.cacheControlDirectives
? JSON.parse(value.cacheControlDirectives)
: undefined,
cachedAt: value.cachedAt,
staleAt: value.staleAt,
deleteAt: value.deleteAt
}
: undefined
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
* @param {import('../../types/cache-interceptor.d.ts').default.CacheValue & { body: null | Buffer | Array<Buffer>}} value
*/
set (key, value) {
assertCacheKey(key)
const url = this.#makeValueUrl(key)
const body = Array.isArray(value.body) ? Buffer.concat(value.body) : value.body
const size = body?.byteLength
if (size && size > this.#maxEntrySize) {
return
}
const existingValue = this.#findValue(key, true)
if (existingValue) {
// Updating an existing response, let's overwrite it
this.#updateValueQuery.run(
body,
value.deleteAt,
value.statusCode,
value.statusMessage,
value.headers ? JSON.stringify(value.headers) : null,
value.etag ? value.etag : null,
value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
value.cachedAt,
value.staleAt,
existingValue.id
)
} else {
this.#prune()
// New response, let's insert it
this.#insertValueQuery.run(
url,
key.method,
body,
value.deleteAt,
value.statusCode,
value.statusMessage,
value.headers ? JSON.stringify(value.headers) : null,
value.etag ? value.etag : null,
value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
value.vary ? JSON.stringify(value.vary) : null,
value.cachedAt,
value.staleAt
)
}
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
* @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} value
* @returns {Writable | undefined}
*/
createWriteStream (key, value) {
assertCacheKey(key)
assertCacheValue(value)
let size = 0
/**
* @type {Buffer[] | null}
*/
const body = []
const store = this
return new Writable({
decodeStrings: true,
write (chunk, encoding, callback) {
size += chunk.byteLength
if (size < store.#maxEntrySize) {
body.push(chunk)
} else {
this.destroy()
}
callback()
},
final (callback) {
store.set(key, { ...value, body })
callback()
}
})
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
*/
delete (key) {
if (typeof key !== 'object') {
throw new TypeError(`expected key to be object, got ${typeof key}`)
}
this.#deleteByUrlQuery.run(this.#makeValueUrl(key))
}
#prune () {
if (Number.isFinite(this.#maxCount) && this.size <= this.#maxCount) {
return 0
}
{
const removed = this.#deleteExpiredValuesQuery.run(Date.now()).changes
if (removed) {
return removed
}
}
{
const removed = this.#deleteOldValuesQuery?.run(Math.max(Math.floor(this.#maxCount * 0.1), 1)).changes
if (removed) {
return removed
}
}
return 0
}
/**
* Counts the number of rows in the cache
* @returns {Number}
*/
get size () {
const { total } = this.#countEntriesQuery.get()
return total
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
* @returns {string}
*/
#makeValueUrl (key) {
return `${key.origin}/${key.path}`
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
* @param {boolean} [canBeExpired=false]
* @returns {SqliteStoreValue | undefined}
*/
#findValue (key, canBeExpired = false) {
const url = this.#makeValueUrl(key)
const { headers, method } = key
/**
* @type {SqliteStoreValue[]}
*/
const values = this.#getValuesQuery.all(url, method)
if (values.length === 0) {
return undefined
}
const now = Date.now()
for (const value of values) {
if (now >= value.deleteAt && !canBeExpired) {
return undefined
}
let matches = true
if (value.vary) {
const vary = JSON.parse(value.vary)
for (const header in vary) {
if (!headerValueEquals(headers[header], vary[header])) {
matches = false
break
}
}
}
if (matches) {
return value
}
}
return undefined
}
}
/**
* @param {string|string[]|null|undefined} lhs
* @param {string|string[]|null|undefined} rhs
* @returns {boolean}
*/
function headerValueEquals (lhs, rhs) {
if (lhs == null && rhs == null) {
return true
}
if ((lhs == null && rhs != null) ||
(lhs != null && rhs == null)) {
return false
}
if (Array.isArray(lhs) && Array.isArray(rhs)) {
if (lhs.length !== rhs.length) {
return false
}
return lhs.every((x, i) => x === rhs[i])
}
return lhs === rhs
}
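Under the same assumption that cacheStores and interceptors.cache are top-level exports, the SQLite store drops in the same way but persists to disk and requires a Node.js build that ships node:sqlite; the file path is hypothetical:
'use strict'
const { Agent, interceptors, setGlobalDispatcher, cacheStores } = require('undici')
const store = new cacheStores.SqliteCacheStore({
  location: './http-cache.sqlite', // hypothetical path; the default is ':memory:'
  maxCount: 5000
})
setGlobalDispatcher(new Agent().compose(interceptors.cache({ store })))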

134
node_modules/undici/lib/core/connect.js generated vendored Normal file

@@ -0,0 +1,134 @@
'use strict'
const net = require('node:net')
const assert = require('node:assert')
const util = require('./util')
const { InvalidArgumentError } = require('./errors')
let tls // include tls conditionally since it is not always available
// TODO: session re-use does not wait for the first
// connection to resolve the session and might therefore
// resolve the same servername multiple times even when
// re-use is enabled.
const SessionCache = class WeakSessionCache {
constructor (maxCachedSessions) {
this._maxCachedSessions = maxCachedSessions
this._sessionCache = new Map()
this._sessionRegistry = new FinalizationRegistry((key) => {
if (this._sessionCache.size < this._maxCachedSessions) {
return
}
const ref = this._sessionCache.get(key)
if (ref !== undefined && ref.deref() === undefined) {
this._sessionCache.delete(key)
}
})
}
get (sessionKey) {
const ref = this._sessionCache.get(sessionKey)
return ref ? ref.deref() : null
}
set (sessionKey, session) {
if (this._maxCachedSessions === 0) {
return
}
this._sessionCache.set(sessionKey, new WeakRef(session))
this._sessionRegistry.register(session, sessionKey)
}
}
function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, session: customSession, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
}
const options = { path: socketPath, ...opts }
const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
timeout = timeout == null ? 10e3 : timeout
allowH2 = allowH2 != null ? allowH2 : false
return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
let socket
if (protocol === 'https:') {
if (!tls) {
tls = require('node:tls')
}
servername = servername || options.servername || util.getServerName(host) || null
const sessionKey = servername || hostname
assert(sessionKey)
const session = customSession || sessionCache.get(sessionKey) || null
port = port || 443
socket = tls.connect({
highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
...options,
servername,
session,
localAddress,
ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
socket: httpSocket, // upgrade socket connection
port,
host: hostname
})
socket
.on('session', function (session) {
// TODO (fix): Can a session become invalid once established? Don't think so?
sessionCache.set(sessionKey, session)
})
} else {
assert(!httpSocket, 'httpSocket can only be sent on TLS update')
port = port || 80
socket = net.connect({
highWaterMark: 64 * 1024, // Same as nodejs fs streams.
...options,
localAddress,
port,
host: hostname
})
}
// Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
if (options.keepAlive == null || options.keepAlive) {
const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
socket.setKeepAlive(true, keepAliveInitialDelay)
}
const clearConnectTimeout = util.setupConnectTimeout(new WeakRef(socket), { timeout, hostname, port })
socket
.setNoDelay(true)
.once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
queueMicrotask(clearConnectTimeout)
if (callback) {
const cb = callback
callback = null
cb(null, this)
}
})
.on('error', function (err) {
queueMicrotask(clearConnectTimeout)
if (callback) {
const cb = callback
callback = null
cb(err)
}
})
return socket
}
}
module.exports = buildConnector
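buildConnector is also exported from the top-level package, so a client can be handed a customised connector, for example one with a private CA and a shorter connect timeout; the CA path and origin below are placeholders:
'use strict'
const { Client, buildConnector } = require('undici')
const { readFileSync } = require('node:fs')
const connect = buildConnector({
  ca: readFileSync('./my-private-ca.pem', 'utf8'), // hypothetical CA bundle
  timeout: 5e3, // fail the TCP/TLS connect after 5s instead of the 10s default
  maxCachedSessions: 50
})
const client = new Client('https://internal.example.com', { connect })
// client.request({ path: '/', method: 'GET' }) would now use the custom connector.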

143
node_modules/undici/lib/core/constants.js generated vendored Normal file

@@ -0,0 +1,143 @@
'use strict'
/**
* @see https://developer.mozilla.org/docs/Web/HTTP/Headers
*/
const wellknownHeaderNames = /** @type {const} */ ([
'Accept',
'Accept-Encoding',
'Accept-Language',
'Accept-Ranges',
'Access-Control-Allow-Credentials',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Access-Control-Expose-Headers',
'Access-Control-Max-Age',
'Access-Control-Request-Headers',
'Access-Control-Request-Method',
'Age',
'Allow',
'Alt-Svc',
'Alt-Used',
'Authorization',
'Cache-Control',
'Clear-Site-Data',
'Connection',
'Content-Disposition',
'Content-Encoding',
'Content-Language',
'Content-Length',
'Content-Location',
'Content-Range',
'Content-Security-Policy',
'Content-Security-Policy-Report-Only',
'Content-Type',
'Cookie',
'Cross-Origin-Embedder-Policy',
'Cross-Origin-Opener-Policy',
'Cross-Origin-Resource-Policy',
'Date',
'Device-Memory',
'Downlink',
'ECT',
'ETag',
'Expect',
'Expect-CT',
'Expires',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Keep-Alive',
'Last-Modified',
'Link',
'Location',
'Max-Forwards',
'Origin',
'Permissions-Policy',
'Pragma',
'Proxy-Authenticate',
'Proxy-Authorization',
'RTT',
'Range',
'Referer',
'Referrer-Policy',
'Refresh',
'Retry-After',
'Sec-WebSocket-Accept',
'Sec-WebSocket-Extensions',
'Sec-WebSocket-Key',
'Sec-WebSocket-Protocol',
'Sec-WebSocket-Version',
'Server',
'Server-Timing',
'Service-Worker-Allowed',
'Service-Worker-Navigation-Preload',
'Set-Cookie',
'SourceMap',
'Strict-Transport-Security',
'Supports-Loading-Mode',
'TE',
'Timing-Allow-Origin',
'Trailer',
'Transfer-Encoding',
'Upgrade',
'Upgrade-Insecure-Requests',
'User-Agent',
'Vary',
'Via',
'WWW-Authenticate',
'X-Content-Type-Options',
'X-DNS-Prefetch-Control',
'X-Frame-Options',
'X-Permitted-Cross-Domain-Policies',
'X-Powered-By',
'X-Requested-With',
'X-XSS-Protection'
])
/** @type {Record<typeof wellknownHeaderNames[number]|Lowercase<typeof wellknownHeaderNames[number]>, string>} */
const headerNameLowerCasedRecord = {}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
/**
* @type {Record<Lowercase<typeof wellknownHeaderNames[number]>, Buffer>}
*/
const wellknownHeaderNameBuffers = {}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(wellknownHeaderNameBuffers, null)
/**
* @param {string} header Lowercased header
* @returns {Buffer}
*/
function getHeaderNameAsBuffer (header) {
let buffer = wellknownHeaderNameBuffers[header]
if (buffer === undefined) {
buffer = Buffer.from(header)
}
return buffer
}
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = wellknownHeaderNames[i]
const lowerCasedKey = key.toLowerCase()
headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
lowerCasedKey
}
module.exports = {
wellknownHeaderNames,
headerNameLowerCasedRecord,
getHeaderNameAsBuffer
}
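The table above exists so hot paths can skip String.prototype.toLowerCase() for well-known header names; the same pattern in miniature, kept standalone so it does not import the internal module:
'use strict'
const known = ['Content-Type', 'Accept', 'User-Agent']
const lowerCased = Object.create(null) // null prototype, like headerNameLowerCasedRecord
for (const name of known) {
  lowerCased[name] = lowerCased[name.toLowerCase()] = name.toLowerCase()
}
function normalizeHeaderName (name) {
  return lowerCased[name] ?? name.toLowerCase() // fast path for known names
}
console.log(normalizeHeaderName('Content-Type')) // 'content-type'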

198
node_modules/undici/lib/core/diagnostics.js generated vendored Normal file

@@ -0,0 +1,198 @@
'use strict'
const diagnosticsChannel = require('node:diagnostics_channel')
const util = require('node:util')
const undiciDebugLog = util.debuglog('undici')
const fetchDebuglog = util.debuglog('fetch')
const websocketDebuglog = util.debuglog('websocket')
const channels = {
// Client
beforeConnect: diagnosticsChannel.channel('undici:client:beforeConnect'),
connected: diagnosticsChannel.channel('undici:client:connected'),
connectError: diagnosticsChannel.channel('undici:client:connectError'),
sendHeaders: diagnosticsChannel.channel('undici:client:sendHeaders'),
// Request
create: diagnosticsChannel.channel('undici:request:create'),
bodySent: diagnosticsChannel.channel('undici:request:bodySent'),
bodyChunkSent: diagnosticsChannel.channel('undici:request:bodyChunkSent'),
bodyChunkReceived: diagnosticsChannel.channel('undici:request:bodyChunkReceived'),
headers: diagnosticsChannel.channel('undici:request:headers'),
trailers: diagnosticsChannel.channel('undici:request:trailers'),
error: diagnosticsChannel.channel('undici:request:error'),
// WebSocket
open: diagnosticsChannel.channel('undici:websocket:open'),
close: diagnosticsChannel.channel('undici:websocket:close'),
socketError: diagnosticsChannel.channel('undici:websocket:socket_error'),
ping: diagnosticsChannel.channel('undici:websocket:ping'),
pong: diagnosticsChannel.channel('undici:websocket:pong')
}
let isTrackingClientEvents = false
function trackClientEvents (debugLog = undiciDebugLog) {
if (isTrackingClientEvents) {
return
}
isTrackingClientEvents = true
diagnosticsChannel.subscribe('undici:client:beforeConnect',
evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debugLog(
'connecting to %s%s using %s%s',
host,
port ? `:${port}` : '',
protocol,
version
)
})
diagnosticsChannel.subscribe('undici:client:connected',
evt => {
const {
connectParams: { version, protocol, port, host }
} = evt
debugLog(
'connected to %s%s using %s%s',
host,
port ? `:${port}` : '',
protocol,
version
)
})
diagnosticsChannel.subscribe('undici:client:connectError',
evt => {
const {
connectParams: { version, protocol, port, host },
error
} = evt
debugLog(
'connection to %s%s using %s%s errored - %s',
host,
port ? `:${port}` : '',
protocol,
version,
error.message
)
})
diagnosticsChannel.subscribe('undici:client:sendHeaders',
evt => {
const {
request: { method, path, origin }
} = evt
debugLog('sending request to %s %s%s', method, origin, path)
})
}
let isTrackingRequestEvents = false
function trackRequestEvents (debugLog = undiciDebugLog) {
if (isTrackingRequestEvents) {
return
}
isTrackingRequestEvents = true
diagnosticsChannel.subscribe('undici:request:headers',
evt => {
const {
request: { method, path, origin },
response: { statusCode }
} = evt
debugLog(
'received response to %s %s%s - HTTP %d',
method,
origin,
path,
statusCode
)
})
diagnosticsChannel.subscribe('undici:request:trailers',
evt => {
const {
request: { method, path, origin }
} = evt
debugLog('trailers received from %s %s%s', method, origin, path)
})
diagnosticsChannel.subscribe('undici:request:error',
evt => {
const {
request: { method, path, origin },
error
} = evt
debugLog(
'request to %s %s%s errored - %s',
method,
origin,
path,
error.message
)
})
}
let isTrackingWebSocketEvents = false
function trackWebSocketEvents (debugLog = websocketDebuglog) {
if (isTrackingWebSocketEvents) {
return
}
isTrackingWebSocketEvents = true
diagnosticsChannel.subscribe('undici:websocket:open',
evt => {
const {
address: { address, port }
} = evt
debugLog('connection opened %s%s', address, port ? `:${port}` : '')
})
diagnosticsChannel.subscribe('undici:websocket:close',
evt => {
const { websocket, code, reason } = evt
debugLog(
'closed connection to %s - %s %s',
websocket.url,
code,
reason
)
})
diagnosticsChannel.subscribe('undici:websocket:socket_error',
err => {
debugLog('connection errored - %s', err.message)
})
diagnosticsChannel.subscribe('undici:websocket:ping',
evt => {
debugLog('ping received')
})
diagnosticsChannel.subscribe('undici:websocket:pong',
evt => {
debugLog('pong received')
})
}
if (undiciDebugLog.enabled || fetchDebuglog.enabled) {
trackClientEvents(fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog)
trackRequestEvents(fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog)
}
if (websocketDebuglog.enabled) {
trackClientEvents(undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog)
trackWebSocketEvents(websocketDebuglog)
}
module.exports = {
channels
}
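Besides the NODE_DEBUG=undici / NODE_DEBUG=fetch logging wired up above, the same channels can be subscribed to directly from application code:
'use strict'
const diagnosticsChannel = require('node:diagnostics_channel')
// Observe every outgoing undici request without touching application code.
diagnosticsChannel.subscribe('undici:request:create', ({ request }) => {
  console.log(`[undici] ${request.method} ${request.origin}${request.path}`)
})
diagnosticsChannel.subscribe('undici:request:error', ({ request, error }) => {
  console.error(`[undici] ${request.method} ${request.path} failed: ${error.message}`)
})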

448
node_modules/undici/lib/core/errors.js generated vendored Normal file

@@ -0,0 +1,448 @@
'use strict'
const kUndiciError = Symbol.for('undici.error.UND_ERR')
class UndiciError extends Error {
constructor (message, options) {
super(message, options)
this.name = 'UndiciError'
this.code = 'UND_ERR'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kUndiciError] === true
}
get [kUndiciError] () {
return true
}
}
const kConnectTimeoutError = Symbol.for('undici.error.UND_ERR_CONNECT_TIMEOUT')
class ConnectTimeoutError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ConnectTimeoutError'
this.message = message || 'Connect Timeout Error'
this.code = 'UND_ERR_CONNECT_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kConnectTimeoutError] === true
}
get [kConnectTimeoutError] () {
return true
}
}
const kHeadersTimeoutError = Symbol.for('undici.error.UND_ERR_HEADERS_TIMEOUT')
class HeadersTimeoutError extends UndiciError {
constructor (message) {
super(message)
this.name = 'HeadersTimeoutError'
this.message = message || 'Headers Timeout Error'
this.code = 'UND_ERR_HEADERS_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHeadersTimeoutError] === true
}
get [kHeadersTimeoutError] () {
return true
}
}
const kHeadersOverflowError = Symbol.for('undici.error.UND_ERR_HEADERS_OVERFLOW')
class HeadersOverflowError extends UndiciError {
constructor (message) {
super(message)
this.name = 'HeadersOverflowError'
this.message = message || 'Headers Overflow Error'
this.code = 'UND_ERR_HEADERS_OVERFLOW'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHeadersOverflowError] === true
}
get [kHeadersOverflowError] () {
return true
}
}
const kBodyTimeoutError = Symbol.for('undici.error.UND_ERR_BODY_TIMEOUT')
class BodyTimeoutError extends UndiciError {
constructor (message) {
super(message)
this.name = 'BodyTimeoutError'
this.message = message || 'Body Timeout Error'
this.code = 'UND_ERR_BODY_TIMEOUT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kBodyTimeoutError] === true
}
get [kBodyTimeoutError] () {
return true
}
}
const kInvalidArgumentError = Symbol.for('undici.error.UND_ERR_INVALID_ARG')
class InvalidArgumentError extends UndiciError {
constructor (message) {
super(message)
this.name = 'InvalidArgumentError'
this.message = message || 'Invalid Argument Error'
this.code = 'UND_ERR_INVALID_ARG'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInvalidArgumentError] === true
}
get [kInvalidArgumentError] () {
return true
}
}
const kInvalidReturnValueError = Symbol.for('undici.error.UND_ERR_INVALID_RETURN_VALUE')
class InvalidReturnValueError extends UndiciError {
constructor (message) {
super(message)
this.name = 'InvalidReturnValueError'
this.message = message || 'Invalid Return Value Error'
this.code = 'UND_ERR_INVALID_RETURN_VALUE'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInvalidReturnValueError] === true
}
get [kInvalidReturnValueError] () {
return true
}
}
const kAbortError = Symbol.for('undici.error.UND_ERR_ABORT')
class AbortError extends UndiciError {
constructor (message) {
super(message)
this.name = 'AbortError'
this.message = message || 'The operation was aborted'
this.code = 'UND_ERR_ABORT'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kAbortError] === true
}
get [kAbortError] () {
return true
}
}
const kRequestAbortedError = Symbol.for('undici.error.UND_ERR_ABORTED')
class RequestAbortedError extends AbortError {
constructor (message) {
super(message)
this.name = 'AbortError'
this.message = message || 'Request aborted'
this.code = 'UND_ERR_ABORTED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestAbortedError] === true
}
get [kRequestAbortedError] () {
return true
}
}
const kInformationalError = Symbol.for('undici.error.UND_ERR_INFO')
class InformationalError extends UndiciError {
constructor (message) {
super(message)
this.name = 'InformationalError'
this.message = message || 'Request information'
this.code = 'UND_ERR_INFO'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kInformationalError] === true
}
get [kInformationalError] () {
return true
}
}
const kRequestContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_REQ_CONTENT_LENGTH_MISMATCH')
class RequestContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
this.name = 'RequestContentLengthMismatchError'
this.message = message || 'Request body length does not match content-length header'
this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestContentLengthMismatchError] === true
}
get [kRequestContentLengthMismatchError] () {
return true
}
}
const kResponseContentLengthMismatchError = Symbol.for('undici.error.UND_ERR_RES_CONTENT_LENGTH_MISMATCH')
class ResponseContentLengthMismatchError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ResponseContentLengthMismatchError'
this.message = message || 'Response body length does not match content-length header'
this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseContentLengthMismatchError] === true
}
get [kResponseContentLengthMismatchError] () {
return true
}
}
const kClientDestroyedError = Symbol.for('undici.error.UND_ERR_DESTROYED')
class ClientDestroyedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ClientDestroyedError'
this.message = message || 'The client is destroyed'
this.code = 'UND_ERR_DESTROYED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kClientDestroyedError] === true
}
get [kClientDestroyedError] () {
return true
}
}
const kClientClosedError = Symbol.for('undici.error.UND_ERR_CLOSED')
class ClientClosedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ClientClosedError'
this.message = message || 'The client is closed'
this.code = 'UND_ERR_CLOSED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kClientClosedError] === true
}
get [kClientClosedError] () {
return true
}
}
const kSocketError = Symbol.for('undici.error.UND_ERR_SOCKET')
class SocketError extends UndiciError {
constructor (message, socket) {
super(message)
this.name = 'SocketError'
this.message = message || 'Socket error'
this.code = 'UND_ERR_SOCKET'
this.socket = socket
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kSocketError] === true
}
get [kSocketError] () {
return true
}
}
const kNotSupportedError = Symbol.for('undici.error.UND_ERR_NOT_SUPPORTED')
class NotSupportedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'NotSupportedError'
this.message = message || 'Not supported error'
this.code = 'UND_ERR_NOT_SUPPORTED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kNotSupportedError] === true
}
get [kNotSupportedError] () {
return true
}
}
const kBalancedPoolMissingUpstreamError = Symbol.for('undici.error.UND_ERR_BPL_MISSING_UPSTREAM')
class BalancedPoolMissingUpstreamError extends UndiciError {
constructor (message) {
super(message)
this.name = 'MissingUpstreamError'
this.message = message || 'No upstream has been added to the BalancedPool'
this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kBalancedPoolMissingUpstreamError] === true
}
get [kBalancedPoolMissingUpstreamError] () {
return true
}
}
const kHTTPParserError = Symbol.for('undici.error.UND_ERR_HTTP_PARSER')
class HTTPParserError extends Error {
constructor (message, code, data) {
super(message)
this.name = 'HTTPParserError'
this.code = code ? `HPE_${code}` : undefined
this.data = data ? data.toString() : undefined
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kHTTPParserError] === true
}
get [kHTTPParserError] () {
return true
}
}
const kResponseExceededMaxSizeError = Symbol.for('undici.error.UND_ERR_RES_EXCEEDED_MAX_SIZE')
class ResponseExceededMaxSizeError extends UndiciError {
constructor (message) {
super(message)
this.name = 'ResponseExceededMaxSizeError'
this.message = message || 'Response content exceeded max size'
this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseExceededMaxSizeError] === true
}
get [kResponseExceededMaxSizeError] () {
return true
}
}
const kRequestRetryError = Symbol.for('undici.error.UND_ERR_REQ_RETRY')
class RequestRetryError extends UndiciError {
constructor (message, code, { headers, data }) {
super(message)
this.name = 'RequestRetryError'
this.message = message || 'Request retry error'
this.code = 'UND_ERR_REQ_RETRY'
this.statusCode = code
this.data = data
this.headers = headers
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kRequestRetryError] === true
}
get [kRequestRetryError] () {
return true
}
}
const kResponseError = Symbol.for('undici.error.UND_ERR_RESPONSE')
class ResponseError extends UndiciError {
constructor (message, code, { headers, body }) {
super(message)
this.name = 'ResponseError'
this.message = message || 'Response error'
this.code = 'UND_ERR_RESPONSE'
this.statusCode = code
this.body = body
this.headers = headers
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kResponseError] === true
}
get [kResponseError] () {
return true
}
}
const kSecureProxyConnectionError = Symbol.for('undici.error.UND_ERR_PRX_TLS')
class SecureProxyConnectionError extends UndiciError {
constructor (cause, message, options = {}) {
super(message, { cause, ...options })
this.name = 'SecureProxyConnectionError'
this.message = message || 'Secure Proxy Connection failed'
this.code = 'UND_ERR_PRX_TLS'
this.cause = cause
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kSecureProxyConnectionError] === true
}
get [kSecureProxyConnectionError] () {
return true
}
}
const kMaxOriginsReachedError = Symbol.for('undici.error.UND_ERR_MAX_ORIGINS_REACHED')
class MaxOriginsReachedError extends UndiciError {
constructor (message) {
super(message)
this.name = 'MaxOriginsReachedError'
this.message = message || 'Maximum allowed origins reached'
this.code = 'UND_ERR_MAX_ORIGINS_REACHED'
}
static [Symbol.hasInstance] (instance) {
return instance && instance[kMaxOriginsReachedError] === true
}
get [kMaxOriginsReachedError] () {
return true
}
}
module.exports = {
AbortError,
HTTPParserError,
UndiciError,
HeadersTimeoutError,
HeadersOverflowError,
BodyTimeoutError,
RequestContentLengthMismatchError,
ConnectTimeoutError,
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError,
ClientDestroyedError,
ClientClosedError,
InformationalError,
SocketError,
NotSupportedError,
ResponseContentLengthMismatchError,
BalancedPoolMissingUpstreamError,
ResponseExceededMaxSizeError,
RequestRetryError,
ResponseError,
SecureProxyConnectionError,
MaxOriginsReachedError
}
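These error classes (and their stable UND_ERR_* codes) are re-exported as undici.errors, which is the usual way to branch on failures; the URL and timeout below are placeholders:
'use strict'
const { request, errors } = require('undici')
request('https://example.invalid/', { headersTimeout: 1000 }) // placeholder URL
  .then(({ body }) => body.dump())
  .catch((err) => {
    if (err instanceof errors.HeadersTimeoutError) {
      console.error('server took too long to answer:', err.code) // UND_ERR_HEADERS_TIMEOUT
    } else if (err instanceof errors.UndiciError) {
      console.error('undici error:', err.code)
    } else {
      console.error('unexpected error:', err)
    }
  })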

412
node_modules/undici/lib/core/request.js generated vendored Normal file

@@ -0,0 +1,412 @@
'use strict'
const {
InvalidArgumentError,
NotSupportedError
} = require('./errors')
const assert = require('node:assert')
const {
isValidHTTPToken,
isValidHeaderValue,
isStream,
destroy,
isBuffer,
isFormDataLike,
isIterable,
isBlobLike,
serializePathWithQuery,
assertRequestHandler,
getServerName,
normalizedMethodRecords,
getProtocolFromUrlString
} = require('./util')
const { channels } = require('./diagnostics.js')
const { headerNameLowerCasedRecord } = require('./constants')
// Verifies that a given path is valid and does not contain control chars \x00 to \x20
const invalidPathRegex = /[^\u0021-\u00ff]/
const kHandler = Symbol('handler')
class Request {
constructor (origin, {
path,
method,
body,
headers,
query,
idempotent,
blocking,
upgrade,
headersTimeout,
bodyTimeout,
reset,
expectContinue,
servername,
throwOnError,
maxRedirections
}, handler) {
if (typeof path !== 'string') {
throw new InvalidArgumentError('path must be a string')
} else if (
path[0] !== '/' &&
!(path.startsWith('http://') || path.startsWith('https://')) &&
method !== 'CONNECT'
) {
throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
} else if (invalidPathRegex.test(path)) {
throw new InvalidArgumentError('invalid request path')
}
if (typeof method !== 'string') {
throw new InvalidArgumentError('method must be a string')
} else if (normalizedMethodRecords[method] === undefined && !isValidHTTPToken(method)) {
throw new InvalidArgumentError('invalid request method')
}
if (upgrade && typeof upgrade !== 'string') {
throw new InvalidArgumentError('upgrade must be a string')
}
if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
throw new InvalidArgumentError('invalid headersTimeout')
}
if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
throw new InvalidArgumentError('invalid bodyTimeout')
}
if (reset != null && typeof reset !== 'boolean') {
throw new InvalidArgumentError('invalid reset')
}
if (expectContinue != null && typeof expectContinue !== 'boolean') {
throw new InvalidArgumentError('invalid expectContinue')
}
if (throwOnError != null) {
throw new InvalidArgumentError('invalid throwOnError')
}
if (maxRedirections != null && maxRedirections !== 0) {
throw new InvalidArgumentError('maxRedirections is not supported, use the redirect interceptor')
}
this.headersTimeout = headersTimeout
this.bodyTimeout = bodyTimeout
this.method = method
this.abort = null
if (body == null) {
this.body = null
} else if (isStream(body)) {
this.body = body
const rState = this.body._readableState
if (!rState || !rState.autoDestroy) {
this.endHandler = function autoDestroy () {
destroy(this)
}
this.body.on('end', this.endHandler)
}
this.errorHandler = err => {
if (this.abort) {
this.abort(err)
} else {
this.error = err
}
}
this.body.on('error', this.errorHandler)
} else if (isBuffer(body)) {
this.body = body.byteLength ? body : null
} else if (ArrayBuffer.isView(body)) {
this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
} else if (body instanceof ArrayBuffer) {
this.body = body.byteLength ? Buffer.from(body) : null
} else if (typeof body === 'string') {
this.body = body.length ? Buffer.from(body) : null
} else if (isFormDataLike(body) || isIterable(body) || isBlobLike(body)) {
this.body = body
} else {
throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
}
this.completed = false
this.aborted = false
this.upgrade = upgrade || null
this.path = query ? serializePathWithQuery(path, query) : path
// TODO: shall we maybe standardize it to an URL object?
this.origin = origin
this.protocol = getProtocolFromUrlString(origin)
this.idempotent = idempotent == null
? method === 'HEAD' || method === 'GET'
: idempotent
this.blocking = blocking ?? this.method !== 'HEAD'
this.reset = reset == null ? null : reset
this.host = null
this.contentLength = null
this.contentType = null
this.headers = []
// Only for H2
this.expectContinue = expectContinue != null ? expectContinue : false
if (Array.isArray(headers)) {
if (headers.length % 2 !== 0) {
throw new InvalidArgumentError('headers array must be even')
}
for (let i = 0; i < headers.length; i += 2) {
processHeader(this, headers[i], headers[i + 1])
}
} else if (headers && typeof headers === 'object') {
if (headers[Symbol.iterator]) {
for (const header of headers) {
if (!Array.isArray(header) || header.length !== 2) {
throw new InvalidArgumentError('headers must be in key-value pair format')
}
processHeader(this, header[0], header[1])
}
} else {
const keys = Object.keys(headers)
for (let i = 0; i < keys.length; ++i) {
processHeader(this, keys[i], headers[keys[i]])
}
}
} else if (headers != null) {
throw new InvalidArgumentError('headers must be an object or an array')
}
assertRequestHandler(handler, method, upgrade)
this.servername = servername || getServerName(this.host) || null
this[kHandler] = handler
if (channels.create.hasSubscribers) {
channels.create.publish({ request: this })
}
}
onBodySent (chunk) {
if (channels.bodyChunkSent.hasSubscribers) {
channels.bodyChunkSent.publish({ request: this, chunk })
}
if (this[kHandler].onBodySent) {
try {
return this[kHandler].onBodySent(chunk)
} catch (err) {
this.abort(err)
}
}
}
onRequestSent () {
if (channels.bodySent.hasSubscribers) {
channels.bodySent.publish({ request: this })
}
if (this[kHandler].onRequestSent) {
try {
return this[kHandler].onRequestSent()
} catch (err) {
this.abort(err)
}
}
}
onConnect (abort) {
assert(!this.aborted)
assert(!this.completed)
if (this.error) {
abort(this.error)
} else {
this.abort = abort
return this[kHandler].onConnect(abort)
}
}
onResponseStarted () {
return this[kHandler].onResponseStarted?.()
}
onHeaders (statusCode, headers, resume, statusText) {
assert(!this.aborted)
assert(!this.completed)
if (channels.headers.hasSubscribers) {
channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
}
try {
return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
} catch (err) {
this.abort(err)
}
}
onData (chunk) {
assert(!this.aborted)
assert(!this.completed)
if (channels.bodyChunkReceived.hasSubscribers) {
channels.bodyChunkReceived.publish({ request: this, chunk })
}
try {
return this[kHandler].onData(chunk)
} catch (err) {
this.abort(err)
return false
}
}
onUpgrade (statusCode, headers, socket) {
assert(!this.aborted)
assert(!this.completed)
return this[kHandler].onUpgrade(statusCode, headers, socket)
}
onComplete (trailers) {
this.onFinally()
assert(!this.aborted)
assert(!this.completed)
this.completed = true
if (channels.trailers.hasSubscribers) {
channels.trailers.publish({ request: this, trailers })
}
try {
return this[kHandler].onComplete(trailers)
} catch (err) {
// TODO (fix): This might be a bad idea?
this.onError(err)
}
}
onError (error) {
this.onFinally()
if (channels.error.hasSubscribers) {
channels.error.publish({ request: this, error })
}
if (this.aborted) {
return
}
this.aborted = true
return this[kHandler].onError(error)
}
onFinally () {
if (this.errorHandler) {
this.body.off('error', this.errorHandler)
this.errorHandler = null
}
if (this.endHandler) {
this.body.off('end', this.endHandler)
this.endHandler = null
}
}
addHeader (key, value) {
processHeader(this, key, value)
return this
}
}
function processHeader (request, key, val) {
if (val && (typeof val === 'object' && !Array.isArray(val))) {
throw new InvalidArgumentError(`invalid ${key} header`)
} else if (val === undefined) {
return
}
let headerName = headerNameLowerCasedRecord[key]
if (headerName === undefined) {
headerName = key.toLowerCase()
if (headerNameLowerCasedRecord[headerName] === undefined && !isValidHTTPToken(headerName)) {
throw new InvalidArgumentError('invalid header key')
}
}
if (Array.isArray(val)) {
const arr = []
for (let i = 0; i < val.length; i++) {
if (typeof val[i] === 'string') {
if (!isValidHeaderValue(val[i])) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
arr.push(val[i])
} else if (val[i] === null) {
arr.push('')
} else if (typeof val[i] === 'object') {
throw new InvalidArgumentError(`invalid ${key} header`)
} else {
arr.push(`${val[i]}`)
}
}
val = arr
} else if (typeof val === 'string') {
if (!isValidHeaderValue(val)) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
} else if (val === null) {
val = ''
} else {
val = `${val}`
}
if (request.host === null && headerName === 'host') {
if (typeof val !== 'string') {
throw new InvalidArgumentError('invalid host header')
}
// Consumed by Client
request.host = val
} else if (request.contentLength === null && headerName === 'content-length') {
request.contentLength = parseInt(val, 10)
if (!Number.isFinite(request.contentLength)) {
throw new InvalidArgumentError('invalid content-length header')
}
} else if (request.contentType === null && headerName === 'content-type') {
request.contentType = val
request.headers.push(key, val)
} else if (headerName === 'transfer-encoding' || headerName === 'keep-alive' || headerName === 'upgrade') {
throw new InvalidArgumentError(`invalid ${headerName} header`)
} else if (headerName === 'connection') {
const value = typeof val === 'string' ? val.toLowerCase() : null
if (value !== 'close' && value !== 'keep-alive') {
throw new InvalidArgumentError('invalid connection header')
}
if (value === 'close') {
request.reset = true
}
} else if (headerName === 'expect') {
throw new NotSupportedError('expect header not supported')
} else {
request.headers.push(key, val)
}
}
module.exports = Request
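The header handling above accepts three shapes: a flat array of even length, a plain object (whose values may be strings, numbers, or arrays of strings), or an iterable of [name, value] pairs, while transfer-encoding, keep-alive, upgrade and expect are rejected outright. Below is a minimal sketch that feeds each shape through undici's public request() API; the endpoint URL is a placeholder, not part of this repository.
const { request } = require('undici')
async function demoHeaders () {
  const url = 'http://127.0.0.1:8080/ping' // placeholder endpoint for illustration
  // 1. Flat array: [name, value, name, value, ...] — must have an even length.
  await request(url, { headers: ['accept', 'application/json', 'x-trace-id', 'abc'] })
  // 2. Plain object: non-string values are stringified by processHeader().
  await request(url, { headers: { accept: 'application/json', 'x-retry': 2 } })
  // 3. Iterable of [name, value] pairs, e.g. a Map.
  await request(url, { headers: new Map([['accept', 'application/json']]) })
  // Hop-by-hop headers are rejected before anything is sent:
  await request(url, { headers: { 'transfer-encoding': 'chunked' } })
    .catch(err => console.error(err.message)) // InvalidArgumentError: invalid transfer-encoding header
}
demoHeaders().catch(console.error)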

68
node_modules/undici/lib/core/symbols.js generated vendored Normal file

@@ -0,0 +1,68 @@
'use strict'
module.exports = {
kClose: Symbol('close'),
kDestroy: Symbol('destroy'),
kDispatch: Symbol('dispatch'),
kUrl: Symbol('url'),
kWriting: Symbol('writing'),
kResuming: Symbol('resuming'),
kQueue: Symbol('queue'),
kConnect: Symbol('connect'),
kConnecting: Symbol('connecting'),
kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
kKeepAlive: Symbol('keep alive'),
kHeadersTimeout: Symbol('headers timeout'),
kBodyTimeout: Symbol('body timeout'),
kServerName: Symbol('server name'),
kLocalAddress: Symbol('local address'),
kHost: Symbol('host'),
kNoRef: Symbol('no ref'),
kBodyUsed: Symbol('used'),
kBody: Symbol('abstracted request body'),
kRunning: Symbol('running'),
kBlocking: Symbol('blocking'),
kPending: Symbol('pending'),
kSize: Symbol('size'),
kBusy: Symbol('busy'),
kQueued: Symbol('queued'),
kFree: Symbol('free'),
kConnected: Symbol('connected'),
kClosed: Symbol('closed'),
kNeedDrain: Symbol('need drain'),
kReset: Symbol('reset'),
kDestroyed: Symbol.for('nodejs.stream.destroyed'),
kResume: Symbol('resume'),
kOnError: Symbol('on error'),
kMaxHeadersSize: Symbol('max headers size'),
kRunningIdx: Symbol('running index'),
kPendingIdx: Symbol('pending index'),
kError: Symbol('error'),
kClients: Symbol('clients'),
kClient: Symbol('client'),
kParser: Symbol('parser'),
kOnDestroyed: Symbol('destroy callbacks'),
kPipelining: Symbol('pipelining'),
kSocket: Symbol('socket'),
kHostHeader: Symbol('host header'),
kConnector: Symbol('connector'),
kStrictContentLength: Symbol('strict content length'),
kMaxRedirections: Symbol('maxRedirections'),
kMaxRequests: Symbol('maxRequestsPerClient'),
kProxy: Symbol('proxy agent options'),
kCounter: Symbol('socket request counter'),
kMaxResponseSize: Symbol('max response size'),
kHTTP2Session: Symbol('http2Session'),
kHTTP2SessionState: Symbol('http2Session state'),
kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
kConstruct: Symbol('constructable'),
kListeners: Symbol('listeners'),
kHTTPContext: Symbol('http context'),
kMaxConcurrentStreams: Symbol('max concurrent streams'),
kNoProxyAgent: Symbol('no proxy agent'),
kHttpProxyAgent: Symbol('http proxy agent'),
kHttpsProxyAgent: Symbol('https proxy agent')
}
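A small aside on the file above: every key is a fresh private Symbol except kDestroyed, which deliberately reuses Node's registered stream symbol so undici and core streams agree on the same "destroyed" marker. A two-line check, assuming the vendored layout shown in this diff:
const { kDestroyed, kQueue } = require('./node_modules/undici/lib/core/symbols')
console.log(kDestroyed === Symbol.for('nodejs.stream.destroyed')) // true — shared with Node core via the global symbol registry
console.log(kQueue === Symbol('queue'))                           // false — plain Symbol() values are always unique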

160
node_modules/undici/lib/core/tree.js generated vendored Normal file

@@ -0,0 +1,160 @@
'use strict'
const {
wellknownHeaderNames,
headerNameLowerCasedRecord
} = require('./constants')
class TstNode {
/** @type {any} */
value = null
/** @type {null | TstNode} */
left = null
/** @type {null | TstNode} */
middle = null
/** @type {null | TstNode} */
right = null
/** @type {number} */
code
/**
* @param {string} key
* @param {any} value
* @param {number} index
*/
constructor (key, value, index) {
if (index === undefined || index >= key.length) {
throw new TypeError('Unreachable')
}
const code = this.code = key.charCodeAt(index)
// check code is ascii string
if (code > 0x7F) {
throw new TypeError('key must be ascii string')
}
if (key.length !== ++index) {
this.middle = new TstNode(key, value, index)
} else {
this.value = value
}
}
/**
* @param {string} key
* @param {any} value
* @returns {void}
*/
add (key, value) {
const length = key.length
if (length === 0) {
throw new TypeError('Unreachable')
}
let index = 0
/**
* @type {TstNode}
*/
let node = this
while (true) {
const code = key.charCodeAt(index)
// check code is ascii string
if (code > 0x7F) {
throw new TypeError('key must be ascii string')
}
if (node.code === code) {
if (length === ++index) {
node.value = value
break
} else if (node.middle !== null) {
node = node.middle
} else {
node.middle = new TstNode(key, value, index)
break
}
} else if (node.code < code) {
if (node.left !== null) {
node = node.left
} else {
node.left = new TstNode(key, value, index)
break
}
} else if (node.right !== null) {
node = node.right
} else {
node.right = new TstNode(key, value, index)
break
}
}
}
/**
* @param {Uint8Array} key
* @returns {TstNode | null}
*/
search (key) {
const keylength = key.length
let index = 0
/**
* @type {TstNode|null}
*/
let node = this
while (node !== null && index < keylength) {
let code = key[index]
// A-Z
// First check if it is bigger than 0x5a.
// Lowercase letters have higher char codes than uppercase ones.
// Also we assume that headers will mostly contain lowercase characters.
if (code <= 0x5a && code >= 0x41) {
// Lowercase for uppercase.
code |= 32
}
while (node !== null) {
if (code === node.code) {
if (keylength === ++index) {
// Returns Node since it is the last key.
return node
}
node = node.middle
break
}
node = node.code < code ? node.left : node.right
}
}
return null
}
}
class TernarySearchTree {
/** @type {TstNode | null} */
node = null
/**
* @param {string} key
* @param {any} value
* @returns {void}
* */
insert (key, value) {
if (this.node === null) {
this.node = new TstNode(key, value, 0)
} else {
this.node.add(key, value)
}
}
/**
* @param {Uint8Array} key
* @returns {any}
*/
lookup (key) {
return this.node?.search(key)?.value ?? null
}
}
const tree = new TernarySearchTree()
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = headerNameLowerCasedRecord[wellknownHeaderNames[i]]
tree.insert(key, key)
}
module.exports = {
TernarySearchTree,
tree
}
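This ternary search tree lets hot-path header parsing map raw header-name bytes to their lowercase form without first allocating a string: search() lowercases ASCII A-Z on the fly, while insert() stores keys exactly as given (so keys are inserted pre-lowercased). A short sketch against the exported singleton and a fresh tree, assuming the vendored layout shown in this diff; 'x-forwarded-for' is just an arbitrary example key:
const { tree, TernarySearchTree } = require('./node_modules/undici/lib/core/tree')
// The prebuilt singleton already contains the well-known header names:
console.log(tree.lookup(Buffer.from('Content-Type')))    // 'content-type'
console.log(tree.lookup(Buffer.from('X-Unknown-Name')))  // null
// A fresh tree: insert lowercase ASCII keys, then look up case-insensitively by raw bytes.
const t = new TernarySearchTree()
t.insert('x-forwarded-for', 'x-forwarded-for')
console.log(t.lookup(Buffer.from('X-Forwarded-For')))    // 'x-forwarded-for'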

976
node_modules/undici/lib/core/util.js generated vendored Normal file

@@ -0,0 +1,976 @@
'use strict'
const assert = require('node:assert')
const { kDestroyed, kBodyUsed, kListeners, kBody } = require('./symbols')
const { IncomingMessage } = require('node:http')
const stream = require('node:stream')
const net = require('node:net')
const { stringify } = require('node:querystring')
const { EventEmitter: EE } = require('node:events')
const timers = require('../util/timers')
const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
const { headerNameLowerCasedRecord } = require('./constants')
const { tree } = require('./tree')
const [nodeMajor, nodeMinor] = process.versions.node.split('.', 2).map(v => Number(v))
class BodyAsyncIterable {
constructor (body) {
this[kBody] = body
this[kBodyUsed] = false
}
async * [Symbol.asyncIterator] () {
assert(!this[kBodyUsed], 'disturbed')
this[kBodyUsed] = true
yield * this[kBody]
}
}
function noop () {}
/**
* @param {*} body
* @returns {*}
*/
function wrapRequestBody (body) {
if (isStream(body)) {
// TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
// so that it can be dispatched again?
// TODO (fix): Do we need 100-expect support to provide a way to do this properly?
if (bodyLength(body) === 0) {
body
.on('data', function () {
assert(false)
})
}
if (typeof body.readableDidRead !== 'boolean') {
body[kBodyUsed] = false
EE.prototype.on.call(body, 'data', function () {
this[kBodyUsed] = true
})
}
return body
} else if (body && typeof body.pipeTo === 'function') {
// TODO (fix): We can't access ReadableStream internal state
// to determine whether or not it has been disturbed. This is just
// a workaround.
return new BodyAsyncIterable(body)
} else if (
body &&
typeof body !== 'string' &&
!ArrayBuffer.isView(body) &&
isIterable(body)
) {
// TODO: Should we allow re-using iterable if !this.opts.idempotent
// or through some other flag?
return new BodyAsyncIterable(body)
} else {
return body
}
}
/**
* @param {*} obj
* @returns {obj is import('node:stream').Stream}
*/
function isStream (obj) {
return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
}
/**
* @param {*} object
* @returns {object is Blob}
* based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
*/
function isBlobLike (object) {
if (object === null) {
return false
} else if (object instanceof Blob) {
return true
} else if (typeof object !== 'object') {
return false
} else {
const sTag = object[Symbol.toStringTag]
return (sTag === 'Blob' || sTag === 'File') && (
('stream' in object && typeof object.stream === 'function') ||
('arrayBuffer' in object && typeof object.arrayBuffer === 'function')
)
}
}
/**
* @param {string} url The path to check for query strings or fragments.
* @returns {boolean} Returns true if the path contains a query string or fragment.
*/
function pathHasQueryOrFragment (url) {
return (
url.includes('?') ||
url.includes('#')
)
}
/**
* @param {string} url The URL to add the query params to
* @param {import('node:querystring').ParsedUrlQueryInput} queryParams The object to serialize into a URL query string
* @returns {string} The URL with the query params added
*/
function serializePathWithQuery (url, queryParams) {
if (pathHasQueryOrFragment(url)) {
throw new Error('Query params cannot be passed when url already contains "?" or "#".')
}
const stringified = stringify(queryParams)
if (stringified) {
url += '?' + stringified
}
return url
}
/**
* @param {number|string|undefined} port
* @returns {boolean}
*/
function isValidPort (port) {
const value = parseInt(port, 10)
return (
value === Number(port) &&
value >= 0 &&
value <= 65535
)
}
/**
* Check if the value is a valid http or https prefixed string.
*
* @param {string} value
* @returns {boolean}
*/
function isHttpOrHttpsPrefixed (value) {
return (
value != null &&
value[0] === 'h' &&
value[1] === 't' &&
value[2] === 't' &&
value[3] === 'p' &&
(
value[4] === ':' ||
(
value[4] === 's' &&
value[5] === ':'
)
)
)
}
/**
* @param {string|URL|Record<string,string>} url
* @returns {URL}
*/
function parseURL (url) {
if (typeof url === 'string') {
/**
* @type {URL}
*/
url = new URL(url)
if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
return url
}
if (!url || typeof url !== 'object') {
throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
}
if (!(url instanceof URL)) {
if (url.port != null && url.port !== '' && isValidPort(url.port) === false) {
throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
}
if (url.path != null && typeof url.path !== 'string') {
throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
}
if (url.pathname != null && typeof url.pathname !== 'string') {
throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
}
if (url.hostname != null && typeof url.hostname !== 'string') {
throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
}
if (url.origin != null && typeof url.origin !== 'string') {
throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
}
if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
const port = url.port != null
? url.port
: (url.protocol === 'https:' ? 443 : 80)
let origin = url.origin != null
? url.origin
: `${url.protocol || ''}//${url.hostname || ''}:${port}`
let path = url.path != null
? url.path
: `${url.pathname || ''}${url.search || ''}`
if (origin[origin.length - 1] === '/') {
origin = origin.slice(0, origin.length - 1)
}
if (path && path[0] !== '/') {
path = `/${path}`
}
// new URL(path, origin) is unsafe when `path` contains an absolute URL
// From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
// If first parameter is a relative URL, second param is required, and will be used as the base URL.
// If first parameter is an absolute URL, a given second param will be ignored.
return new URL(`${origin}${path}`)
}
if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
return url
}
/**
* @param {string|URL|Record<string, string>} url
* @returns {URL}
*/
function parseOrigin (url) {
url = parseURL(url)
if (url.pathname !== '/' || url.search || url.hash) {
throw new InvalidArgumentError('invalid url')
}
return url
}
/**
* @param {string} host
* @returns {string}
*/
function getHostname (host) {
if (host[0] === '[') {
const idx = host.indexOf(']')
assert(idx !== -1)
return host.substring(1, idx)
}
const idx = host.indexOf(':')
if (idx === -1) return host
return host.substring(0, idx)
}
/**
* IP addresses are not valid server names per RFC6066
* Currently, the only server names supported are DNS hostnames
* @param {string|null} host
* @returns {string|null}
*/
function getServerName (host) {
if (!host) {
return null
}
assert(typeof host === 'string')
const servername = getHostname(host)
if (net.isIP(servername)) {
return ''
}
return servername
}
/**
* @function
* @template T
* @param {T} obj
* @returns {T}
*/
function deepClone (obj) {
return JSON.parse(JSON.stringify(obj))
}
/**
* @param {*} obj
* @returns {obj is AsyncIterable}
*/
function isAsyncIterable (obj) {
return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
}
/**
* @param {*} obj
* @returns {obj is Iterable}
*/
function isIterable (obj) {
return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
}
/**
* @param {Blob|Buffer|import ('stream').Stream} body
* @returns {number|null}
*/
function bodyLength (body) {
if (body == null) {
return 0
} else if (isStream(body)) {
const state = body._readableState
return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
? state.length
: null
} else if (isBlobLike(body)) {
return body.size != null ? body.size : null
} else if (isBuffer(body)) {
return body.byteLength
}
return null
}
/**
* @param {import ('stream').Stream} body
* @returns {boolean}
*/
function isDestroyed (body) {
return body && !!(body.destroyed || body[kDestroyed] || (stream.isDestroyed?.(body)))
}
/**
* @param {import ('stream').Stream} stream
* @param {Error} [err]
* @returns {void}
*/
function destroy (stream, err) {
if (stream == null || !isStream(stream) || isDestroyed(stream)) {
return
}
if (typeof stream.destroy === 'function') {
if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
// See: https://github.com/nodejs/node/pull/38505/files
stream.socket = null
}
stream.destroy(err)
} else if (err) {
queueMicrotask(() => {
stream.emit('error', err)
})
}
if (stream.destroyed !== true) {
stream[kDestroyed] = true
}
}
const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
/**
* @param {string} val
* @returns {number | null}
*/
function parseKeepAliveTimeout (val) {
const m = val.match(KEEPALIVE_TIMEOUT_EXPR)
return m ? parseInt(m[1], 10) * 1000 : null
}
/**
* Retrieves a header name and returns its lowercase value.
* @param {string | Buffer} value Header name
* @returns {string}
*/
function headerNameToString (value) {
return typeof value === 'string'
? headerNameLowerCasedRecord[value] ?? value.toLowerCase()
: tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}
/**
* Receive the buffer as a string and return its lowercase value.
* @param {Buffer} value Header name
* @returns {string}
*/
function bufferToLowerCasedHeaderName (value) {
return tree.lookup(value) ?? value.toString('latin1').toLowerCase()
}
/**
* @param {(Buffer | string)[]} headers
* @param {Record<string, string | string[]>} [obj]
* @returns {Record<string, string | string[]>}
*/
function parseHeaders (headers, obj) {
if (obj === undefined) obj = {}
for (let i = 0; i < headers.length; i += 2) {
const key = headerNameToString(headers[i])
let val = obj[key]
if (val) {
if (typeof val === 'string') {
val = [val]
obj[key] = val
}
val.push(headers[i + 1].toString('utf8'))
} else {
const headersValue = headers[i + 1]
if (typeof headersValue === 'string') {
obj[key] = headersValue
} else {
obj[key] = Array.isArray(headersValue) ? headersValue.map(x => x.toString('utf8')) : headersValue.toString('utf8')
}
}
}
// See https://github.com/nodejs/node/pull/46528
if ('content-length' in obj && 'content-disposition' in obj) {
obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
}
return obj
}
/**
* @param {Buffer[]} headers
* @returns {string[]}
*/
function parseRawHeaders (headers) {
const headersLength = headers.length
/**
* @type {string[]}
*/
const ret = new Array(headersLength)
let hasContentLength = false
let contentDispositionIdx = -1
let key
let val
let kLen = 0
for (let n = 0; n < headersLength; n += 2) {
key = headers[n]
val = headers[n + 1]
typeof key !== 'string' && (key = key.toString())
typeof val !== 'string' && (val = val.toString('utf8'))
kLen = key.length
if (kLen === 14 && key[7] === '-' && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
hasContentLength = true
} else if (kLen === 19 && key[7] === '-' && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
contentDispositionIdx = n + 1
}
ret[n] = key
ret[n + 1] = val
}
// See https://github.com/nodejs/node/pull/46528
if (hasContentLength && contentDispositionIdx !== -1) {
ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
}
return ret
}
/**
 * @param {string[]} headers
 * @returns {Buffer[]}
*/
function encodeRawHeaders (headers) {
if (!Array.isArray(headers)) {
throw new TypeError('expected headers to be an array')
}
return headers.map(x => Buffer.from(x))
}
/**
* @param {*} buffer
* @returns {buffer is Buffer}
*/
function isBuffer (buffer) {
// See, https://github.com/mcollina/undici/pull/319
return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
}
/**
* Asserts that the handler object is a request handler.
*
* @param {object} handler
* @param {string} method
* @param {string} [upgrade]
* @returns {asserts handler is import('../api/api-request').RequestHandler}
*/
function assertRequestHandler (handler, method, upgrade) {
if (!handler || typeof handler !== 'object') {
throw new InvalidArgumentError('handler must be an object')
}
if (typeof handler.onRequestStart === 'function') {
// TODO (fix): More checks...
return
}
if (typeof handler.onConnect !== 'function') {
throw new InvalidArgumentError('invalid onConnect method')
}
if (typeof handler.onError !== 'function') {
throw new InvalidArgumentError('invalid onError method')
}
if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
throw new InvalidArgumentError('invalid onBodySent method')
}
if (upgrade || method === 'CONNECT') {
if (typeof handler.onUpgrade !== 'function') {
throw new InvalidArgumentError('invalid onUpgrade method')
}
} else {
if (typeof handler.onHeaders !== 'function') {
throw new InvalidArgumentError('invalid onHeaders method')
}
if (typeof handler.onData !== 'function') {
throw new InvalidArgumentError('invalid onData method')
}
if (typeof handler.onComplete !== 'function') {
throw new InvalidArgumentError('invalid onComplete method')
}
}
}
/**
* A body is disturbed if it has been read from and it cannot be re-used without
* losing state or data.
* @param {import('node:stream').Readable} body
* @returns {boolean}
*/
function isDisturbed (body) {
// TODO (fix): Why is body[kBodyUsed] needed?
return !!(body && (stream.isDisturbed(body) || body[kBodyUsed]))
}
/**
* @typedef {object} SocketInfo
* @property {string} [localAddress]
* @property {number} [localPort]
* @property {string} [remoteAddress]
* @property {number} [remotePort]
* @property {string} [remoteFamily]
* @property {number} [timeout]
* @property {number} bytesWritten
* @property {number} bytesRead
*/
/**
* @param {import('net').Socket} socket
* @returns {SocketInfo}
*/
function getSocketInfo (socket) {
return {
localAddress: socket.localAddress,
localPort: socket.localPort,
remoteAddress: socket.remoteAddress,
remotePort: socket.remotePort,
remoteFamily: socket.remoteFamily,
timeout: socket.timeout,
bytesWritten: socket.bytesWritten,
bytesRead: socket.bytesRead
}
}
/**
* @param {Iterable} iterable
* @returns {ReadableStream}
*/
function ReadableStreamFrom (iterable) {
// We cannot use ReadableStream.from here because it does not return a byte stream.
let iterator
return new ReadableStream(
{
start () {
iterator = iterable[Symbol.asyncIterator]()
},
pull (controller) {
return iterator.next().then(({ done, value }) => {
if (done) {
queueMicrotask(() => {
controller.close()
controller.byobRequest?.respond(0)
})
} else {
const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
if (buf.byteLength) {
controller.enqueue(new Uint8Array(buf))
} else {
return this.pull(controller)
}
}
})
},
cancel () {
return iterator.return()
},
type: 'bytes'
}
)
}
/**
* The object should be a FormData instance and contains all the required
* methods.
* @param {*} object
* @returns {object is FormData}
*/
function isFormDataLike (object) {
return (
object &&
typeof object === 'object' &&
typeof object.append === 'function' &&
typeof object.delete === 'function' &&
typeof object.get === 'function' &&
typeof object.getAll === 'function' &&
typeof object.has === 'function' &&
typeof object.set === 'function' &&
object[Symbol.toStringTag] === 'FormData'
)
}
function addAbortListener (signal, listener) {
if ('addEventListener' in signal) {
signal.addEventListener('abort', listener, { once: true })
return () => signal.removeEventListener('abort', listener)
}
signal.once('abort', listener)
return () => signal.removeListener('abort', listener)
}
/**
* @see https://tools.ietf.org/html/rfc7230#section-3.2.6
* @param {number} c
* @returns {boolean}
*/
function isTokenCharCode (c) {
switch (c) {
case 0x22:
case 0x28:
case 0x29:
case 0x2c:
case 0x2f:
case 0x3a:
case 0x3b:
case 0x3c:
case 0x3d:
case 0x3e:
case 0x3f:
case 0x40:
case 0x5b:
case 0x5c:
case 0x5d:
case 0x7b:
case 0x7d:
// DQUOTE and "(),/:;<=>?@[\]{}"
return false
default:
// VCHAR %x21-7E
return c >= 0x21 && c <= 0x7e
}
}
/**
* @param {string} characters
* @returns {boolean}
*/
function isValidHTTPToken (characters) {
if (characters.length === 0) {
return false
}
for (let i = 0; i < characters.length; ++i) {
if (!isTokenCharCode(characters.charCodeAt(i))) {
return false
}
}
return true
}
// headerCharRegex have been lifted from
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
/**
* Matches if val contains an invalid field-vchar
* field-value = *( field-content / obs-fold )
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
* field-vchar = VCHAR / obs-text
*/
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
/**
* @param {string} characters
* @returns {boolean}
*/
function isValidHeaderValue (characters) {
return !headerCharRegex.test(characters)
}
const rangeHeaderRegex = /^bytes (\d+)-(\d+)\/(\d+)?$/
/**
* @typedef {object} RangeHeader
* @property {number} start
* @property {number | null} end
* @property {number | null} size
*/
/**
* Parse accordingly to RFC 9110
* @see https://www.rfc-editor.org/rfc/rfc9110#field.content-range
* @param {string} [range]
* @returns {RangeHeader|null}
*/
function parseRangeHeader (range) {
if (range == null || range === '') return { start: 0, end: null, size: null }
const m = range ? range.match(rangeHeaderRegex) : null
return m
? {
start: parseInt(m[1]),
end: m[2] ? parseInt(m[2]) : null,
size: m[3] ? parseInt(m[3]) : null
}
: null
}
/**
* @template {import("events").EventEmitter} T
* @param {T} obj
* @param {string} name
* @param {(...args: any[]) => void} listener
* @returns {T}
*/
function addListener (obj, name, listener) {
const listeners = (obj[kListeners] ??= [])
listeners.push([name, listener])
obj.on(name, listener)
return obj
}
/**
* @template {import("events").EventEmitter} T
* @param {T} obj
* @returns {T}
*/
function removeAllListeners (obj) {
if (obj[kListeners] != null) {
for (const [name, listener] of obj[kListeners]) {
obj.removeListener(name, listener)
}
obj[kListeners] = null
}
return obj
}
/**
* @param {import ('../dispatcher/client')} client
* @param {import ('../core/request')} request
* @param {Error} err
*/
function errorRequest (client, request, err) {
try {
request.onError(err)
assert(request.aborted)
} catch (err) {
client.emit('error', err)
}
}
/**
* @param {WeakRef<net.Socket>} socketWeakRef
* @param {object} opts
* @param {number} opts.timeout
* @param {string} opts.hostname
* @param {number} opts.port
* @returns {() => void}
*/
const setupConnectTimeout = process.platform === 'win32'
? (socketWeakRef, opts) => {
if (!opts.timeout) {
return noop
}
let s1 = null
let s2 = null
const fastTimer = timers.setFastTimeout(() => {
// setImmediate is added to make sure that we prioritize socket error events over timeouts
s1 = setImmediate(() => {
// Windows needs an extra setImmediate probably due to implementation differences in the socket logic
s2 = setImmediate(() => onConnectTimeout(socketWeakRef.deref(), opts))
})
}, opts.timeout)
return () => {
timers.clearFastTimeout(fastTimer)
clearImmediate(s1)
clearImmediate(s2)
}
}
: (socketWeakRef, opts) => {
if (!opts.timeout) {
return noop
}
let s1 = null
const fastTimer = timers.setFastTimeout(() => {
// setImmediate is added to make sure that we prioritize socket error events over timeouts
s1 = setImmediate(() => {
onConnectTimeout(socketWeakRef.deref(), opts)
})
}, opts.timeout)
return () => {
timers.clearFastTimeout(fastTimer)
clearImmediate(s1)
}
}
/**
* @param {net.Socket} socket
* @param {object} opts
* @param {number} opts.timeout
* @param {string} opts.hostname
* @param {number} opts.port
*/
function onConnectTimeout (socket, opts) {
// The socket could be already garbage collected
if (socket == null) {
return
}
let message = 'Connect Timeout Error'
if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) {
message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')},`
} else {
message += ` (attempted address: ${opts.hostname}:${opts.port},`
}
message += ` timeout: ${opts.timeout}ms)`
destroy(socket, new ConnectTimeoutError(message))
}
/**
* @param {string} urlString
* @returns {string}
*/
function getProtocolFromUrlString (urlString) {
if (
urlString[0] === 'h' &&
urlString[1] === 't' &&
urlString[2] === 't' &&
urlString[3] === 'p'
) {
switch (urlString[4]) {
case ':':
return 'http:'
case 's':
if (urlString[5] === ':') {
return 'https:'
}
}
}
// fallback if none of the usual suspects
return urlString.slice(0, urlString.indexOf(':') + 1)
}
const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true
const normalizedMethodRecordsBase = {
delete: 'DELETE',
DELETE: 'DELETE',
get: 'GET',
GET: 'GET',
head: 'HEAD',
HEAD: 'HEAD',
options: 'OPTIONS',
OPTIONS: 'OPTIONS',
post: 'POST',
POST: 'POST',
put: 'PUT',
PUT: 'PUT'
}
const normalizedMethodRecords = {
...normalizedMethodRecordsBase,
patch: 'patch',
PATCH: 'PATCH'
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(normalizedMethodRecordsBase, null)
Object.setPrototypeOf(normalizedMethodRecords, null)
module.exports = {
kEnumerableProperty,
isDisturbed,
isBlobLike,
parseOrigin,
parseURL,
getServerName,
isStream,
isIterable,
isAsyncIterable,
isDestroyed,
headerNameToString,
bufferToLowerCasedHeaderName,
addListener,
removeAllListeners,
errorRequest,
parseRawHeaders,
encodeRawHeaders,
parseHeaders,
parseKeepAliveTimeout,
destroy,
bodyLength,
deepClone,
ReadableStreamFrom,
isBuffer,
assertRequestHandler,
getSocketInfo,
isFormDataLike,
pathHasQueryOrFragment,
serializePathWithQuery,
addAbortListener,
isValidHTTPToken,
isValidHeaderValue,
isTokenCharCode,
parseRangeHeader,
normalizedMethodRecordsBase,
normalizedMethodRecords,
isValidPort,
isHttpOrHttpsPrefixed,
nodeMajor,
nodeMinor,
safeHTTPMethods: Object.freeze(['GET', 'HEAD', 'OPTIONS', 'TRACE']),
wrapRequestBody,
setupConnectTimeout,
getProtocolFromUrlString
}
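A few of these helpers are easy to exercise in isolation. The sketch below, assuming the vendored layout shown in this diff, touches the query serialization that Request uses for the path + query option, the header-name normalization behind parseHeaders, and the RFC 9110 Content-Range parser:
const util = require('./node_modules/undici/lib/core/util')
// Builds the request path from a base path plus a query object;
// it refuses paths that already contain '?' or '#'.
console.log(util.serializePathWithQuery('/message', { token: 'abc', priority: 5 }))
// -> '/message?token=abc&priority=5'
// Header-name normalization accepts strings or raw Buffers:
console.log(util.headerNameToString('X-Custom'))                               // 'x-custom'
console.log(util.bufferToLowerCasedHeaderName(Buffer.from('Content-Length'))) // 'content-length'
// Content-Range parsing per RFC 9110:
console.log(util.parseRangeHeader('bytes 0-99/250')) // { start: 0, end: 99, size: 250 }
console.log(util.parseRangeHeader(undefined))        // { start: 0, end: null, size: null }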

156
node_modules/undici/lib/dispatcher/agent.js generated vendored Normal file

@@ -0,0 +1,156 @@
'use strict'
const { InvalidArgumentError, MaxOriginsReachedError } = require('../core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch, kUrl } = require('../core/symbols')
const DispatcherBase = require('./dispatcher-base')
const Pool = require('./pool')
const Client = require('./client')
const util = require('../core/util')
const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kOptions = Symbol('options')
const kOrigins = Symbol('origins')
function defaultFactory (origin, opts) {
return opts && opts.connections === 1
? new Client(origin, opts)
: new Pool(origin, opts)
}
class Agent extends DispatcherBase {
constructor ({ factory = defaultFactory, maxOrigins = Infinity, connect, ...options } = {}) {
if (typeof factory !== 'function') {
throw new InvalidArgumentError('factory must be a function.')
}
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (typeof maxOrigins !== 'number' || Number.isNaN(maxOrigins) || maxOrigins <= 0) {
throw new InvalidArgumentError('maxOrigins must be a number greater than 0')
}
super()
if (connect && typeof connect !== 'function') {
connect = { ...connect }
}
this[kOptions] = { ...util.deepClone(options), maxOrigins, connect }
this[kFactory] = factory
this[kClients] = new Map()
this[kOrigins] = new Set()
this[kOnDrain] = (origin, targets) => {
this.emit('drain', origin, [this, ...targets])
}
this[kOnConnect] = (origin, targets) => {
this.emit('connect', origin, [this, ...targets])
}
this[kOnDisconnect] = (origin, targets, err) => {
this.emit('disconnect', origin, [this, ...targets], err)
}
this[kOnConnectionError] = (origin, targets, err) => {
this.emit('connectionError', origin, [this, ...targets], err)
}
}
get [kRunning] () {
let ret = 0
for (const { dispatcher } of this[kClients].values()) {
ret += dispatcher[kRunning]
}
return ret
}
[kDispatch] (opts, handler) {
let key
if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
key = String(opts.origin)
} else {
throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
}
if (this[kOrigins].size >= this[kOptions].maxOrigins && !this[kOrigins].has(key)) {
throw new MaxOriginsReachedError()
}
const result = this[kClients].get(key)
let dispatcher = result && result.dispatcher
if (!dispatcher) {
const closeClientIfUnused = (connected) => {
const result = this[kClients].get(key)
if (result) {
if (connected) result.count -= 1
if (result.count <= 0) {
this[kClients].delete(key)
result.dispatcher.close()
}
this[kOrigins].delete(key)
}
}
dispatcher = this[kFactory](opts.origin, this[kOptions])
.on('drain', this[kOnDrain])
.on('connect', (origin, targets) => {
const result = this[kClients].get(key)
if (result) {
result.count += 1
}
this[kOnConnect](origin, targets)
})
.on('disconnect', (origin, targets, err) => {
closeClientIfUnused(true)
this[kOnDisconnect](origin, targets, err)
})
.on('connectionError', (origin, targets, err) => {
closeClientIfUnused(false)
this[kOnConnectionError](origin, targets, err)
})
this[kClients].set(key, { count: 0, dispatcher })
this[kOrigins].add(key)
}
return dispatcher.dispatch(opts, handler)
}
[kClose] () {
const closePromises = []
for (const { dispatcher } of this[kClients].values()) {
closePromises.push(dispatcher.close())
}
this[kClients].clear()
return Promise.all(closePromises)
}
[kDestroy] (err) {
const destroyPromises = []
for (const { dispatcher } of this[kClients].values()) {
destroyPromises.push(dispatcher.destroy(err))
}
this[kClients].clear()
return Promise.all(destroyPromises)
}
get stats () {
const allClientStats = {}
for (const { dispatcher } of this[kClients].values()) {
if (dispatcher.stats) {
allClientStats[dispatcher[kUrl].origin] = dispatcher.stats
}
}
return allClientStats
}
}
module.exports = Agent
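In practice an Agent is created once and handed to requests as the dispatcher. With connections: 1 the defaultFactory above returns a Client per origin, otherwise a Pool, and maxOrigins caps how many distinct origins it will track before throwing MaxOriginsReachedError. A minimal sketch with a placeholder URL:
const { Agent, request } = require('undici')
const agent = new Agent({
  connections: 1, // exactly one connection per origin -> defaultFactory builds a Client, not a Pool
  maxOrigins: 10  // dispatching to an 11th distinct origin throws MaxOriginsReachedError
})
async function main () {
  const { statusCode, body } = await request('http://127.0.0.1:8080/', { dispatcher: agent })
  console.log(statusCode, await body.text())
  await agent.close() // closes every per-origin dispatcher the agent created
}
main().catch(console.error)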

206
node_modules/undici/lib/dispatcher/balanced-pool.js generated vendored Normal file

@@ -0,0 +1,206 @@
'use strict'
const {
BalancedPoolMissingUpstreamError,
InvalidArgumentError
} = require('../core/errors')
const {
PoolBase,
kClients,
kNeedDrain,
kAddClient,
kRemoveClient,
kGetDispatcher
} = require('./pool-base')
const Pool = require('./pool')
const { kUrl } = require('../core/symbols')
const { parseOrigin } = require('../core/util')
const kFactory = Symbol('factory')
const kOptions = Symbol('options')
const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
const kCurrentWeight = Symbol('kCurrentWeight')
const kIndex = Symbol('kIndex')
const kWeight = Symbol('kWeight')
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
const kErrorPenalty = Symbol('kErrorPenalty')
/**
* Calculate the greatest common divisor of two numbers by
* using the Euclidean algorithm.
*
* @param {number} a
* @param {number} b
* @returns {number}
*/
function getGreatestCommonDivisor (a, b) {
if (a === 0) return b
while (b !== 0) {
const t = b
b = a % b
a = t
}
return a
}
function defaultFactory (origin, opts) {
return new Pool(origin, opts)
}
class BalancedPool extends PoolBase {
constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
if (typeof factory !== 'function') {
throw new InvalidArgumentError('factory must be a function.')
}
super()
this[kOptions] = opts
this[kIndex] = -1
this[kCurrentWeight] = 0
this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
this[kErrorPenalty] = this[kOptions].errorPenalty || 15
if (!Array.isArray(upstreams)) {
upstreams = [upstreams]
}
this[kFactory] = factory
for (const upstream of upstreams) {
this.addUpstream(upstream)
}
this._updateBalancedPoolStats()
}
addUpstream (upstream) {
const upstreamOrigin = parseOrigin(upstream).origin
if (this[kClients].find((pool) => (
pool[kUrl].origin === upstreamOrigin &&
pool.closed !== true &&
pool.destroyed !== true
))) {
return this
}
const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))
this[kAddClient](pool)
pool.on('connect', () => {
pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
})
pool.on('connectionError', () => {
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
this._updateBalancedPoolStats()
})
pool.on('disconnect', (...args) => {
const err = args[2]
if (err && err.code === 'UND_ERR_SOCKET') {
// decrease the weight of the pool.
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
this._updateBalancedPoolStats()
}
})
for (const client of this[kClients]) {
client[kWeight] = this[kMaxWeightPerServer]
}
this._updateBalancedPoolStats()
return this
}
_updateBalancedPoolStats () {
let result = 0
for (let i = 0; i < this[kClients].length; i++) {
result = getGreatestCommonDivisor(this[kClients][i][kWeight], result)
}
this[kGreatestCommonDivisor] = result
}
removeUpstream (upstream) {
const upstreamOrigin = parseOrigin(upstream).origin
const pool = this[kClients].find((pool) => (
pool[kUrl].origin === upstreamOrigin &&
pool.closed !== true &&
pool.destroyed !== true
))
if (pool) {
this[kRemoveClient](pool)
}
return this
}
get upstreams () {
return this[kClients]
.filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
.map((p) => p[kUrl].origin)
}
[kGetDispatcher] () {
// We validate that the number of pools is greater than 0;
// otherwise we would have to wait until an upstream
// is added, which might never happen.
if (this[kClients].length === 0) {
throw new BalancedPoolMissingUpstreamError()
}
const dispatcher = this[kClients].find(dispatcher => (
!dispatcher[kNeedDrain] &&
dispatcher.closed !== true &&
dispatcher.destroyed !== true
))
if (!dispatcher) {
return
}
const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)
if (allClientsBusy) {
return
}
let counter = 0
let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])
while (counter++ < this[kClients].length) {
this[kIndex] = (this[kIndex] + 1) % this[kClients].length
const pool = this[kClients][this[kIndex]]
// find pool index with the largest weight
if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
maxWeightIndex = this[kIndex]
}
// decrease the current weight every `this[kClients].length`.
if (this[kIndex] === 0) {
// Set the current weight to the next lower weight.
this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]
if (this[kCurrentWeight] <= 0) {
this[kCurrentWeight] = this[kMaxWeightPerServer]
}
}
if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
return pool
}
}
this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
this[kIndex] = maxWeightIndex
return this[kClients][maxWeightIndex]
}
}
module.exports = BalancedPool
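BalancedPool spreads requests over several upstream origins with interleaved weighted round-robin: every upstream starts at maxWeightPerServer (default 100), connection errors subtract errorPenalty (default 15), and the dispatcher-selection loop steps the current weight down by the greatest common divisor of all weights. A short usage sketch with placeholder upstream addresses:
const { BalancedPool } = require('undici')
const pool = new BalancedPool([
  'http://10.0.0.1:3000', // placeholder upstreams for illustration
  'http://10.0.0.2:3000'
], { maxWeightPerServer: 100, errorPenalty: 15 })
pool.addUpstream('http://10.0.0.3:3000') // adding the same origin twice is a no-op
console.log(pool.upstreams)              // origins that are neither closed nor destroyed
async function main () {
  // Each request goes to whichever upstream the weighted round-robin picks.
  const { statusCode, body } = await pool.request({ path: '/health', method: 'GET' })
  console.log(statusCode, await body.text())
  await pool.close()
}
main().catch(console.error)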

1617
node_modules/undici/lib/dispatcher/client-h1.js generated vendored Normal file

File diff suppressed because it is too large

808
node_modules/undici/lib/dispatcher/client-h2.js generated vendored Normal file

@@ -0,0 +1,808 @@
'use strict'
const assert = require('node:assert')
const { pipeline } = require('node:stream')
const util = require('../core/util.js')
const {
RequestContentLengthMismatchError,
RequestAbortedError,
SocketError,
InformationalError
} = require('../core/errors.js')
const {
kUrl,
kReset,
kClient,
kRunning,
kPending,
kQueue,
kPendingIdx,
kRunningIdx,
kError,
kSocket,
kStrictContentLength,
kOnError,
kMaxConcurrentStreams,
kHTTP2Session,
kResume,
kSize,
kHTTPContext,
kClosed,
kBodyTimeout
} = require('../core/symbols.js')
const { channels } = require('../core/diagnostics.js')
const kOpenStreams = Symbol('open streams')
let extractBody
/** @type {import('http2')} */
let http2
try {
http2 = require('node:http2')
} catch {
// @ts-ignore
http2 = { constants: {} }
}
const {
constants: {
HTTP2_HEADER_AUTHORITY,
HTTP2_HEADER_METHOD,
HTTP2_HEADER_PATH,
HTTP2_HEADER_SCHEME,
HTTP2_HEADER_CONTENT_LENGTH,
HTTP2_HEADER_EXPECT,
HTTP2_HEADER_STATUS
}
} = http2
function parseH2Headers (headers) {
const result = []
for (const [name, value] of Object.entries(headers)) {
// h2 may concat the header value by array
// e.g. Set-Cookie
if (Array.isArray(value)) {
for (const subvalue of value) {
// we need to provide each value of the header name separately,
// because the headers handler expects name-value pairs
result.push(Buffer.from(name), Buffer.from(subvalue))
}
} else {
result.push(Buffer.from(name), Buffer.from(value))
}
}
return result
}
function connectH2 (client, socket) {
client[kSocket] = socket
const session = http2.connect(client[kUrl], {
createConnection: () => socket,
peerMaxConcurrentStreams: client[kMaxConcurrentStreams],
settings: {
// TODO(metcoder95): add support for PUSH
enablePush: false
}
})
session[kOpenStreams] = 0
session[kClient] = client
session[kSocket] = socket
session[kHTTP2Session] = null
util.addListener(session, 'error', onHttp2SessionError)
util.addListener(session, 'frameError', onHttp2FrameError)
util.addListener(session, 'end', onHttp2SessionEnd)
util.addListener(session, 'goaway', onHttp2SessionGoAway)
util.addListener(session, 'close', onHttp2SessionClose)
session.unref()
client[kHTTP2Session] = session
socket[kHTTP2Session] = session
util.addListener(socket, 'error', onHttp2SocketError)
util.addListener(socket, 'end', onHttp2SocketEnd)
util.addListener(socket, 'close', onHttp2SocketClose)
socket[kClosed] = false
socket.on('close', onSocketClose)
return {
version: 'h2',
defaultPipelining: Infinity,
write (request) {
return writeH2(client, request)
},
resume () {
resumeH2(client)
},
destroy (err, callback) {
if (socket[kClosed]) {
queueMicrotask(callback)
} else {
socket.destroy(err).on('close', callback)
}
},
get destroyed () {
return socket.destroyed
},
busy () {
return false
}
}
}
function resumeH2 (client) {
const socket = client[kSocket]
if (socket?.destroyed === false) {
if (client[kSize] === 0 || client[kMaxConcurrentStreams] === 0) {
socket.unref()
client[kHTTP2Session].unref()
} else {
socket.ref()
client[kHTTP2Session].ref()
}
}
}
function onHttp2SessionError (err) {
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
this[kSocket][kError] = err
this[kClient][kOnError](err)
}
function onHttp2FrameError (type, code, id) {
if (id === 0) {
const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
this[kSocket][kError] = err
this[kClient][kOnError](err)
}
}
function onHttp2SessionEnd () {
const err = new SocketError('other side closed', util.getSocketInfo(this[kSocket]))
this.destroy(err)
util.destroy(this[kSocket], err)
}
/**
* This is the root cause of #3011
* We need to handle GOAWAY frames properly, and trigger the session close
* along with the socket right away
*
* @this {import('http2').ClientHttp2Session}
* @param {number} errorCode
*/
function onHttp2SessionGoAway (errorCode) {
// TODO(mcollina): Verify if GOAWAY implements the spec correctly:
// https://datatracker.ietf.org/doc/html/rfc7540#section-6.8
// Specifically, we do not verify the "valid" stream id.
const err = this[kError] || new SocketError(`HTTP/2: "GOAWAY" frame received with code ${errorCode}`, util.getSocketInfo(this[kSocket]))
const client = this[kClient]
client[kSocket] = null
client[kHTTPContext] = null
// this is an HTTP2 session
this.close()
this[kHTTP2Session] = null
util.destroy(this[kSocket], err)
// Fail head of pipeline.
if (client[kRunningIdx] < client[kQueue].length) {
const request = client[kQueue][client[kRunningIdx]]
client[kQueue][client[kRunningIdx]++] = null
util.errorRequest(client, request, err)
client[kPendingIdx] = client[kRunningIdx]
}
assert(client[kRunning] === 0)
client.emit('disconnect', client[kUrl], [client], err)
client.emit('connectionError', client[kUrl], [client], err)
client[kResume]()
}
function onHttp2SessionClose () {
const { [kClient]: client } = this
const { [kSocket]: socket } = client
const err = this[kSocket][kError] || this[kError] || new SocketError('closed', util.getSocketInfo(socket))
client[kSocket] = null
client[kHTTPContext] = null
if (client.destroyed) {
assert(client[kPending] === 0)
// Fail entire queue.
const requests = client[kQueue].splice(client[kRunningIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(client, request, err)
}
}
}
function onHttp2SocketClose () {
const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
const client = this[kHTTP2Session][kClient]
client[kSocket] = null
client[kHTTPContext] = null
if (this[kHTTP2Session] !== null) {
this[kHTTP2Session].destroy(err)
}
client[kPendingIdx] = client[kRunningIdx]
assert(client[kRunning] === 0)
client.emit('disconnect', client[kUrl], [client], err)
client[kResume]()
}
function onHttp2SocketError (err) {
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
this[kError] = err
this[kClient][kOnError](err)
}
function onHttp2SocketEnd () {
util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
}
function onSocketClose () {
this[kClosed] = true
}
// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
function shouldSendContentLength (method) {
return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
}
function writeH2 (client, request) {
const requestTimeout = request.bodyTimeout ?? client[kBodyTimeout]
const session = client[kHTTP2Session]
const { method, path, host, upgrade, expectContinue, signal, protocol, headers: reqHeaders } = request
let { body } = request
if (upgrade) {
util.errorRequest(client, request, new Error('Upgrade not supported for H2'))
return false
}
const headers = {}
for (let n = 0; n < reqHeaders.length; n += 2) {
const key = reqHeaders[n + 0]
const val = reqHeaders[n + 1]
if (key === 'cookie') {
if (headers[key] != null) {
headers[key] = Array.isArray(headers[key]) ? (headers[key].push(val), headers[key]) : [headers[key], val]
} else {
headers[key] = val
}
continue
}
if (Array.isArray(val)) {
for (let i = 0; i < val.length; i++) {
if (headers[key]) {
headers[key] += `, ${val[i]}`
} else {
headers[key] = val[i]
}
}
} else if (headers[key]) {
headers[key] += `, ${val}`
} else {
headers[key] = val
}
}
/** @type {import('node:http2').ClientHttp2Stream} */
let stream = null
const { hostname, port } = client[kUrl]
headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? `:${port}` : ''}`
headers[HTTP2_HEADER_METHOD] = method
const abort = (err) => {
if (request.aborted || request.completed) {
return
}
err = err || new RequestAbortedError()
util.errorRequest(client, request, err)
if (stream != null) {
// Some chunks might still come after abort,
// let's ignore them
stream.removeAllListeners('data')
// On Abort, we close the stream to send RST_STREAM frame
stream.close()
// We move the running index to the next request
client[kOnError](err)
client[kResume]()
}
// We do not destroy the socket, as we can continue using the session:
// the stream gets destroyed and the session remains available for new streams
util.destroy(body, err)
}
try {
// We are already connected, streams are pending.
// We can call on connect, and wait for abort
request.onConnect(abort)
} catch (err) {
util.errorRequest(client, request, err)
}
if (request.aborted) {
return false
}
if (method === 'CONNECT') {
session.ref()
// We are already connected and streams are pending; the first request
// will create a new stream. We trigger a request to create the stream and wait until
// the `ready` event is triggered.
// We disable endStream to allow the user to write to the stream.
stream = session.request(headers, { endStream: false, signal })
if (!stream.pending) {
request.onUpgrade(null, null, stream)
++session[kOpenStreams]
client[kQueue][client[kRunningIdx]++] = null
} else {
stream.once('ready', () => {
request.onUpgrade(null, null, stream)
++session[kOpenStreams]
client[kQueue][client[kRunningIdx]++] = null
})
}
stream.once('close', () => {
session[kOpenStreams] -= 1
if (session[kOpenStreams] === 0) session.unref()
})
stream.setTimeout(requestTimeout)
return true
}
// https://tools.ietf.org/html/rfc7540#section-8.3
// :path and :scheme headers must be omitted when sending CONNECT
headers[HTTP2_HEADER_PATH] = path
headers[HTTP2_HEADER_SCHEME] = protocol === 'http:' ? 'http' : 'https'
// https://tools.ietf.org/html/rfc7231#section-4.3.1
// https://tools.ietf.org/html/rfc7231#section-4.3.2
// https://tools.ietf.org/html/rfc7231#section-4.3.5
// Sending a payload body on a request that does not
// expect it can cause undefined behavior on some
// servers and corrupt connection state. Do not
// re-use the connection for further requests.
const expectsPayload = (
method === 'PUT' ||
method === 'POST' ||
method === 'PATCH'
)
if (body && typeof body.read === 'function') {
// Try to read EOF in order to get length.
body.read(0)
}
let contentLength = util.bodyLength(body)
if (util.isFormDataLike(body)) {
extractBody ??= require('../web/fetch/body.js').extractBody
const [bodyStream, contentType] = extractBody(body)
headers['content-type'] = contentType
body = bodyStream.stream
contentLength = bodyStream.length
}
if (contentLength == null) {
contentLength = request.contentLength
}
if (contentLength === 0 || !expectsPayload) {
// https://tools.ietf.org/html/rfc7230#section-3.3.2
// A user agent SHOULD NOT send a Content-Length header field when
// the request message does not contain a payload body and the method
// semantics do not anticipate such a body.
contentLength = null
}
// https://github.com/nodejs/undici/issues/2046
// A user agent may send a Content-Length header with 0 value, this should be allowed.
if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
if (client[kStrictContentLength]) {
util.errorRequest(client, request, new RequestContentLengthMismatchError())
return false
}
process.emitWarning(new RequestContentLengthMismatchError())
}
if (contentLength != null) {
assert(body, 'no body must not have content length')
headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
}
session.ref()
if (channels.sendHeaders.hasSubscribers) {
let header = ''
for (const key in headers) {
header += `${key}: ${headers[key]}\r\n`
}
channels.sendHeaders.publish({ request, headers: header, socket: session[kSocket] })
}
// TODO(metcoder95): add support for sending trailers
const shouldEndStream = method === 'GET' || method === 'HEAD' || body === null
if (expectContinue) {
headers[HTTP2_HEADER_EXPECT] = '100-continue'
stream = session.request(headers, { endStream: shouldEndStream, signal })
stream.once('continue', writeBodyH2)
} else {
stream = session.request(headers, {
endStream: shouldEndStream,
signal
})
writeBodyH2()
}
// Increment counter as we have new streams open
++session[kOpenStreams]
stream.setTimeout(requestTimeout)
stream.once('response', headers => {
const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
request.onResponseStarted()
// Due to the nature of streams, it is possible we face a race condition
// where the stream has been assigned but the request has been aborted:
// the request remains in-flight and headers haven't been received yet.
// For those scenarios, the best effort is to destroy the stream immediately,
// as there is no value in keeping it open.
if (request.aborted) {
stream.removeAllListeners('data')
return
}
if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), '') === false) {
stream.pause()
}
})
stream.on('data', (chunk) => {
if (request.onData(chunk) === false) {
stream.pause()
}
})
stream.once('end', (err) => {
stream.removeAllListeners('data')
// When state is null, it means we haven't consumed the body and the stream still does not
// have a state.
// This happens especially when using pipeline or stream.
if (stream.state?.state == null || stream.state.state < 6) {
// Do not complete the request if it was aborted
// Unlikely to happen, but kept as a safety net to avoid race conditions with 'trailers'
if (!request.aborted && !request.completed) {
request.onComplete({})
}
client[kQueue][client[kRunningIdx]++] = null
client[kResume]()
} else {
// Stream is closed or half-closed-remote (6): decrement the counter and clean up.
// It does not make sense to continue working with the stream, as we do not
// yet have RST_STREAM support on the client side.
--session[kOpenStreams]
if (session[kOpenStreams] === 0) {
session.unref()
}
abort(err ?? new InformationalError('HTTP/2: stream half-closed (remote)'))
client[kQueue][client[kRunningIdx]++] = null
client[kPendingIdx] = client[kRunningIdx]
client[kResume]()
}
})
stream.once('close', () => {
stream.removeAllListeners('data')
session[kOpenStreams] -= 1
if (session[kOpenStreams] === 0) {
session.unref()
}
})
stream.once('error', function (err) {
stream.removeAllListeners('data')
abort(err)
})
stream.once('frameError', (type, code) => {
stream.removeAllListeners('data')
abort(new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`))
})
stream.on('aborted', () => {
stream.removeAllListeners('data')
})
stream.on('timeout', () => {
const err = new InformationalError(`HTTP/2: "stream timeout after ${requestTimeout}"`)
stream.removeAllListeners('data')
session[kOpenStreams] -= 1
if (session[kOpenStreams] === 0) {
session.unref()
}
abort(err)
})
stream.once('trailers', trailers => {
if (request.aborted || request.completed) {
return
}
request.onComplete(trailers)
})
return true
function writeBodyH2 () {
/* istanbul ignore else: assertion */
if (!body || contentLength === 0) {
writeBuffer(
abort,
stream,
null,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else if (util.isBuffer(body)) {
writeBuffer(
abort,
stream,
body,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else if (util.isBlobLike(body)) {
if (typeof body.stream === 'function') {
writeIterable(
abort,
stream,
body.stream(),
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else {
writeBlob(
abort,
stream,
body,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
}
} else if (util.isStream(body)) {
writeStream(
abort,
client[kSocket],
expectsPayload,
stream,
body,
client,
request,
contentLength
)
} else if (util.isIterable(body)) {
writeIterable(
abort,
stream,
body,
client,
request,
client[kSocket],
contentLength,
expectsPayload
)
} else {
assert(false)
}
}
}
function writeBuffer (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
try {
if (body != null && util.isBuffer(body)) {
assert(contentLength === body.byteLength, 'buffer body must have content length')
h2stream.cork()
h2stream.write(body)
h2stream.uncork()
h2stream.end()
request.onBodySent(body)
}
if (!expectsPayload) {
socket[kReset] = true
}
request.onRequestSent()
client[kResume]()
} catch (error) {
abort(error)
}
}
function writeStream (abort, socket, expectsPayload, h2stream, body, client, request, contentLength) {
assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
// For HTTP/2, it is enough to pipe the stream
const pipe = pipeline(
body,
h2stream,
(err) => {
if (err) {
util.destroy(pipe, err)
abort(err)
} else {
util.removeAllListeners(pipe)
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
}
}
)
util.addListener(pipe, 'data', onPipeData)
function onPipeData (chunk) {
request.onBodySent(chunk)
}
}
async function writeBlob (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
assert(contentLength === body.size, 'blob body must have content length')
try {
if (contentLength != null && contentLength !== body.size) {
throw new RequestContentLengthMismatchError()
}
const buffer = Buffer.from(await body.arrayBuffer())
h2stream.cork()
h2stream.write(buffer)
h2stream.uncork()
h2stream.end()
request.onBodySent(buffer)
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
} catch (err) {
abort(err)
}
}
async function writeIterable (abort, h2stream, body, client, request, socket, contentLength, expectsPayload) {
assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
let callback = null
function onDrain () {
if (callback) {
const cb = callback
callback = null
cb()
}
}
const waitForDrain = () => new Promise((resolve, reject) => {
assert(callback === null)
if (socket[kError]) {
reject(socket[kError])
} else {
callback = resolve
}
})
h2stream
.on('close', onDrain)
.on('drain', onDrain)
try {
// It's up to the user to somehow abort the async iterable.
for await (const chunk of body) {
if (socket[kError]) {
throw socket[kError]
}
const res = h2stream.write(chunk)
request.onBodySent(chunk)
if (!res) {
await waitForDrain()
}
}
h2stream.end()
request.onRequestSent()
if (!expectsPayload) {
socket[kReset] = true
}
client[kResume]()
} catch (err) {
abort(err)
} finally {
h2stream
.off('close', onDrain)
.off('drain', onDrain)
}
}
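// Illustrative sketch of the backpressure pattern used in writeIterable above
// (not part of undici; `source` and `writable` are placeholder names): any
// Writable can be driven the same way from an async iterable by waiting for
// 'drain' whenever write() returns false.
//
//   for await (const chunk of source) {
//     if (!writable.write(chunk)) {
//       await new Promise(resolve => writable.once('drain', resolve))
//     }
//   }
//   writable.end()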
module.exports = connectH2

node_modules/undici/lib/dispatcher/client.js generated vendored Normal file

@@ -0,0 +1,614 @@
'use strict'
const assert = require('node:assert')
const net = require('node:net')
const http = require('node:http')
const util = require('../core/util.js')
const { ClientStats } = require('../util/stats.js')
const { channels } = require('../core/diagnostics.js')
const Request = require('../core/request.js')
const DispatcherBase = require('./dispatcher-base')
const {
InvalidArgumentError,
InformationalError,
ClientDestroyedError
} = require('../core/errors.js')
const buildConnector = require('../core/connect.js')
const {
kUrl,
kServerName,
kClient,
kBusy,
kConnect,
kResuming,
kRunning,
kPending,
kSize,
kQueue,
kConnected,
kConnecting,
kNeedDrain,
kKeepAliveDefaultTimeout,
kHostHeader,
kPendingIdx,
kRunningIdx,
kError,
kPipelining,
kKeepAliveTimeoutValue,
kMaxHeadersSize,
kKeepAliveMaxTimeout,
kKeepAliveTimeoutThreshold,
kHeadersTimeout,
kBodyTimeout,
kStrictContentLength,
kConnector,
kMaxRequests,
kCounter,
kClose,
kDestroy,
kDispatch,
kLocalAddress,
kMaxResponseSize,
kOnError,
kHTTPContext,
kMaxConcurrentStreams,
kResume
} = require('../core/symbols.js')
const connectH1 = require('./client-h1.js')
const connectH2 = require('./client-h2.js')
const kClosedResolve = Symbol('kClosedResolve')
const getDefaultNodeMaxHeaderSize = http &&
http.maxHeaderSize &&
Number.isInteger(http.maxHeaderSize) &&
http.maxHeaderSize > 0
? () => http.maxHeaderSize
: () => { throw new InvalidArgumentError('http module not available or http.maxHeaderSize invalid') }
const noop = () => {}
function getPipelining (client) {
return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1
}
/**
* @type {import('../../types/client.js').default}
*/
class Client extends DispatcherBase {
/**
*
* @param {string|URL} url
* @param {import('../../types/client.js').Client.Options} options
*/
constructor (url, {
maxHeaderSize,
headersTimeout,
socketTimeout,
requestTimeout,
connectTimeout,
bodyTimeout,
idleTimeout,
keepAlive,
keepAliveTimeout,
maxKeepAliveTimeout,
keepAliveMaxTimeout,
keepAliveTimeoutThreshold,
socketPath,
pipelining,
tls,
strictContentLength,
maxCachedSessions,
connect,
maxRequestsPerClient,
localAddress,
maxResponseSize,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
// h2
maxConcurrentStreams,
allowH2
} = {}) {
if (keepAlive !== undefined) {
throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
}
if (socketTimeout !== undefined) {
throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
}
if (requestTimeout !== undefined) {
throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
}
if (idleTimeout !== undefined) {
throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
}
if (maxKeepAliveTimeout !== undefined) {
throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
}
if (maxHeaderSize != null) {
if (!Number.isInteger(maxHeaderSize) || maxHeaderSize < 1) {
throw new InvalidArgumentError('invalid maxHeaderSize')
}
} else {
// If maxHeaderSize is not provided, use the default value from the http module
// or if that is not available, throw an error.
maxHeaderSize = getDefaultNodeMaxHeaderSize()
}
if (socketPath != null && typeof socketPath !== 'string') {
throw new InvalidArgumentError('invalid socketPath')
}
if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
throw new InvalidArgumentError('invalid connectTimeout')
}
if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
throw new InvalidArgumentError('invalid keepAliveTimeout')
}
if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
}
if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
}
if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
}
if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
}
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
}
if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
throw new InvalidArgumentError('localAddress must be valid string IP address')
}
if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
throw new InvalidArgumentError('maxResponseSize must be a positive number')
}
if (
autoSelectFamilyAttemptTimeout != null &&
(!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
) {
throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
}
// h2
if (allowH2 != null && typeof allowH2 !== 'boolean') {
throw new InvalidArgumentError('allowH2 must be a valid boolean value')
}
if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
}
super()
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
maxCachedSessions,
allowH2,
socketPath,
timeout: connectTimeout,
...(typeof autoSelectFamily === 'boolean' ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
this[kUrl] = util.parseOrigin(url)
this[kConnector] = connect
this[kPipelining] = pipelining != null ? pipelining : 1
this[kMaxHeadersSize] = maxHeaderSize
this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 2e3 : keepAliveTimeoutThreshold
this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
this[kServerName] = null
this[kLocalAddress] = localAddress != null ? localAddress : null
this[kResuming] = 0 // 0: idle, 1: scheduled, 2: resuming
this[kNeedDrain] = 0 // 0: idle, 1: scheduled, 2: resuming
this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
this[kMaxRequests] = maxRequestsPerClient
this[kClosedResolve] = null
this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
this[kMaxConcurrentStreams] = maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
this[kHTTPContext] = null
// kQueue is built up of 3 sections separated by
// the kRunningIdx and kPendingIdx indices.
// | complete | running | pending |
// ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
// kRunningIdx points to the first running element.
// kPendingIdx points to the first pending element.
// This implements a fast queue with an amortized
// time of O(1).
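// Illustrative example (not part of undici): with kQueue = [a, b, c, d],
// kRunningIdx = 1 and kPendingIdx = 3, request a is completed, b and c are
// running and d is pending, so kRunning = 2, kPending = 1 and kSize = 3;
// resume() later splices off the completed prefix once kRunningIdx > 256.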
this[kQueue] = []
this[kRunningIdx] = 0
this[kPendingIdx] = 0
this[kResume] = (sync) => resume(this, sync)
this[kOnError] = (err) => onError(this, err)
}
get pipelining () {
return this[kPipelining]
}
set pipelining (value) {
this[kPipelining] = value
this[kResume](true)
}
get stats () {
return new ClientStats(this)
}
get [kPending] () {
return this[kQueue].length - this[kPendingIdx]
}
get [kRunning] () {
return this[kPendingIdx] - this[kRunningIdx]
}
get [kSize] () {
return this[kQueue].length - this[kRunningIdx]
}
get [kConnected] () {
return !!this[kHTTPContext] && !this[kConnecting] && !this[kHTTPContext].destroyed
}
get [kBusy] () {
return Boolean(
this[kHTTPContext]?.busy(null) ||
(this[kSize] >= (getPipelining(this) || 1)) ||
this[kPending] > 0
)
}
/* istanbul ignore: only used for test */
[kConnect] (cb) {
connect(this)
this.once('connect', cb)
}
[kDispatch] (opts, handler) {
const request = new Request(this[kUrl].origin, opts, handler)
this[kQueue].push(request)
if (this[kResuming]) {
// Do nothing.
} else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
// Wait a tick in case stream/iterator is ended in the same tick.
this[kResuming] = 1
queueMicrotask(() => resume(this))
} else {
this[kResume](true)
}
if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
this[kNeedDrain] = 2
}
return this[kNeedDrain] < 2
}
[kClose] () {
// TODO: for H2 we need to gracefully flush the remaining enqueued
// requests and close each stream.
return new Promise((resolve) => {
if (this[kSize]) {
this[kClosedResolve] = resolve
} else {
resolve(null)
}
})
}
[kDestroy] (err) {
return new Promise((resolve) => {
const requests = this[kQueue].splice(this[kPendingIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(this, request, err)
}
const callback = () => {
if (this[kClosedResolve]) {
// TODO (fix): Should we error here with ClientDestroyedError?
this[kClosedResolve]()
this[kClosedResolve] = null
}
resolve(null)
}
if (this[kHTTPContext]) {
this[kHTTPContext].destroy(err, callback)
this[kHTTPContext] = null
} else {
queueMicrotask(callback)
}
this[kResume]()
})
}
}
function onError (client, err) {
if (
client[kRunning] === 0 &&
err.code !== 'UND_ERR_INFO' &&
err.code !== 'UND_ERR_SOCKET'
) {
// The error was not caused by a running request and is not a recoverable
// socket error.
assert(client[kPendingIdx] === client[kRunningIdx])
const requests = client[kQueue].splice(client[kRunningIdx])
for (let i = 0; i < requests.length; i++) {
const request = requests[i]
util.errorRequest(client, request, err)
}
assert(client[kSize] === 0)
}
}
/**
* @param {Client} client
* @returns {void}
*/
function connect (client) {
assert(!client[kConnecting])
assert(!client[kHTTPContext])
let { host, hostname, protocol, port } = client[kUrl]
// Resolve ipv6
if (hostname[0] === '[') {
const idx = hostname.indexOf(']')
assert(idx !== -1)
const ip = hostname.substring(1, idx)
assert(net.isIPv6(ip))
hostname = ip
}
client[kConnecting] = true
if (channels.beforeConnect.hasSubscribers) {
channels.beforeConnect.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector]
})
}
client[kConnector]({
host,
hostname,
protocol,
port,
servername: client[kServerName],
localAddress: client[kLocalAddress]
}, (err, socket) => {
if (err) {
handleConnectError(client, err, { host, hostname, protocol, port })
client[kResume]()
return
}
if (client.destroyed) {
util.destroy(socket.on('error', noop), new ClientDestroyedError())
client[kResume]()
return
}
assert(socket)
try {
client[kHTTPContext] = socket.alpnProtocol === 'h2'
? connectH2(client, socket)
: connectH1(client, socket)
} catch (err) {
socket.destroy().on('error', noop)
handleConnectError(client, err, { host, hostname, protocol, port })
client[kResume]()
return
}
client[kConnecting] = false
socket[kCounter] = 0
socket[kMaxRequests] = client[kMaxRequests]
socket[kClient] = client
socket[kError] = null
if (channels.connected.hasSubscribers) {
channels.connected.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector],
socket
})
}
client.emit('connect', client[kUrl], [client])
client[kResume]()
})
}
function handleConnectError (client, err, { host, hostname, protocol, port }) {
if (client.destroyed) {
return
}
client[kConnecting] = false
if (channels.connectError.hasSubscribers) {
channels.connectError.publish({
connectParams: {
host,
hostname,
protocol,
port,
version: client[kHTTPContext]?.version,
servername: client[kServerName],
localAddress: client[kLocalAddress]
},
connector: client[kConnector],
error: err
})
}
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
assert(client[kRunning] === 0)
while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
const request = client[kQueue][client[kPendingIdx]++]
util.errorRequest(client, request, err)
}
} else {
onError(client, err)
}
client.emit('connectionError', client[kUrl], [client], err)
}
function emitDrain (client) {
client[kNeedDrain] = 0
client.emit('drain', client[kUrl], [client])
}
function resume (client, sync) {
if (client[kResuming] === 2) {
return
}
client[kResuming] = 2
_resume(client, sync)
client[kResuming] = 0
if (client[kRunningIdx] > 256) {
client[kQueue].splice(0, client[kRunningIdx])
client[kPendingIdx] -= client[kRunningIdx]
client[kRunningIdx] = 0
}
}
function _resume (client, sync) {
while (true) {
if (client.destroyed) {
assert(client[kPending] === 0)
return
}
if (client[kClosedResolve] && !client[kSize]) {
client[kClosedResolve]()
client[kClosedResolve] = null
return
}
if (client[kHTTPContext]) {
client[kHTTPContext].resume()
}
if (client[kBusy]) {
client[kNeedDrain] = 2
} else if (client[kNeedDrain] === 2) {
if (sync) {
client[kNeedDrain] = 1
queueMicrotask(() => emitDrain(client))
} else {
emitDrain(client)
}
continue
}
if (client[kPending] === 0) {
return
}
if (client[kRunning] >= (getPipelining(client) || 1)) {
return
}
const request = client[kQueue][client[kPendingIdx]]
if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
if (client[kRunning] > 0) {
return
}
client[kServerName] = request.servername
client[kHTTPContext]?.destroy(new InformationalError('servername changed'), () => {
client[kHTTPContext] = null
resume(client)
})
}
if (client[kConnecting]) {
return
}
if (!client[kHTTPContext]) {
connect(client)
return
}
if (client[kHTTPContext].destroyed) {
return
}
if (client[kHTTPContext].busy(request)) {
return
}
if (!request.aborted && client[kHTTPContext].write(request)) {
client[kPendingIdx]++
} else {
client[kQueue].splice(client[kPendingIdx], 1)
}
}
}
module.exports = Client
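// Illustrative usage sketch (not part of undici; the origin and handler shape are
// assumptions based on the DispatchHandler callbacks used elsewhere in this bundle):
//
//   const Client = require('./client')
//   const client = new Client('http://localhost:3000', { pipelining: 2 })
//   client.dispatch({ path: '/', method: 'GET' }, {
//     onResponseStart (controller, statusCode) { console.log('status', statusCode) },
//     onResponseData (controller, chunk) { /* consume body */ },
//     onResponseEnd () { client.close() },
//     onResponseError (controller, err) { console.error(err) }
//   })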

node_modules/undici/lib/dispatcher/dispatcher-base.js generated vendored Normal file

@@ -0,0 +1,166 @@
'use strict'
const Dispatcher = require('./dispatcher')
const UnwrapHandler = require('../handler/unwrap-handler')
const {
ClientDestroyedError,
ClientClosedError,
InvalidArgumentError
} = require('../core/errors')
const { kDestroy, kClose, kClosed, kDestroyed, kDispatch } = require('../core/symbols')
const kOnDestroyed = Symbol('onDestroyed')
const kOnClosed = Symbol('onClosed')
class DispatcherBase extends Dispatcher {
/** @type {boolean} */
[kDestroyed] = false;
/** @type {Array|null} */
[kOnDestroyed] = null;
/** @type {boolean} */
[kClosed] = false;
/** @type {Array} */
[kOnClosed] = []
/** @returns {boolean} */
get destroyed () {
return this[kDestroyed]
}
/** @returns {boolean} */
get closed () {
return this[kClosed]
}
close (callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {
this.close((err, data) => {
return err ? reject(err) : resolve(data)
})
})
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (this[kDestroyed]) {
queueMicrotask(() => callback(new ClientDestroyedError(), null))
return
}
if (this[kClosed]) {
if (this[kOnClosed]) {
this[kOnClosed].push(callback)
} else {
queueMicrotask(() => callback(null, null))
}
return
}
this[kClosed] = true
this[kOnClosed].push(callback)
const onClosed = () => {
const callbacks = this[kOnClosed]
this[kOnClosed] = null
for (let i = 0; i < callbacks.length; i++) {
callbacks[i](null, null)
}
}
// Should not error.
this[kClose]()
.then(() => this.destroy())
.then(() => {
queueMicrotask(onClosed)
})
}
destroy (err, callback) {
if (typeof err === 'function') {
callback = err
err = null
}
if (callback === undefined) {
return new Promise((resolve, reject) => {
this.destroy(err, (err, data) => {
return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
})
})
}
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (this[kDestroyed]) {
if (this[kOnDestroyed]) {
this[kOnDestroyed].push(callback)
} else {
queueMicrotask(() => callback(null, null))
}
return
}
if (!err) {
err = new ClientDestroyedError()
}
this[kDestroyed] = true
this[kOnDestroyed] = this[kOnDestroyed] || []
this[kOnDestroyed].push(callback)
const onDestroyed = () => {
const callbacks = this[kOnDestroyed]
this[kOnDestroyed] = null
for (let i = 0; i < callbacks.length; i++) {
callbacks[i](null, null)
}
}
// Should not error.
this[kDestroy](err).then(() => {
queueMicrotask(onDestroyed)
})
}
dispatch (opts, handler) {
if (!handler || typeof handler !== 'object') {
throw new InvalidArgumentError('handler must be an object')
}
handler = UnwrapHandler.unwrap(handler)
try {
if (!opts || typeof opts !== 'object') {
throw new InvalidArgumentError('opts must be an object.')
}
if (this[kDestroyed] || this[kOnDestroyed]) {
throw new ClientDestroyedError()
}
if (this[kClosed]) {
throw new ClientClosedError()
}
return this[kDispatch](opts, handler)
} catch (err) {
if (typeof handler.onError !== 'function') {
throw err
}
handler.onError(err)
return false
}
}
}
module.exports = DispatcherBase

node_modules/undici/lib/dispatcher/dispatcher.js generated vendored Normal file

@@ -0,0 +1,48 @@
'use strict'
const EventEmitter = require('node:events')
const WrapHandler = require('../handler/wrap-handler')
const wrapInterceptor = (dispatch) => (opts, handler) => dispatch(opts, WrapHandler.wrap(handler))
class Dispatcher extends EventEmitter {
dispatch () {
throw new Error('not implemented')
}
close () {
throw new Error('not implemented')
}
destroy () {
throw new Error('not implemented')
}
compose (...args) {
// So we handle [interceptor1, interceptor2] or interceptor1, interceptor2, ...
const interceptors = Array.isArray(args[0]) ? args[0] : args
let dispatch = this.dispatch.bind(this)
for (const interceptor of interceptors) {
if (interceptor == null) {
continue
}
if (typeof interceptor !== 'function') {
throw new TypeError(`invalid interceptor, expected function received ${typeof interceptor}`)
}
dispatch = interceptor(dispatch)
dispatch = wrapInterceptor(dispatch)
if (dispatch == null || typeof dispatch !== 'function' || dispatch.length !== 2) {
throw new TypeError('invalid interceptor')
}
}
return new Proxy(this, {
get: (target, key) => key === 'dispatch' ? dispatch : target[key]
})
}
}
module.exports = Dispatcher
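// Illustrative sketch of an interceptor compatible with compose() above (not part
// of undici): an interceptor receives a dispatch function and must return a new
// dispatch function of arity 2.
//
//   const logging = (dispatch) => (opts, handler) => {
//     console.log(`${opts.method} ${opts.origin}${opts.path}`)
//     return dispatch(opts, handler)
//   }
//   // dispatcher.compose(logging) returns a Proxy whose dispatch() logs first.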


@@ -0,0 +1,147 @@
'use strict'
const DispatcherBase = require('./dispatcher-base')
const { kClose, kDestroy, kClosed, kDestroyed, kDispatch, kNoProxyAgent, kHttpProxyAgent, kHttpsProxyAgent } = require('../core/symbols')
const ProxyAgent = require('./proxy-agent')
const Agent = require('./agent')
const DEFAULT_PORTS = {
'http:': 80,
'https:': 443
}
class EnvHttpProxyAgent extends DispatcherBase {
#noProxyValue = null
#noProxyEntries = null
#opts = null
constructor (opts = {}) {
super()
this.#opts = opts
const { httpProxy, httpsProxy, noProxy, ...agentOpts } = opts
this[kNoProxyAgent] = new Agent(agentOpts)
const HTTP_PROXY = httpProxy ?? process.env.http_proxy ?? process.env.HTTP_PROXY
if (HTTP_PROXY) {
this[kHttpProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTP_PROXY })
} else {
this[kHttpProxyAgent] = this[kNoProxyAgent]
}
const HTTPS_PROXY = httpsProxy ?? process.env.https_proxy ?? process.env.HTTPS_PROXY
if (HTTPS_PROXY) {
this[kHttpsProxyAgent] = new ProxyAgent({ ...agentOpts, uri: HTTPS_PROXY })
} else {
this[kHttpsProxyAgent] = this[kHttpProxyAgent]
}
this.#parseNoProxy()
}
[kDispatch] (opts, handler) {
const url = new URL(opts.origin)
const agent = this.#getProxyAgentForUrl(url)
return agent.dispatch(opts, handler)
}
[kClose] () {
return Promise.all([
this[kNoProxyAgent].close(),
!this[kHttpProxyAgent][kClosed] && this[kHttpProxyAgent].close(),
!this[kHttpsProxyAgent][kClosed] && this[kHttpsProxyAgent].close()
])
}
[kDestroy] (err) {
return Promise.all([
this[kNoProxyAgent].destroy(err),
!this[kHttpProxyAgent][kDestroyed] && this[kHttpProxyAgent].destroy(err),
!this[kHttpsProxyAgent][kDestroyed] && this[kHttpsProxyAgent].destroy(err)
])
}
#getProxyAgentForUrl (url) {
let { protocol, host: hostname, port } = url
// Stripping ports in this way instead of using parsedUrl.hostname to make
// sure that the brackets around IPv6 addresses are kept.
hostname = hostname.replace(/:\d*$/, '').toLowerCase()
port = Number.parseInt(port, 10) || DEFAULT_PORTS[protocol] || 0
if (!this.#shouldProxy(hostname, port)) {
return this[kNoProxyAgent]
}
if (protocol === 'https:') {
return this[kHttpsProxyAgent]
}
return this[kHttpProxyAgent]
}
#shouldProxy (hostname, port) {
if (this.#noProxyChanged) {
this.#parseNoProxy()
}
if (this.#noProxyEntries.length === 0) {
return true // Always proxy if NO_PROXY is not set or empty.
}
if (this.#noProxyValue === '*') {
return false // Never proxy if wildcard is set.
}
for (let i = 0; i < this.#noProxyEntries.length; i++) {
const entry = this.#noProxyEntries[i]
if (entry.port && entry.port !== port) {
continue // Skip if ports don't match.
}
if (!/^[.*]/.test(entry.hostname)) {
// No wildcards, so skip the proxy only on an exact hostname match.
if (hostname === entry.hostname) {
return false
}
} else {
// Don't proxy if the hostname ends with the no_proxy host.
if (hostname.endsWith(entry.hostname.replace(/^\*/, ''))) {
return false
}
}
}
return true
}
#parseNoProxy () {
const noProxyValue = this.#opts.noProxy ?? this.#noProxyEnv
const noProxySplit = noProxyValue.split(/[,\s]/)
const noProxyEntries = []
for (let i = 0; i < noProxySplit.length; i++) {
const entry = noProxySplit[i]
if (!entry) {
continue
}
const parsed = entry.match(/^(.+):(\d+)$/)
noProxyEntries.push({
hostname: (parsed ? parsed[1] : entry).toLowerCase(),
port: parsed ? Number.parseInt(parsed[2], 10) : 0
})
}
this.#noProxyValue = noProxyValue
this.#noProxyEntries = noProxyEntries
}
get #noProxyChanged () {
if (this.#opts.noProxy !== undefined) {
return false
}
return this.#noProxyValue !== this.#noProxyEnv
}
get #noProxyEnv () {
return process.env.no_proxy ?? process.env.NO_PROXY ?? ''
}
}
module.exports = EnvHttpProxyAgent
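// Illustrative example (not part of undici; the file name in the require is an
// assumption): with NO_PROXY='localhost,.example.com:8080', #parseNoProxy() yields
// [{ hostname: 'localhost', port: 0 }, { hostname: '.example.com', port: 8080 }],
// so requests to localhost (any port) and to hosts ending in .example.com on port
// 8080 use the plain agent, while everything else goes through the agent built
// from HTTP_PROXY / HTTPS_PROXY.
//
//   const EnvHttpProxyAgent = require('./env-http-proxy-agent')
//   const agent = new EnvHttpProxyAgent() // reads http_proxy / https_proxy / no_proxy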

node_modules/undici/lib/dispatcher/fixed-queue.js generated vendored Normal file

@@ -0,0 +1,135 @@
'use strict'
// Extracted from node/lib/internal/fixed_queue.js
// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
const kSize = 2048
const kMask = kSize - 1
// The FixedQueue is implemented as a singly-linked list of fixed-size
// circular buffers. It looks something like this:
//
// head tail
// | |
// v v
// +-----------+ <-----\ +-----------+ <------\ +-----------+
// | [null] | \----- | next | \------- | next |
// +-----------+ +-----------+ +-----------+
// | item | <-- bottom | item | <-- bottom | undefined |
// | item | | item | | undefined |
// | item | | item | | undefined |
// | item | | item | | undefined |
// | item | | item | bottom --> | item |
// | item | | item | | item |
// | ... | | ... | | ... |
// | item | | item | | item |
// | item | | item | | item |
// | undefined | <-- top | item | | item |
// | undefined | | item | | item |
// | undefined | | undefined | <-- top top --> | undefined |
// +-----------+ +-----------+ +-----------+
//
// Or, if there is only one circular buffer, it looks something
// like either of these:
//
// head tail head tail
// | | | |
// v v v v
// +-----------+ +-----------+
// | [null] | | [null] |
// +-----------+ +-----------+
// | undefined | | item |
// | undefined | | item |
// | item | <-- bottom top --> | undefined |
// | item | | undefined |
// | undefined | <-- top bottom --> | item |
// | undefined | | item |
// +-----------+ +-----------+
//
// Adding a value means moving `top` forward by one, removing means
// moving `bottom` forward by one. After reaching the end, the queue
// wraps around.
//
// When `top === bottom` the current queue is empty and when
// `top + 1 === bottom` it's full. This wastes a single space of storage
// but allows much quicker checks.
/**
* @type {FixedCircularBuffer}
* @template T
*/
class FixedCircularBuffer {
/** @type {number} */
bottom = 0
/** @type {number} */
top = 0
/** @type {Array<T|undefined>} */
list = new Array(kSize).fill(undefined)
/** @type {T|null} */
next = null
/** @returns {boolean} */
isEmpty () {
return this.top === this.bottom
}
/** @returns {boolean} */
isFull () {
return ((this.top + 1) & kMask) === this.bottom
}
/**
* @param {T} data
* @returns {void}
*/
push (data) {
this.list[this.top] = data
this.top = (this.top + 1) & kMask
}
/** @returns {T|null} */
shift () {
const nextItem = this.list[this.bottom]
if (nextItem === undefined) { return null }
this.list[this.bottom] = undefined
this.bottom = (this.bottom + 1) & kMask
return nextItem
}
}
/**
* @template T
*/
module.exports = class FixedQueue {
constructor () {
/** @type {FixedCircularBuffer<T>} */
this.head = this.tail = new FixedCircularBuffer()
}
/** @returns {boolean} */
isEmpty () {
return this.head.isEmpty()
}
/** @param {T} data */
push (data) {
if (this.head.isFull()) {
// Head is full: Creates a new queue, sets the old queue's `.next` to it,
// and sets it as the new main queue.
this.head = this.head.next = new FixedCircularBuffer()
}
this.head.push(data)
}
/** @returns {T|null} */
shift () {
const tail = this.tail
const next = tail.shift()
if (tail.isEmpty() && tail.next !== null) {
// If there is another queue, it forms the new tail.
this.tail = tail.next
tail.next = null
}
return next
}
}
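// Illustrative usage sketch (not part of undici):
//
//   const FixedQueue = require('./fixed-queue')
//   const queue = new FixedQueue()
//   queue.push({ opts: { path: '/' }, handler: {} })
//   queue.shift() // -> the pushed item, or null when the queue is empty
//
// Once a 2048-slot buffer fills up, push() links a fresh circular buffer instead
// of reallocating, which keeps both push() and shift() amortized O(1).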

node_modules/undici/lib/dispatcher/h2c-client.js generated vendored Normal file

@@ -0,0 +1,122 @@
'use strict'
const { connect } = require('node:net')
const { kClose, kDestroy } = require('../core/symbols')
const { InvalidArgumentError } = require('../core/errors')
const util = require('../core/util')
const Client = require('./client')
const DispatcherBase = require('./dispatcher-base')
class H2CClient extends DispatcherBase {
#client = null
constructor (origin, clientOpts) {
if (typeof origin === 'string') {
origin = new URL(origin)
}
if (origin.protocol !== 'http:') {
throw new InvalidArgumentError(
'h2c-client: Only h2c protocol is supported'
)
}
const { connect, maxConcurrentStreams, pipelining, ...opts } =
clientOpts ?? {}
let defaultMaxConcurrentStreams = 100
let defaultPipelining = 100
if (
maxConcurrentStreams != null &&
Number.isInteger(maxConcurrentStreams) &&
maxConcurrentStreams > 0
) {
defaultMaxConcurrentStreams = maxConcurrentStreams
}
if (pipelining != null && Number.isInteger(pipelining) && pipelining > 0) {
defaultPipelining = pipelining
}
if (defaultPipelining > defaultMaxConcurrentStreams) {
throw new InvalidArgumentError(
'h2c-client: pipelining cannot be greater than maxConcurrentStreams'
)
}
super()
this.#client = new Client(origin, {
...opts,
connect: this.#buildConnector(connect),
maxConcurrentStreams: defaultMaxConcurrentStreams,
pipelining: defaultPipelining,
allowH2: true
})
}
#buildConnector (connectOpts) {
return (opts, callback) => {
const timeout = connectOpts?.connectOpts ?? 10e3
const { hostname, port, pathname } = opts
const socket = connect({
...opts,
host: hostname,
port,
pathname
})
// Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
if (opts.keepAlive == null || opts.keepAlive) {
const keepAliveInitialDelay =
opts.keepAliveInitialDelay == null ? 60e3 : opts.keepAliveInitialDelay
socket.setKeepAlive(true, keepAliveInitialDelay)
}
socket.alpnProtocol = 'h2'
const clearConnectTimeout = util.setupConnectTimeout(
new WeakRef(socket),
{ timeout, hostname, port }
)
socket
.setNoDelay(true)
.once('connect', function () {
queueMicrotask(clearConnectTimeout)
if (callback) {
const cb = callback
callback = null
cb(null, this)
}
})
.on('error', function (err) {
queueMicrotask(clearConnectTimeout)
if (callback) {
const cb = callback
callback = null
cb(err)
}
})
return socket
}
}
dispatch (opts, handler) {
return this.#client.dispatch(opts, handler)
}
[kClose] () {
return this.#client.close()
}
[kDestroy] () {
return this.#client.destroy()
}
}
module.exports = H2CClient
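// Illustrative usage sketch (not part of undici; the origin is an assumption):
//
//   const H2CClient = require('./h2c-client')
//   const client = new H2CClient('http://localhost:8080', { maxConcurrentStreams: 50 })
//   // dispatch()/close()/destroy() delegate to the wrapped Client, but every request
//   // travels over a prior-knowledge HTTP/2 connection (socket.alpnProtocol = 'h2').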

node_modules/undici/lib/dispatcher/pool-base.js generated vendored Normal file

@@ -0,0 +1,208 @@
'use strict'
const { PoolStats } = require('../util/stats.js')
const DispatcherBase = require('./dispatcher-base')
const FixedQueue = require('./fixed-queue')
const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('../core/symbols')
const kClients = Symbol('clients')
const kNeedDrain = Symbol('needDrain')
const kQueue = Symbol('queue')
const kClosedResolve = Symbol('closed resolve')
const kOnDrain = Symbol('onDrain')
const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kGetDispatcher = Symbol('get dispatcher')
const kAddClient = Symbol('add client')
const kRemoveClient = Symbol('remove client')
class PoolBase extends DispatcherBase {
[kQueue] = new FixedQueue();
[kQueued] = 0;
[kClients] = [];
[kNeedDrain] = false;
[kOnDrain] (client, origin, targets) {
const queue = this[kQueue]
let needDrain = false
while (!needDrain) {
const item = queue.shift()
if (!item) {
break
}
this[kQueued]--
needDrain = !client.dispatch(item.opts, item.handler)
}
client[kNeedDrain] = needDrain
if (!needDrain && this[kNeedDrain]) {
this[kNeedDrain] = false
this.emit('drain', origin, [this, ...targets])
}
if (this[kClosedResolve] && queue.isEmpty()) {
const closeAll = new Array(this[kClients].length)
for (let i = 0; i < this[kClients].length; i++) {
closeAll[i] = this[kClients][i].close()
}
Promise.all(closeAll)
.then(this[kClosedResolve])
}
}
[kOnConnect] = (origin, targets) => {
this.emit('connect', origin, [this, ...targets])
};
[kOnDisconnect] = (origin, targets, err) => {
this.emit('disconnect', origin, [this, ...targets], err)
};
[kOnConnectionError] = (origin, targets, err) => {
this.emit('connectionError', origin, [this, ...targets], err)
}
get [kBusy] () {
return this[kNeedDrain]
}
get [kConnected] () {
let ret = 0
for (const { [kConnected]: connected } of this[kClients]) {
ret += connected
}
return ret
}
get [kFree] () {
let ret = 0
for (const { [kConnected]: connected, [kNeedDrain]: needDrain } of this[kClients]) {
ret += connected && !needDrain
}
return ret
}
get [kPending] () {
let ret = this[kQueued]
for (const { [kPending]: pending } of this[kClients]) {
ret += pending
}
return ret
}
get [kRunning] () {
let ret = 0
for (const { [kRunning]: running } of this[kClients]) {
ret += running
}
return ret
}
get [kSize] () {
let ret = this[kQueued]
for (const { [kSize]: size } of this[kClients]) {
ret += size
}
return ret
}
get stats () {
return new PoolStats(this)
}
[kClose] () {
if (this[kQueue].isEmpty()) {
const closeAll = new Array(this[kClients].length)
for (let i = 0; i < this[kClients].length; i++) {
closeAll[i] = this[kClients][i].close()
}
return Promise.all(closeAll)
} else {
return new Promise((resolve) => {
this[kClosedResolve] = resolve
})
}
}
[kDestroy] (err) {
while (true) {
const item = this[kQueue].shift()
if (!item) {
break
}
item.handler.onError(err)
}
const destroyAll = new Array(this[kClients].length)
for (let i = 0; i < this[kClients].length; i++) {
destroyAll[i] = this[kClients][i].destroy(err)
}
return Promise.all(destroyAll)
}
[kDispatch] (opts, handler) {
const dispatcher = this[kGetDispatcher]()
if (!dispatcher) {
this[kNeedDrain] = true
this[kQueue].push({ opts, handler })
this[kQueued]++
} else if (!dispatcher.dispatch(opts, handler)) {
dispatcher[kNeedDrain] = true
this[kNeedDrain] = !this[kGetDispatcher]()
}
return !this[kNeedDrain]
}
[kAddClient] (client) {
client
.on('drain', this[kOnDrain].bind(this, client))
.on('connect', this[kOnConnect])
.on('disconnect', this[kOnDisconnect])
.on('connectionError', this[kOnConnectionError])
this[kClients].push(client)
if (this[kNeedDrain]) {
queueMicrotask(() => {
if (this[kNeedDrain]) {
this[kOnDrain](client, client[kUrl], [client, this])
}
})
}
return this
}
[kRemoveClient] (client) {
client.close(() => {
const idx = this[kClients].indexOf(client)
if (idx !== -1) {
this[kClients].splice(idx, 1)
}
})
this[kNeedDrain] = this[kClients].some(dispatcher => (
!dispatcher[kNeedDrain] &&
dispatcher.closed !== true &&
dispatcher.destroyed !== true
))
}
}
module.exports = {
PoolBase,
kClients,
kNeedDrain,
kAddClient,
kRemoveClient,
kGetDispatcher
}

node_modules/undici/lib/dispatcher/pool.js generated vendored Normal file

@@ -0,0 +1,118 @@
'use strict'
const {
PoolBase,
kClients,
kNeedDrain,
kAddClient,
kGetDispatcher,
kRemoveClient
} = require('./pool-base')
const Client = require('./client')
const {
InvalidArgumentError
} = require('../core/errors')
const util = require('../core/util')
const { kUrl } = require('../core/symbols')
const buildConnector = require('../core/connect')
const kOptions = Symbol('options')
const kConnections = Symbol('connections')
const kFactory = Symbol('factory')
function defaultFactory (origin, opts) {
return new Client(origin, opts)
}
class Pool extends PoolBase {
constructor (origin, {
connections,
factory = defaultFactory,
connect,
connectTimeout,
tls,
maxCachedSessions,
socketPath,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
allowH2,
clientTtl,
...options
} = {}) {
if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
throw new InvalidArgumentError('invalid connections')
}
if (typeof factory !== 'function') {
throw new InvalidArgumentError('factory must be a function.')
}
if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
throw new InvalidArgumentError('connect must be a function or an object')
}
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
maxCachedSessions,
allowH2,
socketPath,
timeout: connectTimeout,
...(typeof autoSelectFamily === 'boolean' ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
super()
this[kConnections] = connections || null
this[kUrl] = util.parseOrigin(origin)
this[kOptions] = { ...util.deepClone(options), connect, allowH2, clientTtl }
this[kOptions].interceptors = options.interceptors
? { ...options.interceptors }
: undefined
this[kFactory] = factory
this.on('connect', (origin, targets) => {
if (clientTtl != null && clientTtl > 0) {
for (const target of targets) {
Object.assign(target, { ttl: Date.now() })
}
}
})
this.on('connectionError', (origin, targets, error) => {
// If a connection error occurs, we remove the client from the pool,
// and emit a connectionError event. They will not be re-used.
// Fixes https://github.com/nodejs/undici/issues/3895
for (const target of targets) {
// Do not use kRemoveClient here, as it will close the client,
// but the client cannot be closed in this state.
const idx = this[kClients].indexOf(target)
if (idx !== -1) {
this[kClients].splice(idx, 1)
}
}
})
}
[kGetDispatcher] () {
const clientTtlOption = this[kOptions].clientTtl
for (const client of this[kClients]) {
// check ttl of client and if it's stale, remove it from the pool
if (clientTtlOption != null && clientTtlOption > 0 && client.ttl && ((Date.now() - client.ttl) > clientTtlOption)) {
this[kRemoveClient](client)
} else if (!client[kNeedDrain]) {
return client
}
}
if (!this[kConnections] || this[kClients].length < this[kConnections]) {
const dispatcher = this[kFactory](this[kUrl], this[kOptions])
this[kAddClient](dispatcher)
return dispatcher
}
}
}
module.exports = Pool
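// Illustrative usage sketch (not part of undici; the origin is an assumption):
//
//   const Pool = require('./pool')
//   const pool = new Pool('http://localhost:3000', { connections: 4, clientTtl: 60e3 })
//   // Up to 4 Clients are created lazily by kGetDispatcher; a client older than
//   // clientTtl is dropped from the pool the next time a dispatcher is requested.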

node_modules/undici/lib/dispatcher/proxy-agent.js generated vendored Normal file

@@ -0,0 +1,276 @@
'use strict'
const { kProxy, kClose, kDestroy, kDispatch } = require('../core/symbols')
const Agent = require('./agent')
const Pool = require('./pool')
const DispatcherBase = require('./dispatcher-base')
const { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require('../core/errors')
const buildConnector = require('../core/connect')
const Client = require('./client')
const kAgent = Symbol('proxy agent')
const kClient = Symbol('proxy client')
const kProxyHeaders = Symbol('proxy headers')
const kRequestTls = Symbol('request tls settings')
const kProxyTls = Symbol('proxy tls settings')
const kConnectEndpoint = Symbol('connect endpoint function')
const kTunnelProxy = Symbol('tunnel proxy')
function defaultProtocolPort (protocol) {
return protocol === 'https:' ? 443 : 80
}
function defaultFactory (origin, opts) {
return new Pool(origin, opts)
}
const noop = () => {}
function defaultAgentFactory (origin, opts) {
if (opts.connections === 1) {
return new Client(origin, opts)
}
return new Pool(origin, opts)
}
class Http1ProxyWrapper extends DispatcherBase {
#client
constructor (proxyUrl, { headers = {}, connect, factory }) {
if (!proxyUrl) {
throw new InvalidArgumentError('Proxy URL is mandatory')
}
super()
this[kProxyHeaders] = headers
if (factory) {
this.#client = factory(proxyUrl, { connect })
} else {
this.#client = new Client(proxyUrl, { connect })
}
}
[kDispatch] (opts, handler) {
const onHeaders = handler.onHeaders
handler.onHeaders = function (statusCode, data, resume) {
if (statusCode === 407) {
if (typeof handler.onError === 'function') {
handler.onError(new InvalidArgumentError('Proxy Authentication Required (407)'))
}
return
}
if (onHeaders) onHeaders.call(this, statusCode, data, resume)
}
// Rewrite request as an HTTP1 Proxy request, without tunneling.
const {
origin,
path = '/',
headers = {}
} = opts
opts.path = origin + path
if (!('host' in headers) && !('Host' in headers)) {
const { host } = new URL(origin)
headers.host = host
}
opts.headers = { ...this[kProxyHeaders], ...headers }
return this.#client[kDispatch](opts, handler)
}
[kClose] () {
return this.#client.close()
}
[kDestroy] (err) {
return this.#client.destroy(err)
}
}
class ProxyAgent extends DispatcherBase {
constructor (opts) {
if (!opts || (typeof opts === 'object' && !(opts instanceof URL) && !opts.uri)) {
throw new InvalidArgumentError('Proxy uri is mandatory')
}
const { clientFactory = defaultFactory } = opts
if (typeof clientFactory !== 'function') {
throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
}
const { proxyTunnel = true } = opts
super()
const url = this.#getUrl(opts)
const { href, origin, port, protocol, username, password, hostname: proxyHostname } = url
this[kProxy] = { uri: href, protocol }
this[kRequestTls] = opts.requestTls
this[kProxyTls] = opts.proxyTls
this[kProxyHeaders] = opts.headers || {}
this[kTunnelProxy] = proxyTunnel
if (opts.auth && opts.token) {
throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
} else if (opts.auth) {
/* @deprecated in favour of opts.token */
this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
} else if (opts.token) {
this[kProxyHeaders]['proxy-authorization'] = opts.token
} else if (username && password) {
this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
}
const connect = buildConnector({ ...opts.proxyTls })
this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
const agentFactory = opts.factory || defaultAgentFactory
const factory = (origin, options) => {
const { protocol } = new URL(origin)
if (!this[kTunnelProxy] && protocol === 'http:' && this[kProxy].protocol === 'http:') {
return new Http1ProxyWrapper(this[kProxy].uri, {
headers: this[kProxyHeaders],
connect,
factory: agentFactory
})
}
return agentFactory(origin, options)
}
this[kClient] = clientFactory(url, { connect })
this[kAgent] = new Agent({
...opts,
factory,
connect: async (opts, callback) => {
let requestedPath = opts.host
if (!opts.port) {
requestedPath += `:${defaultProtocolPort(opts.protocol)}`
}
try {
const { socket, statusCode } = await this[kClient].connect({
origin,
port,
path: requestedPath,
signal: opts.signal,
headers: {
...this[kProxyHeaders],
host: opts.host,
...(opts.connections == null || opts.connections > 0 ? { 'proxy-connection': 'keep-alive' } : {})
},
servername: this[kProxyTls]?.servername || proxyHostname
})
if (statusCode !== 200) {
socket.on('error', noop).destroy()
callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
}
if (opts.protocol !== 'https:') {
callback(null, socket)
return
}
let servername
if (this[kRequestTls]) {
servername = this[kRequestTls].servername
} else {
servername = opts.servername
}
this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
} catch (err) {
if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
// Throw a custom error to avoid loop in client.js#connect
callback(new SecureProxyConnectionError(err))
} else {
callback(err)
}
}
}
})
}
dispatch (opts, handler) {
const headers = buildHeaders(opts.headers)
throwIfProxyAuthIsSent(headers)
if (headers && !('host' in headers) && !('Host' in headers)) {
const { host } = new URL(opts.origin)
headers.host = host
}
return this[kAgent].dispatch(
{
...opts,
headers
},
handler
)
}
/**
* @param {import('../../types/proxy-agent').ProxyAgent.Options | string | URL} opts
* @returns {URL}
*/
#getUrl (opts) {
if (typeof opts === 'string') {
return new URL(opts)
} else if (opts instanceof URL) {
return opts
} else {
return new URL(opts.uri)
}
}
[kClose] () {
return Promise.all([
this[kAgent].close(),
this[kClient].close()
])
}
[kDestroy] () {
return Promise.all([
this[kAgent].destroy(),
this[kClient].destroy()
])
}
}
/**
* @param {string[] | Record<string, string>} headers
* @returns {Record<string, string>}
*/
function buildHeaders (headers) {
// When using undici.fetch, the headers list is stored
// as an array.
if (Array.isArray(headers)) {
/** @type {Record<string, string>} */
const headersPair = {}
for (let i = 0; i < headers.length; i += 2) {
headersPair[headers[i]] = headers[i + 1]
}
return headersPair
}
return headers
}
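// e.g. buildHeaders(['host', 'example.com', 'accept', '*/*'])
//   -> { host: 'example.com', accept: '*/*' }   (illustrative values)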
/**
* @param {Record<string, string>} headers
*
* Previous versions of ProxyAgent accepted a Proxy-Authorization header among the request headers.
* This was changed, and this check was added to protect end users from a security vulnerability.
* It should be removed in the next major version for performance reasons
*/
function throwIfProxyAuthIsSent (headers) {
const existProxyAuth = headers && Object.keys(headers)
.find((key) => key.toLowerCase() === 'proxy-authorization')
if (existProxyAuth) {
throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')
}
}
module.exports = ProxyAgent
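// Illustrative usage sketch (not part of undici; the proxy URI and token are assumptions):
//
//   const ProxyAgent = require('./proxy-agent')
//   const agent = new ProxyAgent({ uri: 'http://proxy.internal:3128', token: 'Bearer abc' })
//   // dispatch() tunnels requests through CONNECT by default; passing proxyTunnel: false
//   // rewrites plain-http requests as absolute-form HTTP/1 proxy requests instead.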

node_modules/undici/lib/dispatcher/retry-agent.js generated vendored Normal file

@@ -0,0 +1,35 @@
'use strict'
const Dispatcher = require('./dispatcher')
const RetryHandler = require('../handler/retry-handler')
class RetryAgent extends Dispatcher {
#agent = null
#options = null
constructor (agent, options = {}) {
super(options)
this.#agent = agent
this.#options = options
}
dispatch (opts, handler) {
const retry = new RetryHandler({
...opts,
retryOptions: this.#options
}, {
dispatch: this.#agent.dispatch.bind(this.#agent),
handler
})
return this.#agent.dispatch(opts, retry)
}
close () {
return this.#agent.close()
}
destroy () {
return this.#agent.destroy()
}
}
module.exports = RetryAgent
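// Illustrative usage sketch (not part of undici; the retry option names follow
// RetryHandler's options and are assumptions here):
//
//   const Agent = require('./agent')
//   const RetryAgent = require('./retry-agent')
//   const agent = new RetryAgent(new Agent(), { maxRetries: 3, minTimeout: 500 })
//   // every dispatch() is wrapped in a RetryHandler that replays the request with
//   // the configured backoff before surfacing an error to the original handler.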

node_modules/undici/lib/global.js generated vendored Normal file

@@ -0,0 +1,50 @@
'use strict'
// We include a version number for the Dispatcher API. In case of breaking changes,
// this version number must be increased to avoid conflicts.
const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
const { InvalidArgumentError } = require('./core/errors')
const Agent = require('./dispatcher/agent')
if (getGlobalDispatcher() === undefined) {
setGlobalDispatcher(new Agent())
}
function setGlobalDispatcher (agent) {
if (!agent || typeof agent.dispatch !== 'function') {
throw new InvalidArgumentError('Argument agent must implement Agent')
}
Object.defineProperty(globalThis, globalDispatcher, {
value: agent,
writable: true,
enumerable: false,
configurable: false
})
}
function getGlobalDispatcher () {
return globalThis[globalDispatcher]
}
// These are the globals that can be installed by undici.install().
// Not exported by index.js to avoid use outside of this module.
const installedExports = /** @type {const} */ (
[
'fetch',
'Headers',
'Response',
'Request',
'FormData',
'WebSocket',
'CloseEvent',
'ErrorEvent',
'MessageEvent',
'EventSource'
]
)
module.exports = {
setGlobalDispatcher,
getGlobalDispatcher,
installedExports
}
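// Illustrative usage sketch (not part of undici; the proxy URI is an assumption):
//
//   const { setGlobalDispatcher, getGlobalDispatcher } = require('./global')
//   const ProxyAgent = require('./dispatcher/proxy-agent')
//   setGlobalDispatcher(new ProxyAgent('http://proxy.internal:3128'))
//   getGlobalDispatcher() // -> the ProxyAgent above, picked up by undici's fetch/request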

node_modules/undici/lib/handler/cache-handler.js generated vendored Normal file

@@ -0,0 +1,469 @@
'use strict'
const util = require('../core/util')
const {
parseCacheControlHeader,
parseVaryHeader,
isEtagUsable
} = require('../util/cache')
const { parseHttpDate } = require('../util/date.js')
function noop () {}
// Status codes that we can use some heuristics on to cache
const HEURISTICALLY_CACHEABLE_STATUS_CODES = [
200, 203, 204, 206, 300, 301, 308, 404, 405, 410, 414, 501
]
// Status codes which semantic is not handled by the cache
// https://datatracker.ietf.org/doc/html/rfc9111#section-3
// This list should not grow beyond 206 and 304 unless the RFC is updated
// by a newer one including more. Please introduce another list if
// implementing caching of responses with the 'must-understand' directive.
const NOT_UNDERSTOOD_STATUS_CODES = [
206, 304
]
const MAX_RESPONSE_AGE = 2147483647000
/**
* @typedef {import('../../types/dispatcher.d.ts').default.DispatchHandler} DispatchHandler
*
* @implements {DispatchHandler}
*/
class CacheHandler {
/**
* @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
*/
#cacheKey
/**
* @type {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions['type']}
*/
#cacheType
/**
* @type {number | undefined}
*/
#cacheByDefault
/**
* @type {import('../../types/cache-interceptor.d.ts').default.CacheStore}
*/
#store
/**
* @type {import('../../types/dispatcher.d.ts').default.DispatchHandler}
*/
#handler
/**
* @type {import('node:stream').Writable | undefined}
*/
#writeStream
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} opts
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
*/
constructor ({ store, type, cacheByDefault }, cacheKey, handler) {
this.#store = store
this.#cacheType = type
this.#cacheByDefault = cacheByDefault
this.#cacheKey = cacheKey
this.#handler = handler
}
onRequestStart (controller, context) {
this.#writeStream?.destroy()
this.#writeStream = undefined
this.#handler.onRequestStart?.(controller, context)
}
onRequestUpgrade (controller, statusCode, headers, socket) {
this.#handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
}
/**
* @param {import('../../types/dispatcher.d.ts').default.DispatchController} controller
* @param {number} statusCode
* @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
* @param {string} statusMessage
*/
onResponseStart (
controller,
statusCode,
resHeaders,
statusMessage
) {
const downstreamOnHeaders = () =>
this.#handler.onResponseStart?.(
controller,
statusCode,
resHeaders,
statusMessage
)
if (
!util.safeHTTPMethods.includes(this.#cacheKey.method) &&
statusCode >= 200 &&
statusCode <= 399
) {
// Successful response to an unsafe method, delete it from cache
// https://www.rfc-editor.org/rfc/rfc9111.html#name-invalidating-stored-response
try {
this.#store.delete(this.#cacheKey)?.catch?.(noop)
} catch {
// Fail silently
}
return downstreamOnHeaders()
}
const cacheControlHeader = resHeaders['cache-control']
const heuristicallyCacheable = resHeaders['last-modified'] && HEURISTICALLY_CACHEABLE_STATUS_CODES.includes(statusCode)
if (
!cacheControlHeader &&
!resHeaders['expires'] &&
!heuristicallyCacheable &&
!this.#cacheByDefault
) {
// Nothing tells us this response is cacheable and we're not
// caching by default
return downstreamOnHeaders()
}
const cacheControlDirectives = cacheControlHeader ? parseCacheControlHeader(cacheControlHeader) : {}
if (!canCacheResponse(this.#cacheType, statusCode, resHeaders, cacheControlDirectives)) {
return downstreamOnHeaders()
}
const now = Date.now()
const resAge = resHeaders.age ? getAge(resHeaders.age) : undefined
if (resAge && resAge >= MAX_RESPONSE_AGE) {
// Response considered stale
return downstreamOnHeaders()
}
const resDate = typeof resHeaders.date === 'string'
? parseHttpDate(resHeaders.date)
: undefined
const staleAt =
determineStaleAt(this.#cacheType, now, resAge, resHeaders, resDate, cacheControlDirectives) ??
this.#cacheByDefault
if (staleAt === undefined || (resAge && resAge > staleAt)) {
return downstreamOnHeaders()
}
const baseTime = resDate ? resDate.getTime() : now
const absoluteStaleAt = staleAt + baseTime
if (now >= absoluteStaleAt) {
// Response is already stale
return downstreamOnHeaders()
}
let varyDirectives
if (this.#cacheKey.headers && resHeaders.vary) {
varyDirectives = parseVaryHeader(resHeaders.vary, this.#cacheKey.headers)
if (!varyDirectives) {
// Parse error
return downstreamOnHeaders()
}
}
const deleteAt = determineDeleteAt(baseTime, cacheControlDirectives, absoluteStaleAt)
const strippedHeaders = stripNecessaryHeaders(resHeaders, cacheControlDirectives)
/**
* @type {import('../../types/cache-interceptor.d.ts').default.CacheValue}
*/
const value = {
statusCode,
statusMessage,
headers: strippedHeaders,
vary: varyDirectives,
cacheControlDirectives,
cachedAt: resAge ? now - resAge : now,
staleAt: absoluteStaleAt,
deleteAt
}
if (typeof resHeaders.etag === 'string' && isEtagUsable(resHeaders.etag)) {
value.etag = resHeaders.etag
}
this.#writeStream = this.#store.createWriteStream(this.#cacheKey, value)
if (!this.#writeStream) {
return downstreamOnHeaders()
}
const handler = this
this.#writeStream
.on('drain', () => controller.resume())
.on('error', function () {
// TODO (fix): Make error somehow observable?
handler.#writeStream = undefined
// Delete the value in case the cache store is holding onto state from
// the call to createWriteStream
handler.#store.delete(handler.#cacheKey)
})
.on('close', function () {
if (handler.#writeStream === this) {
handler.#writeStream = undefined
}
// TODO (fix): Should we resume even if was paused downstream?
controller.resume()
})
return downstreamOnHeaders()
}
onResponseData (controller, chunk) {
if (this.#writeStream?.write(chunk) === false) {
controller.pause()
}
this.#handler.onResponseData?.(controller, chunk)
}
onResponseEnd (controller, trailers) {
this.#writeStream?.end()
this.#handler.onResponseEnd?.(controller, trailers)
}
onResponseError (controller, err) {
this.#writeStream?.destroy(err)
this.#writeStream = undefined
this.#handler.onResponseError?.(controller, err)
}
}
/**
* @see https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen
*
* @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType
* @param {number} statusCode
* @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
*/
function canCacheResponse (cacheType, statusCode, resHeaders, cacheControlDirectives) {
// Status code must be final and understood.
if (statusCode < 200 || NOT_UNDERSTOOD_STATUS_CODES.includes(statusCode)) {
return false
}
// Responses that have neither a heuristically cacheable status code nor "explicit enough" caching
// directives are not cacheable. "Explicit enough": see https://www.rfc-editor.org/rfc/rfc9111.html#section-3
if (!HEURISTICALLY_CACHEABLE_STATUS_CODES.includes(statusCode) && !resHeaders['expires'] &&
!cacheControlDirectives.public &&
cacheControlDirectives['max-age'] === undefined &&
// RFC 9111: a private response directive, if the cache is not shared
!(cacheControlDirectives.private && cacheType === 'private') &&
!(cacheControlDirectives['s-maxage'] !== undefined && cacheType === 'shared')
) {
return false
}
if (cacheControlDirectives['no-store']) {
return false
}
if (cacheType === 'shared' && cacheControlDirectives.private === true) {
return false
}
// https://www.rfc-editor.org/rfc/rfc9111.html#section-4.1-5
if (resHeaders.vary?.includes('*')) {
return false
}
// https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen
if (resHeaders.authorization) {
if (!cacheControlDirectives.public || typeof resHeaders.authorization !== 'string') {
return false
}
if (
Array.isArray(cacheControlDirectives['no-cache']) &&
cacheControlDirectives['no-cache'].includes('authorization')
) {
return false
}
if (
Array.isArray(cacheControlDirectives['private']) &&
cacheControlDirectives['private'].includes('authorization')
) {
return false
}
}
return true
}
/**
* @param {string | string[]} ageHeader
* @returns {number | undefined}
*/
function getAge (ageHeader) {
const age = parseInt(Array.isArray(ageHeader) ? ageHeader[0] : ageHeader)
return isNaN(age) ? undefined : age * 1000
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType
* @param {number} now
* @param {number | undefined} age
* @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
* @param {Date | undefined} responseDate
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
*
* @returns {number | undefined} freshness lifetime in milliseconds (relative to the response date), or undefined if the response shouldn't be cached
*/
function determineStaleAt (cacheType, now, age, resHeaders, responseDate, cacheControlDirectives) {
if (cacheType === 'shared') {
// Prioritize s-maxage since we're a shared cache
// s-maxage > max-age > Expire
// https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.2.10-3
const sMaxAge = cacheControlDirectives['s-maxage']
if (sMaxAge !== undefined) {
return sMaxAge > 0 ? sMaxAge * 1000 : undefined
}
}
const maxAge = cacheControlDirectives['max-age']
if (maxAge !== undefined) {
return maxAge > 0 ? maxAge * 1000 : undefined
}
if (typeof resHeaders.expires === 'string') {
// https://www.rfc-editor.org/rfc/rfc9111.html#section-5.3
const expiresDate = parseHttpDate(resHeaders.expires)
if (expiresDate) {
if (now >= expiresDate.getTime()) {
return undefined
}
if (responseDate) {
if (responseDate >= expiresDate) {
return undefined
}
if (age !== undefined && age > (expiresDate - responseDate)) {
return undefined
}
}
return expiresDate.getTime() - now
}
}
if (typeof resHeaders['last-modified'] === 'string') {
// https://www.rfc-editor.org/rfc/rfc9111.html#name-calculating-heuristic-fresh
const lastModified = new Date(resHeaders['last-modified'])
if (isValidDate(lastModified)) {
if (lastModified.getTime() >= now) {
return undefined
}
const responseAge = now - lastModified.getTime()
return responseAge * 0.1
}
}
if (cacheControlDirectives.immutable) {
// https://www.rfc-editor.org/rfc/rfc8246.html#section-2.2
return 31536000
}
return undefined
}
/**
* @param {number} now
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
* @param {number} staleAt
*/
function determineDeleteAt (now, cacheControlDirectives, staleAt) {
let staleWhileRevalidate = -Infinity
let staleIfError = -Infinity
let immutable = -Infinity
if (cacheControlDirectives['stale-while-revalidate']) {
staleWhileRevalidate = staleAt + (cacheControlDirectives['stale-while-revalidate'] * 1000)
}
if (cacheControlDirectives['stale-if-error']) {
staleIfError = staleAt + (cacheControlDirectives['stale-if-error'] * 1000)
}
if (staleWhileRevalidate === -Infinity && staleIfError === -Infinity) {
immutable = now + 31536000000
}
return Math.max(staleAt, staleWhileRevalidate, staleIfError, immutable)
}
/**
* Strips headers required to be removed in cached responses
* @param {import('../../types/header.d.ts').IncomingHttpHeaders} resHeaders
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives
* @returns {Record<string, string | string []>}
*/
function stripNecessaryHeaders (resHeaders, cacheControlDirectives) {
const headersToRemove = [
'connection',
'proxy-authenticate',
'proxy-authentication-info',
'proxy-authorization',
'proxy-connection',
'te',
'transfer-encoding',
'upgrade',
// We'll add age back when serving it
'age'
]
if (resHeaders['connection']) {
if (Array.isArray(resHeaders['connection'])) {
// connection: a
// connection: b
headersToRemove.push(...resHeaders['connection'].map(header => header.trim()))
} else {
// connection: a, b
headersToRemove.push(...resHeaders['connection'].split(',').map(header => header.trim()))
}
}
if (Array.isArray(cacheControlDirectives['no-cache'])) {
headersToRemove.push(...cacheControlDirectives['no-cache'])
}
if (Array.isArray(cacheControlDirectives['private'])) {
headersToRemove.push(...cacheControlDirectives['private'])
}
let strippedHeaders
for (const headerName of headersToRemove) {
if (resHeaders[headerName]) {
strippedHeaders ??= { ...resHeaders }
delete strippedHeaders[headerName]
}
}
return strippedHeaders ?? resHeaders
}
/**
* @param {Date} date
* @returns {boolean}
*/
function isValidDate (date) {
return date instanceof Date && Number.isFinite(date.valueOf())
}
module.exports = CacheHandler
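The freshness arithmetic above (determineStaleAt / determineDeleteAt) can be condensed into a small worked example. This is an illustrative sketch, not undici API; it assumes staleAt/deleteAt end up stored as absolute millisecond timestamps, which is how the surrounding code combines them with Date.now():

'use strict'
// Hypothetical illustration of the freshness math implemented above (values in milliseconds).
const now = Date.now()
const directives = { 'max-age': 60, 'stale-while-revalidate': 30 } // parsed Cache-Control

// determineStaleAt: no s-maxage here, so max-age wins -> stale 60s after caching
const staleAt = now + directives['max-age'] * 1000

// determineDeleteAt: stale-while-revalidate extends how long the stale entry is kept around
const deleteAt = Math.max(staleAt, staleAt + directives['stale-while-revalidate'] * 1000)

console.log((staleAt - now) / 1000, (deleteAt - now) / 1000) // 60 90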

node_modules/undici/lib/handler/cache-revalidation-handler.js generated vendored Normal file

@@ -0,0 +1,124 @@
'use strict'
const assert = require('node:assert')
/**
* This takes care of revalidation requests we send to the origin. If we get
* a response indicating that what we have is cached (via a HTTP 304), we can
* continue using the cached value. Otherwise, we'll receive the new response
* here, which we then just pass on to the next handler (most likely a
* CacheHandler). Note that this assumes the proper headers were already
* included in the request to tell the origin that we want to revalidate the
* response (i.e. if-modified-since or if-none-match).
*
* @see https://www.rfc-editor.org/rfc/rfc9111.html#name-validation
*
* @implements {import('../../types/dispatcher.d.ts').default.DispatchHandler}
*/
class CacheRevalidationHandler {
#successful = false
/**
* @type {((boolean, any) => void) | null}
*/
#callback
/**
* @type {(import('../../types/dispatcher.d.ts').default.DispatchHandler)}
*/
#handler
#context
/**
* @type {boolean}
*/
#allowErrorStatusCodes
/**
* @param {(boolean) => void} callback Function to call if the cached value is valid
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandlers} handler
* @param {boolean} allowErrorStatusCodes
*/
constructor (callback, handler, allowErrorStatusCodes) {
if (typeof callback !== 'function') {
throw new TypeError('callback must be a function')
}
this.#callback = callback
this.#handler = handler
this.#allowErrorStatusCodes = allowErrorStatusCodes
}
onRequestStart (_, context) {
this.#successful = false
this.#context = context
}
onRequestUpgrade (controller, statusCode, headers, socket) {
this.#handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
}
onResponseStart (
controller,
statusCode,
headers,
statusMessage
) {
assert(this.#callback != null)
// https://www.rfc-editor.org/rfc/rfc9111.html#name-handling-a-validation-respo
// https://datatracker.ietf.org/doc/html/rfc5861#section-4
this.#successful = statusCode === 304 ||
(this.#allowErrorStatusCodes && statusCode >= 500 && statusCode <= 504)
this.#callback(this.#successful, this.#context)
this.#callback = null
if (this.#successful) {
return true
}
this.#handler.onRequestStart?.(controller, this.#context)
this.#handler.onResponseStart?.(
controller,
statusCode,
headers,
statusMessage
)
}
onResponseData (controller, chunk) {
if (this.#successful) {
return
}
return this.#handler.onResponseData?.(controller, chunk)
}
onResponseEnd (controller, trailers) {
if (this.#successful) {
return
}
this.#handler.onResponseEnd?.(controller, trailers)
}
onResponseError (controller, err) {
if (this.#successful) {
return
}
if (this.#callback) {
this.#callback(false)
this.#callback = null
}
if (typeof this.#handler.onResponseError === 'function') {
this.#handler.onResponseError(controller, err)
} else {
throw err
}
}
}
module.exports = CacheRevalidationHandler
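For context, this is the shape of conditional request the handler above expects to have been dispatched. A hedged sketch mirroring the header-building code in interceptor/cache.js further down (the storedEntry shape is assumed for illustration):

'use strict'
// Hypothetical stored cache entry; only the fields used for revalidation are shown.
const storedEntry = { cachedAt: Date.now() - 120_000, etag: '"abc123"' }

const headers = {
  'if-modified-since': new Date(storedEntry.cachedAt).toUTCString()
}
if (storedEntry.etag) {
  headers['if-none-match'] = storedEntry.etag
}
// A 304 (or, with allowErrorStatusCodes, a 5xx) makes onResponseStart invoke the callback
// with success=true and the cached body is served; any other response is passed through
// to the next handler (typically a CacheHandler that stores the fresh response).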

node_modules/undici/lib/handler/decorator-handler.js generated vendored Normal file

@@ -0,0 +1,67 @@
'use strict'
const assert = require('node:assert')
const WrapHandler = require('./wrap-handler')
/**
* @deprecated
*/
module.exports = class DecoratorHandler {
#handler
#onCompleteCalled = false
#onErrorCalled = false
#onResponseStartCalled = false
constructor (handler) {
if (typeof handler !== 'object' || handler === null) {
throw new TypeError('handler must be an object')
}
this.#handler = WrapHandler.wrap(handler)
}
onRequestStart (...args) {
this.#handler.onRequestStart?.(...args)
}
onRequestUpgrade (...args) {
assert(!this.#onCompleteCalled)
assert(!this.#onErrorCalled)
return this.#handler.onRequestUpgrade?.(...args)
}
onResponseStart (...args) {
assert(!this.#onCompleteCalled)
assert(!this.#onErrorCalled)
assert(!this.#onResponseStartCalled)
this.#onResponseStartCalled = true
return this.#handler.onResponseStart?.(...args)
}
onResponseData (...args) {
assert(!this.#onCompleteCalled)
assert(!this.#onErrorCalled)
return this.#handler.onResponseData?.(...args)
}
onResponseEnd (...args) {
assert(!this.#onCompleteCalled)
assert(!this.#onErrorCalled)
this.#onCompleteCalled = true
return this.#handler.onResponseEnd?.(...args)
}
onResponseError (...args) {
this.#onErrorCalled = true
return this.#handler.onResponseError?.(...args)
}
/**
* @deprecated
*/
onBodySent () {}
}
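Although this class is tagged @deprecated here, the dump and decompress interceptors below still extend it: a subclass overrides only the hooks it cares about and delegates the rest through super. A minimal sketch (the require path assumes code living alongside these vendored files):

'use strict'
const DecoratorHandler = require('./decorator-handler') // hypothetical local path

// Logs response status codes and defers everything else to the wrapped handler.
class LoggingHandler extends DecoratorHandler {
  onResponseStart (controller, statusCode, headers, statusMessage) {
    console.log('response started with status', statusCode)
    return super.onResponseStart(controller, statusCode, headers, statusMessage)
  }
}

module.exports = LoggingHandler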

node_modules/undici/lib/handler/redirect-handler.js generated vendored Normal file

@@ -0,0 +1,237 @@
'use strict'
const util = require('../core/util')
const { kBodyUsed } = require('../core/symbols')
const assert = require('node:assert')
const { InvalidArgumentError } = require('../core/errors')
const EE = require('node:events')
const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]
const kBody = Symbol('body')
const noop = () => {}
class BodyAsyncIterable {
constructor (body) {
this[kBody] = body
this[kBodyUsed] = false
}
async * [Symbol.asyncIterator] () {
assert(!this[kBodyUsed], 'disturbed')
this[kBodyUsed] = true
yield * this[kBody]
}
}
class RedirectHandler {
static buildDispatch (dispatcher, maxRedirections) {
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
throw new InvalidArgumentError('maxRedirections must be a positive number')
}
const dispatch = dispatcher.dispatch.bind(dispatcher)
return (opts, originalHandler) => dispatch(opts, new RedirectHandler(dispatch, maxRedirections, opts, originalHandler))
}
constructor (dispatch, maxRedirections, opts, handler) {
if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
throw new InvalidArgumentError('maxRedirections must be a positive number')
}
this.dispatch = dispatch
this.location = null
const { maxRedirections: _, ...cleanOpts } = opts
this.opts = cleanOpts // opts must be a copy, exclude maxRedirections
this.maxRedirections = maxRedirections
this.handler = handler
this.history = []
if (util.isStream(this.opts.body)) {
// TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
// so that it can be dispatched again?
// TODO (fix): Do we need 100-expect support to provide a way to do this properly?
if (util.bodyLength(this.opts.body) === 0) {
this.opts.body
.on('data', function () {
assert(false)
})
}
if (typeof this.opts.body.readableDidRead !== 'boolean') {
this.opts.body[kBodyUsed] = false
EE.prototype.on.call(this.opts.body, 'data', function () {
this[kBodyUsed] = true
})
}
} else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
// TODO (fix): We can't access ReadableStream internal state
// to determine whether or not it has been disturbed. This is just
// a workaround.
this.opts.body = new BodyAsyncIterable(this.opts.body)
} else if (
this.opts.body &&
typeof this.opts.body !== 'string' &&
!ArrayBuffer.isView(this.opts.body) &&
util.isIterable(this.opts.body) &&
!util.isFormDataLike(this.opts.body)
) {
// TODO: Should we allow re-using iterable if !this.opts.idempotent
// or through some other flag?
this.opts.body = new BodyAsyncIterable(this.opts.body)
}
}
onRequestStart (controller, context) {
this.handler.onRequestStart?.(controller, { ...context, history: this.history })
}
onRequestUpgrade (controller, statusCode, headers, socket) {
this.handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
}
onResponseStart (controller, statusCode, headers, statusMessage) {
if (this.opts.throwOnMaxRedirect && this.history.length >= this.maxRedirections) {
throw new Error('max redirects')
}
// https://tools.ietf.org/html/rfc7231#section-6.4.2
// https://fetch.spec.whatwg.org/#http-redirect-fetch
// In case of HTTP 301 or 302 with POST, change the method to GET
if ((statusCode === 301 || statusCode === 302) && this.opts.method === 'POST') {
this.opts.method = 'GET'
if (util.isStream(this.opts.body)) {
util.destroy(this.opts.body.on('error', noop))
}
this.opts.body = null
}
// https://tools.ietf.org/html/rfc7231#section-6.4.4
// In case of HTTP 303, always replace method to be either HEAD or GET
if (statusCode === 303 && this.opts.method !== 'HEAD') {
this.opts.method = 'GET'
if (util.isStream(this.opts.body)) {
util.destroy(this.opts.body.on('error', noop))
}
this.opts.body = null
}
this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) || redirectableStatusCodes.indexOf(statusCode) === -1
? null
: headers.location
if (this.opts.origin) {
this.history.push(new URL(this.opts.path, this.opts.origin))
}
if (!this.location) {
this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
return
}
const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
const path = search ? `${pathname}${search}` : pathname
// Check for redirect loops by seeing if we've already visited this URL in our history
// This catches the case where Client/Pool try to handle cross-origin redirects but fail
// and keep redirecting to the same URL in an infinite loop
const redirectUrlString = `${origin}${path}`
for (const historyUrl of this.history) {
if (historyUrl.toString() === redirectUrlString) {
throw new InvalidArgumentError(`Redirect loop detected. Cannot redirect to ${origin}. This typically happens when using a Client or Pool with cross-origin redirects. Use an Agent for cross-origin redirects.`)
}
}
// Remove headers referring to the original URL.
// By default it is Host only; for a 303 (see below) all Content-* headers are removed as well.
// https://tools.ietf.org/html/rfc7231#section-6.4
this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
this.opts.path = path
this.opts.origin = origin
this.opts.query = null
}
onResponseData (controller, chunk) {
if (this.location) {
/*
https://tools.ietf.org/html/rfc7231#section-6.4
TLDR: undici always ignores 3xx response bodies.
Redirection is used to serve the requested resource from another URL, so it assumes that
no body is generated (and thus can be ignored). Even though generating a body is not prohibited.
For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
(which means it is optional and not mandated) contains just a hyperlink to the value of
the Location response header, so the body can be ignored safely.
For status 300, which is "Multiple Choices", the spec mentions both generating a Location
response header AND a response body with the other possible locations to follow.
Since the spec explicitly chooses not to specify a format for such a body and leaves it to
server and browser implementors, we ignore the body as there is no specified way to parse it.
*/
} else {
this.handler.onResponseData?.(controller, chunk)
}
}
onResponseEnd (controller, trailers) {
if (this.location) {
/*
https://tools.ietf.org/html/rfc7231#section-6.4
TLDR: undici always ignores 3xx response trailers, as they are not expected in case of redirections
and are not useful if present.
See comment on onData method above for more detailed information.
*/
this.dispatch(this.opts, this)
} else {
this.handler.onResponseEnd(controller, trailers)
}
}
onResponseError (controller, error) {
this.handler.onResponseError?.(controller, error)
}
}
// https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
if (header.length === 4) {
return util.headerNameToString(header) === 'host'
}
if (removeContent && util.headerNameToString(header).startsWith('content-')) {
return true
}
if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
const name = util.headerNameToString(header)
return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
}
return false
}
// https://tools.ietf.org/html/rfc7231#section-6.4
function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
const ret = []
if (Array.isArray(headers)) {
for (let i = 0; i < headers.length; i += 2) {
if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {
ret.push(headers[i], headers[i + 1])
}
}
} else if (headers && typeof headers === 'object') {
const entries = typeof headers[Symbol.iterator] === 'function' ? headers : Object.entries(headers)
for (const [key, value] of entries) {
if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
ret.push(key, value)
}
}
} else {
assert(headers == null, 'headers must be an object or an array')
}
return ret
}
module.exports = RedirectHandler
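To make the header-cleaning rules above concrete, here is a standalone restatement of what shouldRemoveHeader drops on a redirect. This is an illustrative sketch, not part of undici:

'use strict'
// removeContent is true for 303 responses; unknownOrigin is true for cross-origin redirects.
function dropsHeader (name, removeContent, unknownOrigin) {
  const lower = name.toLowerCase()
  if (lower === 'host') return true // always re-derived from the redirect target
  if (removeContent && lower.startsWith('content-')) return true
  if (unknownOrigin && ['authorization', 'cookie', 'proxy-authorization'].includes(lower)) return true
  return false
}

console.log(dropsHeader('Authorization', false, true)) // true: never forwarded cross-origin
console.log(dropsHeader('Content-Type', true, false)) // true: a 303 rewrites the request to GET
console.log(dropsHeader('Accept', false, true)) // false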

node_modules/undici/lib/handler/retry-handler.js generated vendored Normal file

@@ -0,0 +1,396 @@
'use strict'
const assert = require('node:assert')
const { kRetryHandlerDefaultRetry } = require('../core/symbols')
const { RequestRetryError } = require('../core/errors')
const WrapHandler = require('./wrap-handler')
const {
isDisturbed,
parseRangeHeader,
wrapRequestBody
} = require('../core/util')
function calculateRetryAfterHeader (retryAfter) {
const retryTime = new Date(retryAfter).getTime()
return isNaN(retryTime) ? 0 : retryTime - Date.now()
}
class RetryHandler {
constructor (opts, { dispatch, handler }) {
const { retryOptions, ...dispatchOpts } = opts
const {
// Retry scoped
retry: retryFn,
maxRetries,
maxTimeout,
minTimeout,
timeoutFactor,
// Response scoped
methods,
errorCodes,
retryAfter,
statusCodes,
throwOnError
} = retryOptions ?? {}
this.error = null
this.dispatch = dispatch
this.handler = WrapHandler.wrap(handler)
this.opts = { ...dispatchOpts, body: wrapRequestBody(opts.body) }
this.retryOpts = {
throwOnError: throwOnError ?? true,
retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
retryAfter: retryAfter ?? true,
maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
minTimeout: minTimeout ?? 500, // .5s
timeoutFactor: timeoutFactor ?? 2,
maxRetries: maxRetries ?? 5,
// Which HTTP methods are safe to retry
methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
// Indicates which status codes to retry
statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
// List of errors to retry
errorCodes: errorCodes ?? [
'ECONNRESET',
'ECONNREFUSED',
'ENOTFOUND',
'ENETDOWN',
'ENETUNREACH',
'EHOSTDOWN',
'EHOSTUNREACH',
'EPIPE',
'UND_ERR_SOCKET'
]
}
this.retryCount = 0
this.retryCountCheckpoint = 0
this.headersSent = false
this.start = 0
this.end = null
this.etag = null
}
onResponseStartWithRetry (controller, statusCode, headers, statusMessage, err) {
if (this.retryOpts.throwOnError) {
// Preserve old behavior for status codes that are not eligible for retry
if (this.retryOpts.statusCodes.includes(statusCode) === false) {
this.headersSent = true
this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
} else {
this.error = err
}
return
}
if (isDisturbed(this.opts.body)) {
this.headersSent = true
this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
return
}
function shouldRetry (passedErr) {
if (passedErr) {
this.headersSent = true
this.handler.onResponseStart?.(controller, statusCode, headers, statusMessage)
controller.resume()
return
}
this.error = err
controller.resume()
}
controller.pause()
this.retryOpts.retry(
err,
{
state: { counter: this.retryCount },
opts: { retryOptions: this.retryOpts, ...this.opts }
},
shouldRetry.bind(this)
)
}
onRequestStart (controller, context) {
if (!this.headersSent) {
this.handler.onRequestStart?.(controller, context)
}
}
onRequestUpgrade (controller, statusCode, headers, socket) {
this.handler.onRequestUpgrade?.(controller, statusCode, headers, socket)
}
static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
const { statusCode, code, headers } = err
const { method, retryOptions } = opts
const {
maxRetries,
minTimeout,
maxTimeout,
timeoutFactor,
statusCodes,
errorCodes,
methods
} = retryOptions
const { counter } = state
// Bail if the error code is neither undici's own retry code nor in the allowed list
if (code && code !== 'UND_ERR_REQ_RETRY' && !errorCodes.includes(code)) {
cb(err)
return
}
// If a set of methods is provided and the current method is not in the list
if (Array.isArray(methods) && !methods.includes(method)) {
cb(err)
return
}
// If a set of status codes is provided and the current status code is not in the list
if (
statusCode != null &&
Array.isArray(statusCodes) &&
!statusCodes.includes(statusCode)
) {
cb(err)
return
}
// If we reached the max number of retries
if (counter > maxRetries) {
cb(err)
return
}
let retryAfterHeader = headers?.['retry-after']
if (retryAfterHeader) {
retryAfterHeader = Number(retryAfterHeader)
retryAfterHeader = Number.isNaN(retryAfterHeader)
? calculateRetryAfterHeader(headers['retry-after'])
: retryAfterHeader * 1e3 // Retry-After is in seconds
}
const retryTimeout =
retryAfterHeader > 0
? Math.min(retryAfterHeader, maxTimeout)
: Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout)
setTimeout(() => cb(null), retryTimeout)
}
onResponseStart (controller, statusCode, headers, statusMessage) {
this.error = null
this.retryCount += 1
if (statusCode >= 300) {
const err = new RequestRetryError('Request failed', statusCode, {
headers,
data: {
count: this.retryCount
}
})
this.onResponseStartWithRetry(controller, statusCode, headers, statusMessage, err)
return
}
// Checkpoint for resume from where we left it
if (this.headersSent) {
// Only a 206 Partial Content response is supposed to provide Content-Range;
// any other status code after the payload was partially consumed
// should not be retried, because the downstream would
// wrongly concatenate multiple responses.
if (statusCode !== 206 && (this.start > 0 || statusCode !== 200)) {
throw new RequestRetryError('server does not support the range header and the payload was partially consumed', statusCode, {
headers,
data: { count: this.retryCount }
})
}
const contentRange = parseRangeHeader(headers['content-range'])
// If no content range
if (!contentRange) {
// We always throw here as we want to indicate that we entered an unexpected path
throw new RequestRetryError('Content-Range mismatch', statusCode, {
headers,
data: { count: this.retryCount }
})
}
// Let's start with a weak etag check
if (this.etag != null && this.etag !== headers.etag) {
// We always throw here as we want to indicate that we entered an unexpected path
throw new RequestRetryError('ETag mismatch', statusCode, {
headers,
data: { count: this.retryCount }
})
}
const { start, size, end = size ? size - 1 : null } = contentRange
assert(this.start === start, 'content-range mismatch')
assert(this.end == null || this.end === end, 'content-range mismatch')
return
}
if (this.end == null) {
if (statusCode === 206) {
// First time we receive 206
const range = parseRangeHeader(headers['content-range'])
if (range == null) {
this.headersSent = true
this.handler.onResponseStart?.(
controller,
statusCode,
headers,
statusMessage
)
return
}
const { start, size, end = size ? size - 1 : null } = range
assert(
start != null && Number.isFinite(start),
'content-range mismatch'
)
assert(end != null && Number.isFinite(end), 'invalid content-length')
this.start = start
this.end = end
}
// We do our best to checkpoint the body for further range headers
if (this.end == null) {
const contentLength = headers['content-length']
this.end = contentLength != null ? Number(contentLength) - 1 : null
}
assert(Number.isFinite(this.start))
assert(
this.end == null || Number.isFinite(this.end),
'invalid content-length'
)
this.resume = true
this.etag = headers.etag != null ? headers.etag : null
// Weak etags are not useful for comparison or caching;
// for instance, it is not safe to assume the response is byte-for-byte
// equal
if (
this.etag != null &&
this.etag[0] === 'W' &&
this.etag[1] === '/'
) {
this.etag = null
}
this.headersSent = true
this.handler.onResponseStart?.(
controller,
statusCode,
headers,
statusMessage
)
} else {
throw new RequestRetryError('Request failed', statusCode, {
headers,
data: { count: this.retryCount }
})
}
}
onResponseData (controller, chunk) {
if (this.error) {
return
}
this.start += chunk.length
this.handler.onResponseData?.(controller, chunk)
}
onResponseEnd (controller, trailers) {
if (this.error && this.retryOpts.throwOnError) {
throw this.error
}
if (!this.error) {
this.retryCount = 0
return this.handler.onResponseEnd?.(controller, trailers)
}
this.retry(controller)
}
retry (controller) {
if (this.start !== 0) {
const headers = { range: `bytes=${this.start}-${this.end ?? ''}` }
// Weak etag check - weak etags will make comparison algorithms never match
if (this.etag != null) {
headers['if-match'] = this.etag
}
this.opts = {
...this.opts,
headers: {
...this.opts.headers,
...headers
}
}
}
try {
this.retryCountCheckpoint = this.retryCount
this.dispatch(this.opts, this)
} catch (err) {
this.handler.onResponseError?.(controller, err)
}
}
onResponseError (controller, err) {
if (controller?.aborted || isDisturbed(this.opts.body)) {
this.handler.onResponseError?.(controller, err)
return
}
function shouldRetry (returnedErr) {
if (!returnedErr) {
this.retry(controller)
return
}
this.handler?.onResponseError?.(controller, returnedErr)
}
// We reconcile in case of a mix between network errors
// and server error response
if (this.retryCount - this.retryCountCheckpoint > 0) {
// We count the difference between the last checkpoint and the current retry count
this.retryCount =
this.retryCountCheckpoint +
(this.retryCount - this.retryCountCheckpoint)
} else {
this.retryCount += 1
}
this.retryOpts.retry(
err,
{
state: { counter: this.retryCount },
opts: { retryOptions: this.retryOpts, ...this.opts }
},
shouldRetry.bind(this)
)
}
}
module.exports = RetryHandler
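undici also ships a retry interceptor that wires this handler into a dispatcher. A hedged usage sketch, assuming interceptors.retry accepts the same option names as the defaults above (compose is part of undici's public dispatcher API):

'use strict'
const { Agent, interceptors, request } = require('undici')

// Retry the listed methods up to 3 times on the status/error codes above,
// with exponential backoff starting at 500ms.
const agent = new Agent().compose(
  interceptors.retry({
    maxRetries: 3,
    minTimeout: 500,
    timeoutFactor: 2,
    statusCodes: [500, 502, 503, 504, 429]
  })
)

async function main () {
  const { statusCode } = await request('https://example.com/', { dispatcher: agent })
  console.log(statusCode)
}
main().catch(console.error)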

node_modules/undici/lib/handler/unwrap-handler.js generated vendored Normal file

@@ -0,0 +1,96 @@
'use strict'
const { parseHeaders } = require('../core/util')
const { InvalidArgumentError } = require('../core/errors')
const kResume = Symbol('resume')
class UnwrapController {
#paused = false
#reason = null
#aborted = false
#abort
[kResume] = null
constructor (abort) {
this.#abort = abort
}
pause () {
this.#paused = true
}
resume () {
if (this.#paused) {
this.#paused = false
this[kResume]?.()
}
}
abort (reason) {
if (!this.#aborted) {
this.#aborted = true
this.#reason = reason
this.#abort(reason)
}
}
get aborted () {
return this.#aborted
}
get reason () {
return this.#reason
}
get paused () {
return this.#paused
}
}
module.exports = class UnwrapHandler {
#handler
#controller
constructor (handler) {
this.#handler = handler
}
static unwrap (handler) {
// TODO (fix): More checks...
return !handler.onRequestStart ? handler : new UnwrapHandler(handler)
}
onConnect (abort, context) {
this.#controller = new UnwrapController(abort)
this.#handler.onRequestStart?.(this.#controller, context)
}
onUpgrade (statusCode, rawHeaders, socket) {
this.#handler.onRequestUpgrade?.(this.#controller, statusCode, parseHeaders(rawHeaders), socket)
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
this.#controller[kResume] = resume
this.#handler.onResponseStart?.(this.#controller, statusCode, parseHeaders(rawHeaders), statusMessage)
return !this.#controller.paused
}
onData (data) {
this.#handler.onResponseData?.(this.#controller, data)
return !this.#controller.paused
}
onComplete (rawTrailers) {
this.#handler.onResponseEnd?.(this.#controller, parseHeaders(rawTrailers))
}
onError (err) {
if (!this.#handler.onResponseError) {
throw new InvalidArgumentError('invalid onError method')
}
this.#handler.onResponseError?.(this.#controller, err)
}
}

node_modules/undici/lib/handler/wrap-handler.js generated vendored Normal file

@@ -0,0 +1,95 @@
'use strict'
const { InvalidArgumentError } = require('../core/errors')
module.exports = class WrapHandler {
#handler
constructor (handler) {
this.#handler = handler
}
static wrap (handler) {
// TODO (fix): More checks...
return handler.onRequestStart ? handler : new WrapHandler(handler)
}
// Unwrap Interface
onConnect (abort, context) {
return this.#handler.onConnect?.(abort, context)
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
return this.#handler.onHeaders?.(statusCode, rawHeaders, resume, statusMessage)
}
onUpgrade (statusCode, rawHeaders, socket) {
return this.#handler.onUpgrade?.(statusCode, rawHeaders, socket)
}
onData (data) {
return this.#handler.onData?.(data)
}
onComplete (trailers) {
return this.#handler.onComplete?.(trailers)
}
onError (err) {
if (!this.#handler.onError) {
throw err
}
return this.#handler.onError?.(err)
}
// Wrap Interface
onRequestStart (controller, context) {
this.#handler.onConnect?.((reason) => controller.abort(reason), context)
}
onRequestUpgrade (controller, statusCode, headers, socket) {
const rawHeaders = []
for (const [key, val] of Object.entries(headers)) {
rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
}
this.#handler.onUpgrade?.(statusCode, rawHeaders, socket)
}
onResponseStart (controller, statusCode, headers, statusMessage) {
const rawHeaders = []
for (const [key, val] of Object.entries(headers)) {
rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
}
if (this.#handler.onHeaders?.(statusCode, rawHeaders, () => controller.resume(), statusMessage) === false) {
controller.pause()
}
}
onResponseData (controller, data) {
if (this.#handler.onData?.(data) === false) {
controller.pause()
}
}
onResponseEnd (controller, trailers) {
const rawTrailers = []
for (const [key, val] of Object.entries(trailers)) {
rawTrailers.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val))
}
this.#handler.onComplete?.(rawTrailers)
}
onResponseError (controller, err) {
if (!this.#handler.onError) {
throw new InvalidArgumentError('invalid onError method')
}
this.#handler.onError?.(err)
}
}
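UnwrapHandler and WrapHandler exist so that handlers written against the older onConnect/onHeaders/onData interface keep working with the controller-based one, and vice versa. A sketch of such a legacy-style handler, using only hook names these adapters translate:

'use strict'
// Hypothetical legacy-style handler: WrapHandler.wrap() leaves controller-based handlers
// untouched, but bridges one shaped like this onto the new interface.
const legacyHandler = {
  onConnect (abort) { /* keep abort() if the caller may need to cancel */ },
  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
    console.log('status', statusCode)
    return true // returning false pauses the body via controller.pause()
  },
  onData (chunk) {
    process.stdout.write(chunk)
    return true
  },
  onComplete (rawTrailers) { console.log('done') },
  onError (err) { console.error(err) }
}

module.exports = legacyHandler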

node_modules/undici/lib/interceptor/cache.js generated vendored Normal file

@@ -0,0 +1,433 @@
'use strict'
const assert = require('node:assert')
const { Readable } = require('node:stream')
const util = require('../core/util')
const CacheHandler = require('../handler/cache-handler')
const MemoryCacheStore = require('../cache/memory-cache-store')
const CacheRevalidationHandler = require('../handler/cache-revalidation-handler')
const { assertCacheStore, assertCacheMethods, makeCacheKey, normalizeHeaders, parseCacheControlHeader } = require('../util/cache.js')
const { AbortError } = require('../core/errors.js')
/**
* @typedef {(options: import('../../types/dispatcher.d.ts').default.DispatchOptions, handler: import('../../types/dispatcher.d.ts').default.DispatchHandler) => void} DispatchFn
*/
/**
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
* @returns {boolean}
*/
function needsRevalidation (result, cacheControlDirectives) {
if (cacheControlDirectives?.['no-cache']) {
// Always revalidate requests with the no-cache request directive
return true
}
if (result.cacheControlDirectives?.['no-cache'] && !Array.isArray(result.cacheControlDirectives['no-cache'])) {
// Always revalidate requests with unqualified no-cache response directive
return true
}
const now = Date.now()
if (now > result.staleAt) {
// Response is stale
if (cacheControlDirectives?.['max-stale']) {
// There's a threshold where we can serve stale responses, let's see if
// we're in it
// https://www.rfc-editor.org/rfc/rfc9111.html#name-max-stale
const gracePeriod = result.staleAt + (cacheControlDirectives['max-stale'] * 1000)
return now > gracePeriod
}
return true
}
if (cacheControlDirectives?.['min-fresh']) {
// https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.3
// At this point, staleAt is always > now
const timeLeftTillStale = result.staleAt - now
const threshold = cacheControlDirectives['min-fresh'] * 1000
return timeLeftTillStale <= threshold
}
return false
}
/**
* Check if we're within the stale-while-revalidate window for a stale response
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
* @returns {boolean}
*/
function withinStaleWhileRevalidateWindow (result) {
const staleWhileRevalidate = result.cacheControlDirectives?.['stale-while-revalidate']
if (!staleWhileRevalidate) {
return false
}
const now = Date.now()
const staleWhileRevalidateExpiry = result.staleAt + (staleWhileRevalidate * 1000)
return now <= staleWhileRevalidateExpiry
}
/**
* @param {DispatchFn} dispatch
* @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} globalOpts
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} reqCacheControl
*/
function handleUncachedResponse (
dispatch,
globalOpts,
cacheKey,
handler,
opts,
reqCacheControl
) {
if (reqCacheControl?.['only-if-cached']) {
let aborted = false
try {
if (typeof handler.onConnect === 'function') {
handler.onConnect(() => {
aborted = true
})
if (aborted) {
return
}
}
if (typeof handler.onHeaders === 'function') {
handler.onHeaders(504, [], () => {}, 'Gateway Timeout')
if (aborted) {
return
}
}
if (typeof handler.onComplete === 'function') {
handler.onComplete([])
}
} catch (err) {
if (typeof handler.onError === 'function') {
handler.onError(err)
}
}
return true
}
return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
}
/**
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
* @param {number} age
* @param {any} context
* @param {boolean} isStale
*/
function sendCachedValue (handler, opts, result, age, context, isStale) {
// TODO (perf): Readable.from path can be optimized...
const stream = util.isStream(result.body)
? result.body
: Readable.from(result.body ?? [])
assert(!stream.destroyed, 'stream should not be destroyed')
assert(!stream.readableDidRead, 'stream should not be readableDidRead')
const controller = {
resume () {
stream.resume()
},
pause () {
stream.pause()
},
get paused () {
return stream.isPaused()
},
get aborted () {
return stream.destroyed
},
get reason () {
return stream.errored
},
abort (reason) {
stream.destroy(reason ?? new AbortError())
}
}
stream
.on('error', function (err) {
if (!this.readableEnded) {
if (typeof handler.onResponseError === 'function') {
handler.onResponseError(controller, err)
} else {
throw err
}
}
})
.on('close', function () {
if (!this.errored) {
handler.onResponseEnd?.(controller, {})
}
})
handler.onRequestStart?.(controller, context)
if (stream.destroyed) {
return
}
// Add the age header
// https://www.rfc-editor.org/rfc/rfc9111.html#name-age
const headers = { ...result.headers, age: String(age) }
if (isStale) {
// Add warning header
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Warning
headers.warning = '110 - "response is stale"'
}
handler.onResponseStart?.(controller, result.statusCode, headers, result.statusMessage)
if (opts.method === 'HEAD') {
stream.destroy()
} else {
stream.on('data', function (chunk) {
handler.onResponseData?.(controller, chunk)
})
}
}
/**
* @param {DispatchFn} dispatch
* @param {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions} globalOpts
* @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
* @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
* @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} reqCacheControl
* @param {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined} result
*/
function handleResult (
dispatch,
globalOpts,
cacheKey,
handler,
opts,
reqCacheControl,
result
) {
if (!result) {
return handleUncachedResponse(dispatch, globalOpts, cacheKey, handler, opts, reqCacheControl)
}
const now = Date.now()
if (now > result.deleteAt) {
// Response is expired, cache store shouldn't have given this to us
return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
}
const age = Math.round((now - result.cachedAt) / 1000)
if (reqCacheControl?.['max-age'] && age >= reqCacheControl['max-age']) {
// Response is considered expired for this specific request
// https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.1
return dispatch(opts, handler)
}
// Check if the response is stale
if (needsRevalidation(result, reqCacheControl)) {
if (util.isStream(opts.body) && util.bodyLength(opts.body) !== 0) {
// If body is a stream we can't revalidate...
// TODO (fix): This could be less strict...
return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
}
// RFC 5861: If we're within stale-while-revalidate window, serve stale immediately
// and revalidate in background
if (withinStaleWhileRevalidateWindow(result)) {
// Serve stale response immediately
sendCachedValue(handler, opts, result, age, null, true)
// Start background revalidation (fire-and-forget)
queueMicrotask(() => {
let headers = {
...opts.headers,
'if-modified-since': new Date(result.cachedAt).toUTCString()
}
if (result.etag) {
headers['if-none-match'] = result.etag
}
if (result.vary) {
headers = {
...headers,
...result.vary
}
}
// Background revalidation - update cache if we get new data
dispatch(
{
...opts,
headers
},
new CacheHandler(globalOpts, cacheKey, {
// Silent handler that just updates the cache
onRequestStart () {},
onRequestUpgrade () {},
onResponseStart () {},
onResponseData () {},
onResponseEnd () {},
onResponseError () {}
})
)
})
return true
}
let withinStaleIfErrorThreshold = false
const staleIfErrorExpiry = result.cacheControlDirectives['stale-if-error'] ?? reqCacheControl?.['stale-if-error']
if (staleIfErrorExpiry) {
withinStaleIfErrorThreshold = now < (result.staleAt + (staleIfErrorExpiry * 1000))
}
let headers = {
...opts.headers,
'if-modified-since': new Date(result.cachedAt).toUTCString()
}
if (result.etag) {
headers['if-none-match'] = result.etag
}
if (result.vary) {
headers = {
...headers,
...result.vary
}
}
// We need to revalidate the response
return dispatch(
{
...opts,
headers
},
new CacheRevalidationHandler(
(success, context) => {
if (success) {
sendCachedValue(handler, opts, result, age, context, true)
} else if (util.isStream(result.body)) {
result.body.on('error', () => {}).destroy()
}
},
new CacheHandler(globalOpts, cacheKey, handler),
withinStaleIfErrorThreshold
)
)
}
// Dump request body.
if (util.isStream(opts.body)) {
opts.body.on('error', () => {}).destroy()
}
sendCachedValue(handler, opts, result, age, null, false)
}
/**
* @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions} [opts]
* @returns {import('../../types/dispatcher.d.ts').default.DispatcherComposeInterceptor}
*/
module.exports = (opts = {}) => {
const {
store = new MemoryCacheStore(),
methods = ['GET'],
cacheByDefault = undefined,
type = 'shared'
} = opts
if (typeof opts !== 'object' || opts === null) {
throw new TypeError(`expected type of opts to be an Object, got ${opts === null ? 'null' : typeof opts}`)
}
assertCacheStore(store, 'opts.store')
assertCacheMethods(methods, 'opts.methods')
if (typeof cacheByDefault !== 'undefined' && typeof cacheByDefault !== 'number') {
throw new TypeError(`expected opts.cacheByDefault to be number or undefined, got ${typeof cacheByDefault}`)
}
if (typeof type !== 'undefined' && type !== 'shared' && type !== 'private') {
throw new TypeError(`expected opts.type to be shared, private, or undefined, got ${typeof type}`)
}
const globalOpts = {
store,
methods,
cacheByDefault,
type
}
const safeMethodsToNotCache = util.safeHTTPMethods.filter(method => methods.includes(method) === false)
return dispatch => {
return (opts, handler) => {
if (!opts.origin || safeMethodsToNotCache.includes(opts.method)) {
// Not a method we want to cache or we don't have the origin, skip
return dispatch(opts, handler)
}
opts = {
...opts,
headers: normalizeHeaders(opts)
}
const reqCacheControl = opts.headers?.['cache-control']
? parseCacheControlHeader(opts.headers['cache-control'])
: undefined
if (reqCacheControl?.['no-store']) {
return dispatch(opts, handler)
}
/**
* @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
*/
const cacheKey = makeCacheKey(opts)
const result = store.get(cacheKey)
if (result && typeof result.then === 'function') {
result.then(result => {
handleResult(dispatch,
globalOpts,
cacheKey,
handler,
opts,
reqCacheControl,
result
)
})
} else {
handleResult(
dispatch,
globalOpts,
cacheKey,
handler,
opts,
reqCacheControl,
result
)
}
return true
}
}
}
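A hedged end-to-end usage sketch of this interceptor; the option names come from the factory above and the compose wiring from undici's public dispatcher API:

'use strict'
const { Agent, interceptors, request } = require('undici')

// Cache GET responses in the default in-memory store, treating the cache as shared.
const agent = new Agent().compose(
  interceptors.cache({ methods: ['GET'], type: 'shared' })
)

async function main () {
  // The first request hits the origin and is stored; the second may be served from cache
  // (freshness permitting) or revalidated with if-none-match / if-modified-since.
  const first = await request('https://example.com/', { dispatcher: agent })
  await first.body.dump()
  const second = await request('https://example.com/', { dispatcher: agent })
  await second.body.dump()
  console.log(second.headers.age) // set by sendCachedValue when served from cache
}
main().catch(console.error)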

node_modules/undici/lib/interceptor/decompress.js generated vendored Normal file

@@ -0,0 +1,253 @@
'use strict'
const { createInflate, createGunzip, createBrotliDecompress, createZstdDecompress } = require('node:zlib')
const { pipeline } = require('node:stream')
const DecoratorHandler = require('../handler/decorator-handler')
/** @typedef {import('node:stream').Transform} Transform */
/** @typedef {import('node:stream').Transform} Controller */
/** @typedef {Transform&import('node:zlib').Zlib} DecompressorStream */
/** @type {Record<string, () => DecompressorStream>} */
const supportedEncodings = {
gzip: createGunzip,
'x-gzip': createGunzip,
br: createBrotliDecompress,
deflate: createInflate,
compress: createInflate,
'x-compress': createInflate,
...(createZstdDecompress ? { zstd: createZstdDecompress } : {})
}
const defaultSkipStatusCodes = /** @type {const} */ ([204, 304])
let warningEmitted = /** @type {boolean} */ (false)
/**
* @typedef {Object} DecompressHandlerOptions
* @property {number[]|Readonly<number[]>} [skipStatusCodes=[204, 304]] - List of status codes to skip decompression for
* @property {boolean} [skipErrorResponses] - Whether to skip decompression for error responses (status codes >= 400)
*/
class DecompressHandler extends DecoratorHandler {
/** @type {Transform[]} */
#decompressors = []
/** @type {NodeJS.WritableStream&NodeJS.ReadableStream|null} */
#pipelineStream
/** @type {Readonly<number[]>} */
#skipStatusCodes
/** @type {boolean} */
#skipErrorResponses
constructor (handler, { skipStatusCodes = defaultSkipStatusCodes, skipErrorResponses = true } = {}) {
super(handler)
this.#skipStatusCodes = skipStatusCodes
this.#skipErrorResponses = skipErrorResponses
}
/**
* Determines if decompression should be skipped based on encoding and status code
* @param {string} contentEncoding - Content-Encoding header value
* @param {number} statusCode - HTTP status code of the response
* @returns {boolean} - True if decompression should be skipped
*/
#shouldSkipDecompression (contentEncoding, statusCode) {
if (!contentEncoding || statusCode < 200) return true
if (this.#skipStatusCodes.includes(statusCode)) return true
if (this.#skipErrorResponses && statusCode >= 400) return true
return false
}
/**
* Creates a chain of decompressors for multiple content encodings
*
* @param {string} encodings - Comma-separated list of content encodings
* @returns {Array<DecompressorStream>} - Array of decompressor streams
*/
#createDecompressionChain (encodings) {
const parts = encodings.split(',')
/** @type {DecompressorStream[]} */
const decompressors = []
for (let i = parts.length - 1; i >= 0; i--) {
const encoding = parts[i].trim()
if (!encoding) continue
if (!supportedEncodings[encoding]) {
decompressors.length = 0 // Clear if unsupported encoding
return decompressors // Unsupported encoding
}
decompressors.push(supportedEncodings[encoding]())
}
return decompressors
}
/**
* Sets up event handlers for a decompressor stream using readable events
* @param {DecompressorStream} decompressor - The decompressor stream
* @param {Controller} controller - The controller to coordinate with
* @returns {void}
*/
#setupDecompressorEvents (decompressor, controller) {
decompressor.on('readable', () => {
let chunk
while ((chunk = decompressor.read()) !== null) {
const result = super.onResponseData(controller, chunk)
if (result === false) {
break
}
}
})
decompressor.on('error', (error) => {
super.onResponseError(controller, error)
})
}
/**
* Sets up event handling for a single decompressor
* @param {Controller} controller - The controller to handle events
* @returns {void}
*/
#setupSingleDecompressor (controller) {
const decompressor = this.#decompressors[0]
this.#setupDecompressorEvents(decompressor, controller)
decompressor.on('end', () => {
super.onResponseEnd(controller, {})
})
}
/**
* Sets up event handling for multiple chained decompressors using pipeline
* @param {Controller} controller - The controller to handle events
* @returns {void}
*/
#setupMultipleDecompressors (controller) {
const lastDecompressor = this.#decompressors[this.#decompressors.length - 1]
this.#setupDecompressorEvents(lastDecompressor, controller)
this.#pipelineStream = pipeline(this.#decompressors, (err) => {
if (err) {
super.onResponseError(controller, err)
return
}
super.onResponseEnd(controller, {})
})
}
/**
* Cleans up decompressor references to prevent memory leaks
* @returns {void}
*/
#cleanupDecompressors () {
this.#decompressors.length = 0
this.#pipelineStream = null
}
/**
* @param {Controller} controller
* @param {number} statusCode
* @param {Record<string, string | string[] | undefined>} headers
* @param {string} statusMessage
* @returns {void}
*/
onResponseStart (controller, statusCode, headers, statusMessage) {
const contentEncoding = headers['content-encoding']
// If content encoding is not supported or status code is in skip list
if (this.#shouldSkipDecompression(contentEncoding, statusCode)) {
return super.onResponseStart(controller, statusCode, headers, statusMessage)
}
const decompressors = this.#createDecompressionChain(contentEncoding.toLowerCase())
if (decompressors.length === 0) {
this.#cleanupDecompressors()
return super.onResponseStart(controller, statusCode, headers, statusMessage)
}
this.#decompressors = decompressors
// Remove compression headers since we're decompressing
const { 'content-encoding': _, 'content-length': __, ...newHeaders } = headers
if (this.#decompressors.length === 1) {
this.#setupSingleDecompressor(controller)
} else {
this.#setupMultipleDecompressors(controller)
}
super.onResponseStart(controller, statusCode, newHeaders, statusMessage)
}
/**
* @param {Controller} controller
* @param {Buffer} chunk
* @returns {void}
*/
onResponseData (controller, chunk) {
if (this.#decompressors.length > 0) {
this.#decompressors[0].write(chunk)
return
}
super.onResponseData(controller, chunk)
}
/**
* @param {Controller} controller
* @param {Record<string, string | string[]> | undefined} trailers
* @returns {void}
*/
onResponseEnd (controller, trailers) {
if (this.#decompressors.length > 0) {
this.#decompressors[0].end()
this.#cleanupDecompressors()
return
}
super.onResponseEnd(controller, trailers)
}
/**
* @param {Controller} controller
* @param {Error} err
* @returns {void}
*/
onResponseError (controller, err) {
if (this.#decompressors.length > 0) {
for (const decompressor of this.#decompressors) {
decompressor.destroy(err)
}
this.#cleanupDecompressors()
}
super.onResponseError(controller, err)
}
}
/**
* Creates a decompression interceptor for HTTP responses
* @param {DecompressHandlerOptions} [options] - Options for the interceptor
* @returns {Function} - Interceptor function
*/
function createDecompressInterceptor (options = {}) {
// Emit experimental warning only once
if (!warningEmitted) {
process.emitWarning(
'DecompressInterceptor is experimental and subject to change',
'ExperimentalWarning'
)
warningEmitted = true
}
return (dispatch) => {
return (opts, handler) => {
const decompressHandler = new DecompressHandler(handler, options)
return dispatch(opts, decompressHandler)
}
}
}
module.exports = createDecompressInterceptor
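A usage sketch under a loud assumption: that your undici build exposes the factory above as interceptors.decompress (it emits an ExperimentalWarning, so the export name and options may change):

'use strict'
const { Agent, interceptors, request } = require('undici')

// Transparently decompress gzip/br/deflate (and zstd where available) response bodies;
// 204/304 and error responses are passed through untouched per the defaults above.
const agent = new Agent().compose(
  interceptors.decompress({ skipErrorResponses: true })
)

async function main () {
  const { headers, body } = await request('https://example.com/', { dispatcher: agent })
  console.log(headers['content-encoding']) // undefined: stripped when decompression happens
  console.log((await body.text()).length)
}
main().catch(console.error)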

node_modules/undici/lib/interceptor/dns.js generated vendored Normal file

@@ -0,0 +1,432 @@
'use strict'
const { isIP } = require('node:net')
const { lookup } = require('node:dns')
const DecoratorHandler = require('../handler/decorator-handler')
const { InvalidArgumentError, InformationalError } = require('../core/errors')
const maxInt = Math.pow(2, 31) - 1
class DNSInstance {
#maxTTL = 0
#maxItems = 0
#records = new Map()
dualStack = true
affinity = null
lookup = null
pick = null
constructor (opts) {
this.#maxTTL = opts.maxTTL
this.#maxItems = opts.maxItems
this.dualStack = opts.dualStack
this.affinity = opts.affinity
this.lookup = opts.lookup ?? this.#defaultLookup
this.pick = opts.pick ?? this.#defaultPick
}
get full () {
return this.#records.size === this.#maxItems
}
runLookup (origin, opts, cb) {
const ips = this.#records.get(origin.hostname)
// If full, we just return the origin
if (ips == null && this.full) {
cb(null, origin)
return
}
const newOpts = {
affinity: this.affinity,
dualStack: this.dualStack,
lookup: this.lookup,
pick: this.pick,
...opts.dns,
maxTTL: this.#maxTTL,
maxItems: this.#maxItems
}
// If no IPs we lookup
if (ips == null) {
this.lookup(origin, newOpts, (err, addresses) => {
if (err || addresses == null || addresses.length === 0) {
cb(err ?? new InformationalError('No DNS entries found'))
return
}
this.setRecords(origin, addresses)
const records = this.#records.get(origin.hostname)
const ip = this.pick(
origin,
records,
newOpts.affinity
)
let port
if (typeof ip.port === 'number') {
port = `:${ip.port}`
} else if (origin.port !== '') {
port = `:${origin.port}`
} else {
port = ''
}
cb(
null,
new URL(`${origin.protocol}//${
ip.family === 6 ? `[${ip.address}]` : ip.address
}${port}`)
)
})
} else {
// If there's IPs we pick
const ip = this.pick(
origin,
ips,
newOpts.affinity
)
// If no IPs we lookup - deleting old records
if (ip == null) {
this.#records.delete(origin.hostname)
this.runLookup(origin, opts, cb)
return
}
let port
if (typeof ip.port === 'number') {
port = `:${ip.port}`
} else if (origin.port !== '') {
port = `:${origin.port}`
} else {
port = ''
}
cb(
null,
new URL(`${origin.protocol}//${
ip.family === 6 ? `[${ip.address}]` : ip.address
}${port}`)
)
}
}
#defaultLookup (origin, opts, cb) {
lookup(
origin.hostname,
{
all: true,
family: this.dualStack === false ? this.affinity : 0,
order: 'ipv4first'
},
(err, addresses) => {
if (err) {
return cb(err)
}
const results = new Map()
for (const addr of addresses) {
// On Linux we found duplicates; we deduplicate them, keeping
// the latest record
results.set(`${addr.address}:${addr.family}`, addr)
}
cb(null, results.values())
}
)
}
#defaultPick (origin, hostnameRecords, affinity) {
let ip = null
const { records, offset } = hostnameRecords
let family
if (this.dualStack) {
if (affinity == null) {
// Balance between ip families
if (offset == null || offset === maxInt) {
hostnameRecords.offset = 0
affinity = 4
} else {
hostnameRecords.offset++
affinity = (hostnameRecords.offset & 1) === 1 ? 6 : 4
}
}
if (records[affinity] != null && records[affinity].ips.length > 0) {
family = records[affinity]
} else {
family = records[affinity === 4 ? 6 : 4]
}
} else {
family = records[affinity]
}
// If no IPs we return null
if (family == null || family.ips.length === 0) {
return ip
}
if (family.offset == null || family.offset === maxInt) {
family.offset = 0
} else {
family.offset++
}
const position = family.offset % family.ips.length
ip = family.ips[position] ?? null
if (ip == null) {
return ip
}
if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
// We delete expired records
// It is possible that they have different TTL, so we manage them individually
family.ips.splice(position, 1)
return this.pick(origin, hostnameRecords, affinity)
}
return ip
}
pickFamily (origin, ipFamily) {
const records = this.#records.get(origin.hostname)?.records
if (!records) {
return null
}
const family = records[ipFamily]
if (!family) {
return null
}
if (family.offset == null || family.offset === maxInt) {
family.offset = 0
} else {
family.offset++
}
const position = family.offset % family.ips.length
const ip = family.ips[position] ?? null
if (ip == null) {
return ip
}
if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
// We delete expired records
// It is possible that they have different TTL, so we manage them individually
family.ips.splice(position, 1)
}
return ip
}
setRecords (origin, addresses) {
const timestamp = Date.now()
const records = { records: { 4: null, 6: null } }
for (const record of addresses) {
record.timestamp = timestamp
if (typeof record.ttl === 'number') {
// The record TTL is expected to be in ms
record.ttl = Math.min(record.ttl, this.#maxTTL)
} else {
record.ttl = this.#maxTTL
}
const familyRecords = records.records[record.family] ?? { ips: [] }
familyRecords.ips.push(record)
records.records[record.family] = familyRecords
}
this.#records.set(origin.hostname, records)
}
deleteRecords (origin) {
this.#records.delete(origin.hostname)
}
getHandler (meta, opts) {
return new DNSDispatchHandler(this, meta, opts)
}
}
class DNSDispatchHandler extends DecoratorHandler {
#state = null
#opts = null
#dispatch = null
#origin = null
#controller = null
#newOrigin = null
#firstTry = true
constructor (state, { origin, handler, dispatch, newOrigin }, opts) {
super(handler)
this.#origin = origin
this.#newOrigin = newOrigin
this.#opts = { ...opts }
this.#state = state
this.#dispatch = dispatch
}
onResponseError (controller, err) {
switch (err.code) {
case 'ETIMEDOUT':
case 'ECONNREFUSED': {
if (this.#state.dualStack) {
if (!this.#firstTry) {
super.onResponseError(controller, err)
return
}
this.#firstTry = false
// Pick an ip address from the other family
const otherFamily = this.#newOrigin.hostname[0] === '[' ? 4 : 6
const ip = this.#state.pickFamily(this.#origin, otherFamily)
if (ip == null) {
super.onResponseError(controller, err)
return
}
let port
if (typeof ip.port === 'number') {
port = `:${ip.port}`
} else if (this.#origin.port !== '') {
port = `:${this.#origin.port}`
} else {
port = ''
}
const dispatchOpts = {
...this.#opts,
origin: `${this.#origin.protocol}//${
ip.family === 6 ? `[${ip.address}]` : ip.address
}${port}`
}
this.#dispatch(dispatchOpts, this)
return
}
// if dual-stack disabled, we error out
super.onResponseError(controller, err)
break
}
case 'ENOTFOUND':
this.#state.deleteRecords(this.#origin)
super.onResponseError(controller, err)
break
default:
super.onResponseError(controller, err)
break
}
}
}
module.exports = interceptorOpts => {
if (
interceptorOpts?.maxTTL != null &&
(typeof interceptorOpts?.maxTTL !== 'number' || interceptorOpts?.maxTTL < 0)
) {
throw new InvalidArgumentError('Invalid maxTTL. Must be a positive number')
}
if (
interceptorOpts?.maxItems != null &&
(typeof interceptorOpts?.maxItems !== 'number' ||
interceptorOpts?.maxItems < 1)
) {
throw new InvalidArgumentError(
'Invalid maxItems. Must be a positive number and greater than zero'
)
}
if (
interceptorOpts?.affinity != null &&
interceptorOpts?.affinity !== 4 &&
interceptorOpts?.affinity !== 6
) {
throw new InvalidArgumentError('Invalid affinity. Must be either 4 or 6')
}
if (
interceptorOpts?.dualStack != null &&
typeof interceptorOpts?.dualStack !== 'boolean'
) {
throw new InvalidArgumentError('Invalid dualStack. Must be a boolean')
}
if (
interceptorOpts?.lookup != null &&
typeof interceptorOpts?.lookup !== 'function'
) {
throw new InvalidArgumentError('Invalid lookup. Must be a function')
}
if (
interceptorOpts?.pick != null &&
typeof interceptorOpts?.pick !== 'function'
) {
throw new InvalidArgumentError('Invalid pick. Must be a function')
}
const dualStack = interceptorOpts?.dualStack ?? true
let affinity
if (dualStack) {
affinity = interceptorOpts?.affinity ?? null
} else {
affinity = interceptorOpts?.affinity ?? 4
}
const opts = {
maxTTL: interceptorOpts?.maxTTL ?? 10e3, // Expressed in ms
lookup: interceptorOpts?.lookup ?? null,
pick: interceptorOpts?.pick ?? null,
dualStack,
affinity,
maxItems: interceptorOpts?.maxItems ?? Infinity
}
const instance = new DNSInstance(opts)
return dispatch => {
return function dnsInterceptor (origDispatchOpts, handler) {
const origin =
origDispatchOpts.origin.constructor === URL
? origDispatchOpts.origin
: new URL(origDispatchOpts.origin)
if (isIP(origin.hostname) !== 0) {
return dispatch(origDispatchOpts, handler)
}
instance.runLookup(origin, origDispatchOpts, (err, newOrigin) => {
if (err) {
return handler.onResponseError(null, err)
}
const dispatchOpts = {
...origDispatchOpts,
servername: origin.hostname, // For SNI on TLS
origin: newOrigin.origin,
headers: {
host: origin.host,
...origDispatchOpts.headers
}
}
dispatch(
dispatchOpts,
instance.getHandler(
{ origin, dispatch, handler, newOrigin },
origDispatchOpts
)
)
})
return true
}
}
}
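A hedged usage sketch of this DNS-caching interceptor, with option names taken from the validation above:

'use strict'
const { Agent, interceptors, request } = require('undici')

// Cache resolved addresses for up to 10s, keep at most 100 hostnames,
// and alternate between IPv4/IPv6 records when both are available (dual stack).
const agent = new Agent().compose(
  interceptors.dns({ maxTTL: 10_000, maxItems: 100, dualStack: true })
)

async function main () {
  const { statusCode, body } = await request('https://example.com/', { dispatcher: agent })
  await body.dump()
  console.log(statusCode)
}
main().catch(console.error)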

node_modules/undici/lib/interceptor/dump.js generated vendored Normal file

@@ -0,0 +1,112 @@
'use strict'
const { InvalidArgumentError, RequestAbortedError } = require('../core/errors')
const DecoratorHandler = require('../handler/decorator-handler')
class DumpHandler extends DecoratorHandler {
#maxSize = 1024 * 1024
#dumped = false
#size = 0
#controller = null
aborted = false
reason = false
constructor ({ maxSize, signal }, handler) {
if (maxSize != null && (!Number.isFinite(maxSize) || maxSize < 1)) {
throw new InvalidArgumentError('maxSize must be a number greater than 0')
}
super(handler)
this.#maxSize = maxSize ?? this.#maxSize
// this.#handler = handler
}
#abort (reason) {
this.aborted = true
this.reason = reason
}
onRequestStart (controller, context) {
controller.abort = this.#abort.bind(this)
this.#controller = controller
return super.onRequestStart(controller, context)
}
onResponseStart (controller, statusCode, headers, statusMessage) {
const contentLength = headers['content-length']
if (contentLength != null && contentLength > this.#maxSize) {
throw new RequestAbortedError(
`Response size (${contentLength}) larger than maxSize (${
this.#maxSize
})`
)
}
if (this.aborted === true) {
return true
}
return super.onResponseStart(controller, statusCode, headers, statusMessage)
}
onResponseError (controller, err) {
if (this.#dumped) {
return
}
// On network errors before connect, controller will be null
err = this.#controller?.reason ?? err
super.onResponseError(controller, err)
}
onResponseData (controller, chunk) {
this.#size = this.#size + chunk.length
if (this.#size >= this.#maxSize) {
this.#dumped = true
if (this.aborted === true) {
super.onResponseError(controller, this.reason)
} else {
super.onResponseEnd(controller, {})
}
}
return true
}
onResponseEnd (controller, trailers) {
if (this.#dumped) {
return
}
if (this.#controller.aborted === true) {
super.onResponseError(controller, this.reason)
return
}
super.onResponseEnd(controller, trailers)
}
}
function createDumpInterceptor (
{ maxSize: defaultMaxSize } = {
maxSize: 1024 * 1024
}
) {
return dispatch => {
return function Intercept (opts, handler) {
const { dumpMaxSize = defaultMaxSize } = opts
const dumpHandler = new DumpHandler({ maxSize: dumpMaxSize, signal: opts.signal }, handler)
return dispatch(opts, dumpHandler)
}
}
}
module.exports = createDumpInterceptor
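DumpHandler above drains response bodies up to a size cap instead of surfacing them, which fits a fire-and-forget forward where only the status code matters. A sketch assuming the factory is exposed as interceptors.dump; the per-request dumpMaxSize option comes from the code above, while the host is a placeholder:

const { Agent, interceptors, request } = require('undici');

const agent = new Agent().compose(interceptors.dump({ maxSize: 1024 * 1024 }));

async function pingGotify (payload) {
  // The body is consumed and discarded by the interceptor; only the status is read.
  const { statusCode } = await request('https://gotify.example.com/message', {
    dispatcher: agent,
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify(payload),
    dumpMaxSize: 128 * 1024 // per-request override of the 1 MiB default
  });
  return statusCode;
}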

21
node_modules/undici/lib/interceptor/redirect.js generated vendored Normal file

@@ -0,0 +1,21 @@
'use strict'
const RedirectHandler = require('../handler/redirect-handler')
function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections } = {}) {
return (dispatch) => {
return function Intercept (opts, handler) {
const { maxRedirections = defaultMaxRedirections, ...rest } = opts
if (maxRedirections == null || maxRedirections === 0) {
return dispatch(opts, handler)
}
const dispatchOpts = { ...rest } // Stop sub dispatcher from also redirecting.
const redirectHandler = new RedirectHandler(dispatch, maxRedirections, dispatchOpts, handler)
return dispatch(dispatchOpts, redirectHandler)
}
}
}
module.exports = createRedirectInterceptor
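The redirect interceptor only engages when maxRedirections is set to a non-zero value; otherwise it hands the request straight to the inner dispatcher. A sketch assuming it is exposed as interceptors.redirect:

const { Agent, interceptors } = require('undici');

// Follow up to 3 redirects; with maxRedirections omitted or 0 the
// interceptor above is effectively a pass-through.
const agent = new Agent().compose(interceptors.redirect({ maxRedirections: 3 }));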

95
node_modules/undici/lib/interceptor/response-error.js generated vendored Normal file

@@ -0,0 +1,95 @@
'use strict'
// const { parseHeaders } = require('../core/util')
const DecoratorHandler = require('../handler/decorator-handler')
const { ResponseError } = require('../core/errors')
class ResponseErrorHandler extends DecoratorHandler {
#statusCode
#contentType
#decoder
#headers
#body
constructor (_opts, { handler }) {
super(handler)
}
#checkContentType (contentType) {
return (this.#contentType ?? '').indexOf(contentType) === 0
}
onRequestStart (controller, context) {
this.#statusCode = 0
this.#contentType = null
this.#decoder = null
this.#headers = null
this.#body = ''
return super.onRequestStart(controller, context)
}
onResponseStart (controller, statusCode, headers, statusMessage) {
this.#statusCode = statusCode
this.#headers = headers
this.#contentType = headers['content-type']
if (this.#statusCode < 400) {
return super.onResponseStart(controller, statusCode, headers, statusMessage)
}
if (this.#checkContentType('application/json') || this.#checkContentType('text/plain')) {
this.#decoder = new TextDecoder('utf-8')
}
}
onResponseData (controller, chunk) {
if (this.#statusCode < 400) {
return super.onResponseData(controller, chunk)
}
this.#body += this.#decoder?.decode(chunk, { stream: true }) ?? ''
}
onResponseEnd (controller, trailers) {
if (this.#statusCode >= 400) {
this.#body += this.#decoder?.decode(undefined, { stream: false }) ?? ''
if (this.#checkContentType('application/json')) {
try {
this.#body = JSON.parse(this.#body)
} catch {
// Do nothing...
}
}
let err
const stackTraceLimit = Error.stackTraceLimit
Error.stackTraceLimit = 0
try {
err = new ResponseError('Response Error', this.#statusCode, {
body: this.#body,
headers: this.#headers
})
} finally {
Error.stackTraceLimit = stackTraceLimit
}
super.onResponseError(controller, err)
} else {
super.onResponseEnd(controller, trailers)
}
}
onResponseError (controller, err) {
super.onResponseError(controller, err)
}
}
module.exports = () => {
return (dispatch) => {
return function Intercept (opts, handler) {
return dispatch(opts, new ResponseErrorHandler(opts, { handler }))
}
}
}
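ResponseErrorHandler converts 4xx/5xx responses into a thrown ResponseError that carries the status code, headers and (JSON-parsed when possible) body. A sketch of using it so a failed Gotify call surfaces as an exception, assuming the interceptors.responseError export and that ResponseError exposes statusCode/body properties; the origin is a placeholder:

const { Agent, interceptors, request } = require('undici');

const agent = new Agent().compose(interceptors.responseError());

async function forward (payload) {
  try {
    await request('https://gotify.example.com/message', {
      dispatcher: agent,
      method: 'POST',
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify(payload)
    });
  } catch (err) {
    // Populated from the ResponseError constructed in onResponseEnd above.
    console.error('Gotify rejected the message:', err.statusCode, err.body);
  }
}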

19
node_modules/undici/lib/interceptor/retry.js generated vendored Normal file

@@ -0,0 +1,19 @@
'use strict'
const RetryHandler = require('../handler/retry-handler')
module.exports = globalOpts => {
return dispatch => {
return function retryInterceptor (opts, handler) {
return dispatch(
opts,
new RetryHandler(
{ ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } },
{
handler,
dispatch
}
)
)
}
}
}
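The retry interceptor merges its construction-time options with any per-request retryOptions and hands them to RetryHandler. A sketch assuming the interceptors.retry export; the option names mirror undici's RetryHandler and the values are illustrative:

const { Agent, interceptors } = require('undici');

const agent = new Agent().compose(
  interceptors.retry({
    maxRetries: 3,
    minTimeout: 500,              // ms before the first retry
    timeoutFactor: 2,             // exponential backoff between attempts
    statusCodes: [502, 503, 504], // retry only on these upstream errors
    methods: ['POST']             // the forwarder only POSTs
  })
);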

0
node_modules/undici/lib/llhttp/.gitkeep generated vendored Normal file

195
node_modules/undici/lib/llhttp/constants.d.ts generated vendored Normal file

@@ -0,0 +1,195 @@
export type IntDict = Record<string, number>;
export declare const ERROR: IntDict;
export declare const TYPE: IntDict;
export declare const FLAGS: IntDict;
export declare const LENIENT_FLAGS: IntDict;
export declare const METHODS: IntDict;
export declare const STATUSES: IntDict;
export declare const FINISH: IntDict;
export declare const HEADER_STATE: IntDict;
export declare const METHODS_HTTP: number[];
export declare const METHODS_ICE: number[];
export declare const METHODS_RTSP: number[];
export declare const METHOD_MAP: IntDict;
export declare const H_METHOD_MAP: {
[k: string]: number;
};
export declare const STATUSES_HTTP: number[];
export type CharList = (string | number)[];
export declare const ALPHA: CharList;
export declare const NUM_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare const HEX_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
A: number;
B: number;
C: number;
D: number;
E: number;
F: number;
a: number;
b: number;
c: number;
d: number;
e: number;
f: number;
};
export declare const NUM: CharList;
export declare const ALPHANUM: CharList;
export declare const MARK: CharList;
export declare const USERINFO_CHARS: CharList;
export declare const URL_CHAR: CharList;
export declare const HEX: CharList;
export declare const TOKEN: CharList;
export declare const HEADER_CHARS: CharList;
export declare const CONNECTION_TOKEN_CHARS: CharList;
export declare const QUOTED_STRING: CharList;
export declare const HTAB_SP_VCHAR_OBS_TEXT: CharList;
export declare const MAJOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare const MINOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
export declare const SPECIAL_HEADERS: {
connection: number;
'content-length': number;
'proxy-connection': number;
'transfer-encoding': number;
upgrade: number;
};
declare const _default: {
ERROR: IntDict;
TYPE: IntDict;
FLAGS: IntDict;
LENIENT_FLAGS: IntDict;
METHODS: IntDict;
STATUSES: IntDict;
FINISH: IntDict;
HEADER_STATE: IntDict;
ALPHA: CharList;
NUM_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
HEX_MAP: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
A: number;
B: number;
C: number;
D: number;
E: number;
F: number;
a: number;
b: number;
c: number;
d: number;
e: number;
f: number;
};
NUM: CharList;
ALPHANUM: CharList;
MARK: CharList;
USERINFO_CHARS: CharList;
URL_CHAR: CharList;
HEX: CharList;
TOKEN: CharList;
HEADER_CHARS: CharList;
CONNECTION_TOKEN_CHARS: CharList;
QUOTED_STRING: CharList;
HTAB_SP_VCHAR_OBS_TEXT: CharList;
MAJOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
MINOR: {
0: number;
1: number;
2: number;
3: number;
4: number;
5: number;
6: number;
7: number;
8: number;
9: number;
};
SPECIAL_HEADERS: {
connection: number;
'content-length': number;
'proxy-connection': number;
'transfer-encoding': number;
upgrade: number;
};
METHODS_HTTP: number[];
METHODS_ICE: number[];
METHODS_RTSP: number[];
METHOD_MAP: IntDict;
H_METHOD_MAP: {
[k: string]: number;
};
STATUSES_HTTP: number[];
};
export default _default;

531
node_modules/undici/lib/llhttp/constants.js generated vendored Normal file
View File

@@ -0,0 +1,531 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SPECIAL_HEADERS = exports.MINOR = exports.MAJOR = exports.HTAB_SP_VCHAR_OBS_TEXT = exports.QUOTED_STRING = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.HEX = exports.URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.STATUSES_HTTP = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.HEADER_STATE = exports.FINISH = exports.STATUSES = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
const utils_1 = require("./utils");
// Enums
exports.ERROR = {
OK: 0,
INTERNAL: 1,
STRICT: 2,
CR_EXPECTED: 25,
LF_EXPECTED: 3,
UNEXPECTED_CONTENT_LENGTH: 4,
UNEXPECTED_SPACE: 30,
CLOSED_CONNECTION: 5,
INVALID_METHOD: 6,
INVALID_URL: 7,
INVALID_CONSTANT: 8,
INVALID_VERSION: 9,
INVALID_HEADER_TOKEN: 10,
INVALID_CONTENT_LENGTH: 11,
INVALID_CHUNK_SIZE: 12,
INVALID_STATUS: 13,
INVALID_EOF_STATE: 14,
INVALID_TRANSFER_ENCODING: 15,
CB_MESSAGE_BEGIN: 16,
CB_HEADERS_COMPLETE: 17,
CB_MESSAGE_COMPLETE: 18,
CB_CHUNK_HEADER: 19,
CB_CHUNK_COMPLETE: 20,
PAUSED: 21,
PAUSED_UPGRADE: 22,
PAUSED_H2_UPGRADE: 23,
USER: 24,
CB_URL_COMPLETE: 26,
CB_STATUS_COMPLETE: 27,
CB_METHOD_COMPLETE: 32,
CB_VERSION_COMPLETE: 33,
CB_HEADER_FIELD_COMPLETE: 28,
CB_HEADER_VALUE_COMPLETE: 29,
CB_CHUNK_EXTENSION_NAME_COMPLETE: 34,
CB_CHUNK_EXTENSION_VALUE_COMPLETE: 35,
CB_RESET: 31,
CB_PROTOCOL_COMPLETE: 38,
};
exports.TYPE = {
BOTH: 0, // default
REQUEST: 1,
RESPONSE: 2,
};
exports.FLAGS = {
CONNECTION_KEEP_ALIVE: 1 << 0,
CONNECTION_CLOSE: 1 << 1,
CONNECTION_UPGRADE: 1 << 2,
CHUNKED: 1 << 3,
UPGRADE: 1 << 4,
CONTENT_LENGTH: 1 << 5,
SKIPBODY: 1 << 6,
TRAILING: 1 << 7,
// 1 << 8 is unused
TRANSFER_ENCODING: 1 << 9,
};
exports.LENIENT_FLAGS = {
HEADERS: 1 << 0,
CHUNKED_LENGTH: 1 << 1,
KEEP_ALIVE: 1 << 2,
TRANSFER_ENCODING: 1 << 3,
VERSION: 1 << 4,
DATA_AFTER_CLOSE: 1 << 5,
OPTIONAL_LF_AFTER_CR: 1 << 6,
OPTIONAL_CRLF_AFTER_CHUNK: 1 << 7,
OPTIONAL_CR_BEFORE_LF: 1 << 8,
SPACES_AFTER_CHUNK_SIZE: 1 << 9,
};
exports.METHODS = {
'DELETE': 0,
'GET': 1,
'HEAD': 2,
'POST': 3,
'PUT': 4,
/* pathological */
'CONNECT': 5,
'OPTIONS': 6,
'TRACE': 7,
/* WebDAV */
'COPY': 8,
'LOCK': 9,
'MKCOL': 10,
'MOVE': 11,
'PROPFIND': 12,
'PROPPATCH': 13,
'SEARCH': 14,
'UNLOCK': 15,
'BIND': 16,
'REBIND': 17,
'UNBIND': 18,
'ACL': 19,
/* subversion */
'REPORT': 20,
'MKACTIVITY': 21,
'CHECKOUT': 22,
'MERGE': 23,
/* upnp */
'M-SEARCH': 24,
'NOTIFY': 25,
'SUBSCRIBE': 26,
'UNSUBSCRIBE': 27,
/* RFC-5789 */
'PATCH': 28,
'PURGE': 29,
/* CalDAV */
'MKCALENDAR': 30,
/* RFC-2068, section 19.6.1.2 */
'LINK': 31,
'UNLINK': 32,
/* icecast */
'SOURCE': 33,
/* RFC-7540, section 11.6 */
'PRI': 34,
/* RFC-2326 RTSP */
'DESCRIBE': 35,
'ANNOUNCE': 36,
'SETUP': 37,
'PLAY': 38,
'PAUSE': 39,
'TEARDOWN': 40,
'GET_PARAMETER': 41,
'SET_PARAMETER': 42,
'REDIRECT': 43,
'RECORD': 44,
/* RAOP */
'FLUSH': 45,
/* DRAFT https://www.ietf.org/archive/id/draft-ietf-httpbis-safe-method-w-body-02.html */
'QUERY': 46,
};
exports.STATUSES = {
CONTINUE: 100,
SWITCHING_PROTOCOLS: 101,
PROCESSING: 102,
EARLY_HINTS: 103,
RESPONSE_IS_STALE: 110, // Unofficial
REVALIDATION_FAILED: 111, // Unofficial
DISCONNECTED_OPERATION: 112, // Unofficial
HEURISTIC_EXPIRATION: 113, // Unofficial
MISCELLANEOUS_WARNING: 199, // Unofficial
OK: 200,
CREATED: 201,
ACCEPTED: 202,
NON_AUTHORITATIVE_INFORMATION: 203,
NO_CONTENT: 204,
RESET_CONTENT: 205,
PARTIAL_CONTENT: 206,
MULTI_STATUS: 207,
ALREADY_REPORTED: 208,
TRANSFORMATION_APPLIED: 214, // Unofficial
IM_USED: 226,
MISCELLANEOUS_PERSISTENT_WARNING: 299, // Unofficial
MULTIPLE_CHOICES: 300,
MOVED_PERMANENTLY: 301,
FOUND: 302,
SEE_OTHER: 303,
NOT_MODIFIED: 304,
USE_PROXY: 305,
SWITCH_PROXY: 306, // No longer used
TEMPORARY_REDIRECT: 307,
PERMANENT_REDIRECT: 308,
BAD_REQUEST: 400,
UNAUTHORIZED: 401,
PAYMENT_REQUIRED: 402,
FORBIDDEN: 403,
NOT_FOUND: 404,
METHOD_NOT_ALLOWED: 405,
NOT_ACCEPTABLE: 406,
PROXY_AUTHENTICATION_REQUIRED: 407,
REQUEST_TIMEOUT: 408,
CONFLICT: 409,
GONE: 410,
LENGTH_REQUIRED: 411,
PRECONDITION_FAILED: 412,
PAYLOAD_TOO_LARGE: 413,
URI_TOO_LONG: 414,
UNSUPPORTED_MEDIA_TYPE: 415,
RANGE_NOT_SATISFIABLE: 416,
EXPECTATION_FAILED: 417,
IM_A_TEAPOT: 418,
PAGE_EXPIRED: 419, // Unofficial
ENHANCE_YOUR_CALM: 420, // Unofficial
MISDIRECTED_REQUEST: 421,
UNPROCESSABLE_ENTITY: 422,
LOCKED: 423,
FAILED_DEPENDENCY: 424,
TOO_EARLY: 425,
UPGRADE_REQUIRED: 426,
PRECONDITION_REQUIRED: 428,
TOO_MANY_REQUESTS: 429,
REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL: 430, // Unofficial
REQUEST_HEADER_FIELDS_TOO_LARGE: 431,
LOGIN_TIMEOUT: 440, // Unofficial
NO_RESPONSE: 444, // Unofficial
RETRY_WITH: 449, // Unofficial
BLOCKED_BY_PARENTAL_CONTROL: 450, // Unofficial
UNAVAILABLE_FOR_LEGAL_REASONS: 451,
CLIENT_CLOSED_LOAD_BALANCED_REQUEST: 460, // Unofficial
INVALID_X_FORWARDED_FOR: 463, // Unofficial
REQUEST_HEADER_TOO_LARGE: 494, // Unofficial
SSL_CERTIFICATE_ERROR: 495, // Unofficial
SSL_CERTIFICATE_REQUIRED: 496, // Unofficial
HTTP_REQUEST_SENT_TO_HTTPS_PORT: 497, // Unofficial
INVALID_TOKEN: 498, // Unofficial
CLIENT_CLOSED_REQUEST: 499, // Unofficial
INTERNAL_SERVER_ERROR: 500,
NOT_IMPLEMENTED: 501,
BAD_GATEWAY: 502,
SERVICE_UNAVAILABLE: 503,
GATEWAY_TIMEOUT: 504,
HTTP_VERSION_NOT_SUPPORTED: 505,
VARIANT_ALSO_NEGOTIATES: 506,
INSUFFICIENT_STORAGE: 507,
LOOP_DETECTED: 508,
BANDWIDTH_LIMIT_EXCEEDED: 509,
NOT_EXTENDED: 510,
NETWORK_AUTHENTICATION_REQUIRED: 511,
WEB_SERVER_UNKNOWN_ERROR: 520, // Unofficial
WEB_SERVER_IS_DOWN: 521, // Unofficial
CONNECTION_TIMEOUT: 522, // Unofficial
ORIGIN_IS_UNREACHABLE: 523, // Unofficial
TIMEOUT_OCCURED: 524, // Unofficial
SSL_HANDSHAKE_FAILED: 525, // Unofficial
INVALID_SSL_CERTIFICATE: 526, // Unofficial
RAILGUN_ERROR: 527, // Unofficial
SITE_IS_OVERLOADED: 529, // Unofficial
SITE_IS_FROZEN: 530, // Unofficial
IDENTITY_PROVIDER_AUTHENTICATION_ERROR: 561, // Unofficial
NETWORK_READ_TIMEOUT: 598, // Unofficial
NETWORK_CONNECT_TIMEOUT: 599, // Unofficial
};
exports.FINISH = {
SAFE: 0,
SAFE_WITH_CB: 1,
UNSAFE: 2,
};
exports.HEADER_STATE = {
GENERAL: 0,
CONNECTION: 1,
CONTENT_LENGTH: 2,
TRANSFER_ENCODING: 3,
UPGRADE: 4,
CONNECTION_KEEP_ALIVE: 5,
CONNECTION_CLOSE: 6,
CONNECTION_UPGRADE: 7,
TRANSFER_ENCODING_CHUNKED: 8,
};
// C headers
exports.METHODS_HTTP = [
exports.METHODS.DELETE,
exports.METHODS.GET,
exports.METHODS.HEAD,
exports.METHODS.POST,
exports.METHODS.PUT,
exports.METHODS.CONNECT,
exports.METHODS.OPTIONS,
exports.METHODS.TRACE,
exports.METHODS.COPY,
exports.METHODS.LOCK,
exports.METHODS.MKCOL,
exports.METHODS.MOVE,
exports.METHODS.PROPFIND,
exports.METHODS.PROPPATCH,
exports.METHODS.SEARCH,
exports.METHODS.UNLOCK,
exports.METHODS.BIND,
exports.METHODS.REBIND,
exports.METHODS.UNBIND,
exports.METHODS.ACL,
exports.METHODS.REPORT,
exports.METHODS.MKACTIVITY,
exports.METHODS.CHECKOUT,
exports.METHODS.MERGE,
exports.METHODS['M-SEARCH'],
exports.METHODS.NOTIFY,
exports.METHODS.SUBSCRIBE,
exports.METHODS.UNSUBSCRIBE,
exports.METHODS.PATCH,
exports.METHODS.PURGE,
exports.METHODS.MKCALENDAR,
exports.METHODS.LINK,
exports.METHODS.UNLINK,
exports.METHODS.PRI,
// TODO(indutny): should we allow it with HTTP?
exports.METHODS.SOURCE,
exports.METHODS.QUERY,
];
exports.METHODS_ICE = [
exports.METHODS.SOURCE,
];
exports.METHODS_RTSP = [
exports.METHODS.OPTIONS,
exports.METHODS.DESCRIBE,
exports.METHODS.ANNOUNCE,
exports.METHODS.SETUP,
exports.METHODS.PLAY,
exports.METHODS.PAUSE,
exports.METHODS.TEARDOWN,
exports.METHODS.GET_PARAMETER,
exports.METHODS.SET_PARAMETER,
exports.METHODS.REDIRECT,
exports.METHODS.RECORD,
exports.METHODS.FLUSH,
// For AirPlay
exports.METHODS.GET,
exports.METHODS.POST,
];
exports.METHOD_MAP = (0, utils_1.enumToMap)(exports.METHODS);
exports.H_METHOD_MAP = Object.fromEntries(Object.entries(exports.METHODS).filter(([k]) => k.startsWith('H')));
exports.STATUSES_HTTP = [
exports.STATUSES.CONTINUE,
exports.STATUSES.SWITCHING_PROTOCOLS,
exports.STATUSES.PROCESSING,
exports.STATUSES.EARLY_HINTS,
exports.STATUSES.RESPONSE_IS_STALE,
exports.STATUSES.REVALIDATION_FAILED,
exports.STATUSES.DISCONNECTED_OPERATION,
exports.STATUSES.HEURISTIC_EXPIRATION,
exports.STATUSES.MISCELLANEOUS_WARNING,
exports.STATUSES.OK,
exports.STATUSES.CREATED,
exports.STATUSES.ACCEPTED,
exports.STATUSES.NON_AUTHORITATIVE_INFORMATION,
exports.STATUSES.NO_CONTENT,
exports.STATUSES.RESET_CONTENT,
exports.STATUSES.PARTIAL_CONTENT,
exports.STATUSES.MULTI_STATUS,
exports.STATUSES.ALREADY_REPORTED,
exports.STATUSES.TRANSFORMATION_APPLIED,
exports.STATUSES.IM_USED,
exports.STATUSES.MISCELLANEOUS_PERSISTENT_WARNING,
exports.STATUSES.MULTIPLE_CHOICES,
exports.STATUSES.MOVED_PERMANENTLY,
exports.STATUSES.FOUND,
exports.STATUSES.SEE_OTHER,
exports.STATUSES.NOT_MODIFIED,
exports.STATUSES.USE_PROXY,
exports.STATUSES.SWITCH_PROXY,
exports.STATUSES.TEMPORARY_REDIRECT,
exports.STATUSES.PERMANENT_REDIRECT,
exports.STATUSES.BAD_REQUEST,
exports.STATUSES.UNAUTHORIZED,
exports.STATUSES.PAYMENT_REQUIRED,
exports.STATUSES.FORBIDDEN,
exports.STATUSES.NOT_FOUND,
exports.STATUSES.METHOD_NOT_ALLOWED,
exports.STATUSES.NOT_ACCEPTABLE,
exports.STATUSES.PROXY_AUTHENTICATION_REQUIRED,
exports.STATUSES.REQUEST_TIMEOUT,
exports.STATUSES.CONFLICT,
exports.STATUSES.GONE,
exports.STATUSES.LENGTH_REQUIRED,
exports.STATUSES.PRECONDITION_FAILED,
exports.STATUSES.PAYLOAD_TOO_LARGE,
exports.STATUSES.URI_TOO_LONG,
exports.STATUSES.UNSUPPORTED_MEDIA_TYPE,
exports.STATUSES.RANGE_NOT_SATISFIABLE,
exports.STATUSES.EXPECTATION_FAILED,
exports.STATUSES.IM_A_TEAPOT,
exports.STATUSES.PAGE_EXPIRED,
exports.STATUSES.ENHANCE_YOUR_CALM,
exports.STATUSES.MISDIRECTED_REQUEST,
exports.STATUSES.UNPROCESSABLE_ENTITY,
exports.STATUSES.LOCKED,
exports.STATUSES.FAILED_DEPENDENCY,
exports.STATUSES.TOO_EARLY,
exports.STATUSES.UPGRADE_REQUIRED,
exports.STATUSES.PRECONDITION_REQUIRED,
exports.STATUSES.TOO_MANY_REQUESTS,
exports.STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL,
exports.STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE,
exports.STATUSES.LOGIN_TIMEOUT,
exports.STATUSES.NO_RESPONSE,
exports.STATUSES.RETRY_WITH,
exports.STATUSES.BLOCKED_BY_PARENTAL_CONTROL,
exports.STATUSES.UNAVAILABLE_FOR_LEGAL_REASONS,
exports.STATUSES.CLIENT_CLOSED_LOAD_BALANCED_REQUEST,
exports.STATUSES.INVALID_X_FORWARDED_FOR,
exports.STATUSES.REQUEST_HEADER_TOO_LARGE,
exports.STATUSES.SSL_CERTIFICATE_ERROR,
exports.STATUSES.SSL_CERTIFICATE_REQUIRED,
exports.STATUSES.HTTP_REQUEST_SENT_TO_HTTPS_PORT,
exports.STATUSES.INVALID_TOKEN,
exports.STATUSES.CLIENT_CLOSED_REQUEST,
exports.STATUSES.INTERNAL_SERVER_ERROR,
exports.STATUSES.NOT_IMPLEMENTED,
exports.STATUSES.BAD_GATEWAY,
exports.STATUSES.SERVICE_UNAVAILABLE,
exports.STATUSES.GATEWAY_TIMEOUT,
exports.STATUSES.HTTP_VERSION_NOT_SUPPORTED,
exports.STATUSES.VARIANT_ALSO_NEGOTIATES,
exports.STATUSES.INSUFFICIENT_STORAGE,
exports.STATUSES.LOOP_DETECTED,
exports.STATUSES.BANDWIDTH_LIMIT_EXCEEDED,
exports.STATUSES.NOT_EXTENDED,
exports.STATUSES.NETWORK_AUTHENTICATION_REQUIRED,
exports.STATUSES.WEB_SERVER_UNKNOWN_ERROR,
exports.STATUSES.WEB_SERVER_IS_DOWN,
exports.STATUSES.CONNECTION_TIMEOUT,
exports.STATUSES.ORIGIN_IS_UNREACHABLE,
exports.STATUSES.TIMEOUT_OCCURED,
exports.STATUSES.SSL_HANDSHAKE_FAILED,
exports.STATUSES.INVALID_SSL_CERTIFICATE,
exports.STATUSES.RAILGUN_ERROR,
exports.STATUSES.SITE_IS_OVERLOADED,
exports.STATUSES.SITE_IS_FROZEN,
exports.STATUSES.IDENTITY_PROVIDER_AUTHENTICATION_ERROR,
exports.STATUSES.NETWORK_READ_TIMEOUT,
exports.STATUSES.NETWORK_CONNECT_TIMEOUT,
];
exports.ALPHA = [];
for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
// Upper case
exports.ALPHA.push(String.fromCharCode(i));
// Lower case
exports.ALPHA.push(String.fromCharCode(i + 0x20));
}
exports.NUM_MAP = {
0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
};
exports.HEX_MAP = {
0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
};
exports.NUM = [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
];
exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
exports.USERINFO_CHARS = exports.ALPHANUM
.concat(exports.MARK)
.concat(['%', ';', ':', '&', '=', '+', '$', ',']);
// TODO(indutny): use RFC
exports.URL_CHAR = [
'!', '"', '$', '%', '&', '\'',
'(', ')', '*', '+', ',', '-', '.', '/',
':', ';', '<', '=', '>',
'@', '[', '\\', ']', '^', '_',
'`',
'{', '|', '}', '~',
].concat(exports.ALPHANUM);
exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
/* Tokens as defined by rfc 2616. Also lowercases them.
* token = 1*<any CHAR except CTLs or separators>
* separators = "(" | ")" | "<" | ">" | "@"
* | "," | ";" | ":" | "\" | <">
* | "/" | "[" | "]" | "?" | "="
* | "{" | "}" | SP | HT
*/
exports.TOKEN = [
'!', '#', '$', '%', '&', '\'',
'*', '+', '-', '.',
'^', '_', '`',
'|', '~',
].concat(exports.ALPHANUM);
/*
* Verify that a char is a valid visible (printable) US-ASCII
* character or %x80-FF
*/
exports.HEADER_CHARS = ['\t'];
for (let i = 32; i <= 255; i++) {
if (i !== 127) {
exports.HEADER_CHARS.push(i);
}
}
// ',' = 44 (0x2C)
exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
exports.QUOTED_STRING = ['\t', ' '];
for (let i = 0x21; i <= 0xff; i++) {
if (i !== 0x22 && i !== 0x5c) { // All characters in ASCII except \ and "
exports.QUOTED_STRING.push(i);
}
}
exports.HTAB_SP_VCHAR_OBS_TEXT = ['\t', ' '];
// VCHAR: https://tools.ietf.org/html/rfc5234#appendix-B.1
for (let i = 0x21; i <= 0x7E; i++) {
exports.HTAB_SP_VCHAR_OBS_TEXT.push(i);
}
// OBS_TEXT: https://datatracker.ietf.org/doc/html/rfc9110#name-collected-abnf
for (let i = 0x80; i <= 0xff; i++) {
exports.HTAB_SP_VCHAR_OBS_TEXT.push(i);
}
exports.MAJOR = exports.NUM_MAP;
exports.MINOR = exports.MAJOR;
exports.SPECIAL_HEADERS = {
'connection': exports.HEADER_STATE.CONNECTION,
'content-length': exports.HEADER_STATE.CONTENT_LENGTH,
'proxy-connection': exports.HEADER_STATE.CONNECTION,
'transfer-encoding': exports.HEADER_STATE.TRANSFER_ENCODING,
'upgrade': exports.HEADER_STATE.UPGRADE,
};
exports.default = {
ERROR: exports.ERROR,
TYPE: exports.TYPE,
FLAGS: exports.FLAGS,
LENIENT_FLAGS: exports.LENIENT_FLAGS,
METHODS: exports.METHODS,
STATUSES: exports.STATUSES,
FINISH: exports.FINISH,
HEADER_STATE: exports.HEADER_STATE,
ALPHA: exports.ALPHA,
NUM_MAP: exports.NUM_MAP,
HEX_MAP: exports.HEX_MAP,
NUM: exports.NUM,
ALPHANUM: exports.ALPHANUM,
MARK: exports.MARK,
USERINFO_CHARS: exports.USERINFO_CHARS,
URL_CHAR: exports.URL_CHAR,
HEX: exports.HEX,
TOKEN: exports.TOKEN,
HEADER_CHARS: exports.HEADER_CHARS,
CONNECTION_TOKEN_CHARS: exports.CONNECTION_TOKEN_CHARS,
QUOTED_STRING: exports.QUOTED_STRING,
HTAB_SP_VCHAR_OBS_TEXT: exports.HTAB_SP_VCHAR_OBS_TEXT,
MAJOR: exports.MAJOR,
MINOR: exports.MINOR,
SPECIAL_HEADERS: exports.SPECIAL_HEADERS,
METHODS_HTTP: exports.METHODS_HTTP,
METHODS_ICE: exports.METHODS_ICE,
METHODS_RTSP: exports.METHODS_RTSP,
METHOD_MAP: exports.METHOD_MAP,
H_METHOD_MAP: exports.H_METHOD_MAP,
STATUSES_HTTP: exports.STATUSES_HTTP,
};
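Most of this file is lookup data for the llhttp parser. For instance, SPECIAL_HEADERS maps lower-cased header names to HEADER_STATE values so the parser can fast-path headers like transfer-encoding. A tiny illustration, assuming the module is loaded relative to the vendored tree (undici's package exports may not expose this path publicly):

const { SPECIAL_HEADERS, HEADER_STATE, STATUSES } = require('./constants');

// Header names are lower-cased before this lookup inside the parser.
console.log(SPECIAL_HEADERS['transfer-encoding'] === HEADER_STATE.TRANSFER_ENCODING); // true

// The status table doubles as a readable reverse of the numeric codes.
console.log(STATUSES.IM_A_TEAPOT); // 418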

15
node_modules/undici/lib/llhttp/llhttp-wasm.js generated vendored Normal file

File diff suppressed because one or more lines are too long

15
node_modules/undici/lib/llhttp/llhttp_simd-wasm.js generated vendored Normal file

File diff suppressed because one or more lines are too long

2
node_modules/undici/lib/llhttp/utils.d.ts generated vendored Normal file

@@ -0,0 +1,2 @@
import type { IntDict } from './constants';
export declare function enumToMap(obj: IntDict, filter?: readonly number[], exceptions?: readonly number[]): IntDict;

12
node_modules/undici/lib/llhttp/utils.js generated vendored Normal file

@@ -0,0 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.enumToMap = enumToMap;
function enumToMap(obj, filter = [], exceptions = []) {
const emptyFilter = (filter?.length ?? 0) === 0;
const emptyExceptions = (exceptions?.length ?? 0) === 0;
return Object.fromEntries(Object.entries(obj).filter(([, value]) => {
return (typeof value === 'number' &&
(emptyFilter || filter.includes(value)) &&
(emptyExceptions || !exceptions.includes(value)));
}));
}
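enumToMap keeps only the numeric entries of an enum-like object, optionally restricted by a filter list or pruned by an exceptions list; constants.js above uses it to build METHOD_MAP. A standalone illustration with a made-up object, assuming the module is loaded relative to the vendored tree:

const { enumToMap } = require('./utils');

const SAMPLE = { GET: 1, POST: 3, NAME: 'not-a-number' };

console.log(enumToMap(SAMPLE));          // { GET: 1, POST: 3 }  (non-numbers dropped)
console.log(enumToMap(SAMPLE, [3]));     // { POST: 3 }          (filter keeps only listed values)
console.log(enumToMap(SAMPLE, [], [1])); // { POST: 3 }          (exceptions drop listed values)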

230
node_modules/undici/lib/mock/mock-agent.js generated vendored Normal file

@@ -0,0 +1,230 @@
'use strict'
const { kClients } = require('../core/symbols')
const Agent = require('../dispatcher/agent')
const {
kAgent,
kMockAgentSet,
kMockAgentGet,
kDispatches,
kIsMockActive,
kNetConnect,
kGetNetConnect,
kOptions,
kFactory,
kMockAgentRegisterCallHistory,
kMockAgentIsCallHistoryEnabled,
kMockAgentAddCallHistoryLog,
kMockAgentMockCallHistoryInstance,
kMockAgentAcceptsNonStandardSearchParameters,
kMockCallHistoryAddLog,
kIgnoreTrailingSlash
} = require('./mock-symbols')
const MockClient = require('./mock-client')
const MockPool = require('./mock-pool')
const { matchValue, normalizeSearchParams, buildAndValidateMockOptions } = require('./mock-utils')
const { InvalidArgumentError, UndiciError } = require('../core/errors')
const Dispatcher = require('../dispatcher/dispatcher')
const PendingInterceptorsFormatter = require('./pending-interceptors-formatter')
const { MockCallHistory } = require('./mock-call-history')
class MockAgent extends Dispatcher {
constructor (opts = {}) {
super(opts)
const mockOptions = buildAndValidateMockOptions(opts)
this[kNetConnect] = true
this[kIsMockActive] = true
this[kMockAgentIsCallHistoryEnabled] = mockOptions.enableCallHistory ?? false
this[kMockAgentAcceptsNonStandardSearchParameters] = mockOptions.acceptNonStandardSearchParameters ?? false
this[kIgnoreTrailingSlash] = mockOptions.ignoreTrailingSlash ?? false
// Instantiate Agent and encapsulate
if (opts?.agent && typeof opts.agent.dispatch !== 'function') {
throw new InvalidArgumentError('Argument opts.agent must implement Agent')
}
const agent = opts?.agent ? opts.agent : new Agent(opts)
this[kAgent] = agent
this[kClients] = agent[kClients]
this[kOptions] = mockOptions
if (this[kMockAgentIsCallHistoryEnabled]) {
this[kMockAgentRegisterCallHistory]()
}
}
get (origin) {
const originKey = this[kIgnoreTrailingSlash]
? origin.replace(/\/$/, '')
: origin
let dispatcher = this[kMockAgentGet](originKey)
if (!dispatcher) {
dispatcher = this[kFactory](originKey)
this[kMockAgentSet](originKey, dispatcher)
}
return dispatcher
}
dispatch (opts, handler) {
// Call MockAgent.get to perform additional setup before dispatching as normal
this.get(opts.origin)
this[kMockAgentAddCallHistoryLog](opts)
const acceptNonStandardSearchParameters = this[kMockAgentAcceptsNonStandardSearchParameters]
const dispatchOpts = { ...opts }
if (acceptNonStandardSearchParameters && dispatchOpts.path) {
const [path, searchParams] = dispatchOpts.path.split('?')
const normalizedSearchParams = normalizeSearchParams(searchParams, acceptNonStandardSearchParameters)
dispatchOpts.path = `${path}?${normalizedSearchParams}`
}
return this[kAgent].dispatch(dispatchOpts, handler)
}
async close () {
this.clearCallHistory()
await this[kAgent].close()
this[kClients].clear()
}
deactivate () {
this[kIsMockActive] = false
}
activate () {
this[kIsMockActive] = true
}
enableNetConnect (matcher) {
if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {
if (Array.isArray(this[kNetConnect])) {
this[kNetConnect].push(matcher)
} else {
this[kNetConnect] = [matcher]
}
} else if (typeof matcher === 'undefined') {
this[kNetConnect] = true
} else {
throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')
}
}
disableNetConnect () {
this[kNetConnect] = false
}
enableCallHistory () {
this[kMockAgentIsCallHistoryEnabled] = true
return this
}
disableCallHistory () {
this[kMockAgentIsCallHistoryEnabled] = false
return this
}
getCallHistory () {
return this[kMockAgentMockCallHistoryInstance]
}
clearCallHistory () {
if (this[kMockAgentMockCallHistoryInstance] !== undefined) {
this[kMockAgentMockCallHistoryInstance].clear()
}
}
// This is required to bypass issues caused by using global symbols - see:
// https://github.com/nodejs/undici/issues/1447
get isMockActive () {
return this[kIsMockActive]
}
[kMockAgentRegisterCallHistory] () {
if (this[kMockAgentMockCallHistoryInstance] === undefined) {
this[kMockAgentMockCallHistoryInstance] = new MockCallHistory()
}
}
[kMockAgentAddCallHistoryLog] (opts) {
if (this[kMockAgentIsCallHistoryEnabled]) {
// additional setup when enableCallHistory class method is used after mockAgent instantiation
this[kMockAgentRegisterCallHistory]()
// add call history log on every call (intercepted or not)
this[kMockAgentMockCallHistoryInstance][kMockCallHistoryAddLog](opts)
}
}
[kMockAgentSet] (origin, dispatcher) {
this[kClients].set(origin, { count: 0, dispatcher })
}
[kFactory] (origin) {
const mockOptions = Object.assign({ agent: this }, this[kOptions])
return this[kOptions] && this[kOptions].connections === 1
? new MockClient(origin, mockOptions)
: new MockPool(origin, mockOptions)
}
[kMockAgentGet] (origin) {
// First check if we can immediately find it
const result = this[kClients].get(origin)
if (result?.dispatcher) {
return result.dispatcher
}
// If the origin is not a string create a dummy parent pool and return to user
if (typeof origin !== 'string') {
const dispatcher = this[kFactory]('http://localhost:9999')
this[kMockAgentSet](origin, dispatcher)
return dispatcher
}
// If we match, create a pool and assign the same dispatches
for (const [keyMatcher, result] of Array.from(this[kClients])) {
if (result && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
const dispatcher = this[kFactory](origin)
this[kMockAgentSet](origin, dispatcher)
dispatcher[kDispatches] = result.dispatcher[kDispatches]
return dispatcher
}
}
}
[kGetNetConnect] () {
return this[kNetConnect]
}
pendingInterceptors () {
const mockAgentClients = this[kClients]
return Array.from(mockAgentClients.entries())
.flatMap(([origin, result]) => result.dispatcher[kDispatches].map(dispatch => ({ ...dispatch, origin })))
.filter(({ pending }) => pending)
}
assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {
const pending = this.pendingInterceptors()
if (pending.length === 0) {
return
}
throw new UndiciError(
pending.length === 1
? `1 interceptor is pending:\n\n${pendingInterceptorsFormatter.format(pending)}`.trim()
: `${pending.length} interceptors are pending:\n\n${pendingInterceptorsFormatter.format(pending)}`.trim()
)
}
}
module.exports = MockAgent
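MockAgent wraps a real Agent and lets individual origins be intercepted, which makes it straightforward to exercise the webhook forwarding logic without a live Gotify instance. A test sketch assuming the usual MockAgent/setGlobalDispatcher exports; the origin, path and payload are placeholders:

const { MockAgent, setGlobalDispatcher, request } = require('undici');

const mockAgent = new MockAgent();
mockAgent.disableNetConnect(); // any unmocked host now fails fast
setGlobalDispatcher(mockAgent);

mockAgent
  .get('https://gotify.example.com')
  .intercept({ path: /^\/message/, method: 'POST' })
  .reply(200, { id: 1 });

async function testForward () {
  const { statusCode } = await request('https://gotify.example.com/message?token=fake', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ title: 't', message: 'm', priority: 5 })
  });
  console.log(statusCode); // 200, answered by the mock, no network traffic
}

testForward();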

Some files were not shown because too many files have changed in this diff