Basic schema test
201
app.js
Normal file
@@ -0,0 +1,201 @@
// ====== 1. Imports and setup ======
import Fastify from 'fastify';
import mongoPlugin from '@fastify/mongodb';
import { ObjectId } from 'mongodb';
import { request as undiciRequest } from 'undici';

const fastify = Fastify({
  logger: true, // enable logging
});


// Your MongoDB connection string
const mongoConnectionString = "mongodb://mongo_c6bNmG:mongo_tyNkXh@83.229.121.44:27017/todo_app_fastify?authSource=admin";

// ====== 2. Register the MongoDB plugin ======
// fastify.register() is the core of Fastify's plugin system.
// Here we tell Fastify: "load the @fastify/mongodb plugin
// and use this connection string to connect to the database."
fastify.register(mongoPlugin, {
  forceClose: true, // force-close the connection when the app shuts down
  url: mongoConnectionString,
});
// Once the plugin is registered, the Fastify instance exposes a `mongo` object (`fastify.mongo`)
// that gives access to the database and to helpers such as ObjectId.

// ====== 3. CRUD routes ======
// A simple "items" collection is used to store the documents.

fastify.get('/', async (request, reply) => {
  return { message: 'Welcome to the Fastify core-features example!' };
});

const createUserSchema = {
  // The body section defines the validation rules for the POST request body
  body: {
    type: 'object',
    required: ['name', 'email'], // the name and email fields are required
    properties: {
      name: { type: 'string' },
      email: { type: 'string', format: 'email' }, // the email format can even be validated
    },
  },
  // The response section defines the response shape; Fastify uses it for fast JSON serialization
  response: {
    201: { // response format for the 201 status code
      type: 'object',
      properties: {
        _id: { type: 'string' },
        name: { type: 'string' },
        email: { type: 'string' },
      },
      // Recommended: state explicitly which fields must be returned
      required: ['_id', 'name', 'email'],
    },
  },
};

// --- CREATE ---
// Create a new user (stored in the "items" collection)
fastify.post('/users', { schema: createUserSchema }, async (request, reply) => {
  // `fastify.mongo.db` points at the database from the connection string (todo_app_fastify)
  const itemsCollection = fastify.mongo.db.collection('items');

  // request.body contains the JSON data sent with the POST request
  const newUsers = request.body;

  // Insert the document with MongoDB's insertOne
  const result = await itemsCollection.insertOne(newUsers);
  fastify.log.info({ msg: 'User created successfully', result: result });

  if (!result.acknowledged) {
    return reply.status(500).send({ message: 'Failed to insert user into database' });
  }

  // Key point: build an object whose structure exactly matches the response.201 schema
  const responsePayload = {
    _id: result.insertedId.toString(), // convert the ObjectId to a string
    name: newUsers.name, // taken from the original request body
    email: newUsers.email, // taken from the original request body
  };

  // responsePayload now matches the response.201 schema exactly
  return reply.status(201).send(responsePayload); // send the schema-compliant object
});


// --- A hook, implemented the recommended way ---
fastify.addHook('onRequest', async (request, reply) => {
  // This hook runs for every incoming request, before body parsing and validation
  fastify.log.info(`Received a ${request.method} request for path: ${request.url}`);

  // --- Forward a notification to Gotify ---
  const gotifyPayload = {
    title: "New request notification",
    message: `Received a ${request.method} request from ${request.ip}, path: ${request.url}`,
    priority: 5,
  };

  fastify.log.info("About to send a notification to Gotify...");

  // Key point: use fire-and-forget; do not await, or the notification would block the request flow
  undiciRequest('https://gotify.zotv.ru/message?token=A1wFaeaj-VskqyF', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(gotifyPayload),
  })
    .then(response => {
      // Optional: log the outcome in the background.
      // undici's request() resolves with an object that exposes `statusCode`.
      if (response.statusCode >= 200 && response.statusCode < 300) {
        fastify.log.info("Gotify notification sent successfully!");
      } else {
        fastify.log.warn(`Gotify notification returned an unexpected status code: ${response.statusCode}`);
      }
      // Consume the response body to avoid leaking the connection
      return response.body.dump();
    })
    .catch(error => {
      // Key point: the error must be caught, otherwise a single failed notification
      // becomes an unhandled promise rejection that can bring down the whole server process
      fastify.log.error({ msg: 'Failed to send Gotify notification', err: error });
    });

  // Key point: there is no await here, so the hook completes immediately
  // and does not wait for the notification result
});

// --- READ ---
// Get all users
fastify.get('/users', async (request, reply) => {
  const itemsCollection = fastify.mongo.db.collection('items');
  const items = await itemsCollection.find({}).toArray();
  return items;
});

// Get a single item (by ID)
fastify.get('/items/:id', async (request, reply) => {
  const itemsCollection = fastify.mongo.db.collection('items');

  // Key point: MongoDB's `_id` is a special ObjectId type, not a plain string.
  // The ObjectId constructor from fastify.mongo converts the id URL parameter.
  const { ObjectId } = fastify.mongo;
  const idToFind = new ObjectId(request.params.id);

  const item = await itemsCollection.findOne({ _id: idToFind });

  if (!item) {
    return reply.status(404).send({ error: 'Item not found' });
  }
  return item;
});

// --- UPDATE ---
// Update an item
fastify.put('/items/:id', async (request, reply) => {
  const itemsCollection = fastify.mongo.db.collection('items');
  const { ObjectId } = fastify.mongo;
  const idToUpdate = new ObjectId(request.params.id);

  const updateData = request.body;

  const result = await itemsCollection.updateOne(
    { _id: idToUpdate }, // filter
    { $set: updateData } // update operation
  );

  if (result.modifiedCount === 0) {
    return reply.status(404).send({ error: 'Item not found or nothing to update' });
  }

  return { message: 'Item updated successfully' };
});

// --- DELETE ---
// Delete an item
fastify.delete('/items/:id', async (request, reply) => {
  const itemsCollection = fastify.mongo.db.collection('items');
  const { ObjectId } = fastify.mongo;
  const idToDelete = new ObjectId(request.params.id);

  const result = await itemsCollection.deleteOne({ _id: idToDelete });

  if (result.deletedCount === 0) {
    return reply.status(404).send({ error: 'Item not found' });
  }

  // 204 No Content is a common response for a successful delete; a 204 reply must not carry a body
  return reply.status(204).send();
});


// ====== 4. Start the server ======
const start = async () => {
  try {
    await fastify.listen({ port: 3000, host: '0.0.0.0' });
  } catch (err) {
    fastify.log.error(err);
    process.exit(1);
  }
};

start();
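Since the commit is titled "Basic schema test", a quick way to exercise the two schemas above without an external HTTP client is Fastify's built-in `inject()`. The snippet below is an illustrative sketch, not part of this commit: it assumes `app.js` is adjusted to export the `fastify` instance (for example via `export default fastify;`) instead of always calling `start()`, and that the MongoDB server in the connection string is reachable so `@fastify/mongodb` can finish registering.

```js
// schema-smoke-test.mjs — illustrative sketch only, not part of this commit.
// Assumes app.js exports the fastify instance (e.g. `export default fastify;`)
// and that the MongoDB server in the connection string is reachable.
import fastify from './app.js';

// A body missing the required "email" field is rejected by the `body`
// schema with a 400 before the route handler runs.
const bad = await fastify.inject({
  method: 'POST',
  url: '/users',
  payload: { name: 'Alice' },
});
console.log(bad.statusCode); // 400

// A valid body passes validation; the reply is serialized with the
// `response.201` schema, so undeclared fields like "extra" are dropped.
const ok = await fastify.inject({
  method: 'POST',
  url: '/users',
  payload: { name: 'Alice', email: 'alice@example.com', extra: 'ignored' },
});
console.log(ok.statusCode, ok.json()); // 201 { _id, name, email }

await fastify.close();
```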
1
node_modules/.bin/pino
generated
vendored
Symbolic link
@@ -0,0 +1 @@
../pino/bin.js
1
node_modules/.bin/semver
generated
vendored
Symbolic link
@@ -0,0 +1 @@
../semver/bin/semver.js
763
node_modules/.package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,763 @@
|
||||
{
|
||||
"name": "test",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@fastify/ajv-compiler": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/ajv-compiler/-/ajv-compiler-4.0.2.tgz",
|
||||
"integrity": "sha512-Rkiu/8wIjpsf46Rr+Fitd3HRP+VsxUFDDeag0hs9L0ksfnwx2g7SPQQTFL0E8Qv+rfXzQOxBJnjUB9ITUDjfWQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ajv": "^8.12.0",
|
||||
"ajv-formats": "^3.0.1",
|
||||
"fast-uri": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/error": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.2.0.tgz",
|
||||
"integrity": "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/fast-json-stringify-compiler": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/fast-json-stringify-compiler/-/fast-json-stringify-compiler-5.0.3.tgz",
|
||||
"integrity": "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-json-stringify": "^6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/forwarded": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/forwarded/-/forwarded-3.0.0.tgz",
|
||||
"integrity": "sha512-kJExsp4JCms7ipzg7SJ3y8DwmePaELHxKYtg+tZow+k0znUTf3cb+npgyqm8+ATZOdmfgfydIebPDWM172wfyA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@fastify/merge-json-schemas": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/merge-json-schemas/-/merge-json-schemas-0.2.1.tgz",
|
||||
"integrity": "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"dequal": "^2.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/mongodb": {
|
||||
"version": "9.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/mongodb/-/mongodb-9.0.2.tgz",
|
||||
"integrity": "sha512-h04HpQ7nVeB2eR4YPJiFWaeFot+E6K6DHP5ymby3WEhExnVMaxd6FUVszDoU+bM3MmK9wtIFgJLUfOKcYU+nKQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fastify-plugin": "^5.0.0",
|
||||
"mongodb": "^6.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/proxy-addr": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/proxy-addr/-/proxy-addr-5.0.0.tgz",
|
||||
"integrity": "sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/forwarded": "^3.0.0",
|
||||
"ipaddr.js": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@mongodb-js/saslprep": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.3.0.tgz",
|
||||
"integrity": "sha512-zlayKCsIjYb7/IdfqxorK5+xUMyi4vOKcFy10wKJYc63NSdKI8mNME+uJqfatkPmOSMMUiojrL58IePKBm3gvQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"sparse-bitfield": "^3.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/webidl-conversions": {
|
||||
"version": "7.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz",
|
||||
"integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/whatwg-url": {
|
||||
"version": "11.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz",
|
||||
"integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/webidl-conversions": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/abstract-logging": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/abstract-logging/-/abstract-logging-2.0.1.tgz",
|
||||
"integrity": "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/ajv": {
|
||||
"version": "8.17.1",
|
||||
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
|
||||
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
"fast-uri": "^3.0.1",
|
||||
"json-schema-traverse": "^1.0.0",
|
||||
"require-from-string": "^2.0.2"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/epoberezkin"
|
||||
}
|
||||
},
|
||||
"node_modules/ajv-formats": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz",
|
||||
"integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ajv": "^8.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"ajv": "^8.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"ajv": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/atomic-sleep": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz",
|
||||
"integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/avvio": {
|
||||
"version": "9.1.0",
|
||||
"resolved": "https://registry.npmjs.org/avvio/-/avvio-9.1.0.tgz",
|
||||
"integrity": "sha512-fYASnYi600CsH/j9EQov7lECAniYiBFiiAtBNuZYLA2leLe9qOvZzqYHFjtIj6gD2VMoMLP14834LFWvr4IfDw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/error": "^4.0.0",
|
||||
"fastq": "^1.17.1"
|
||||
}
|
||||
},
|
||||
"node_modules/bson": {
|
||||
"version": "6.10.4",
|
||||
"resolved": "https://registry.npmjs.org/bson/-/bson-6.10.4.tgz",
|
||||
"integrity": "sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=16.20.1"
|
||||
}
|
||||
},
|
||||
"node_modules/cookie": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz",
|
||||
"integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/dequal": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
|
||||
"integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-decode-uri-component": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz",
|
||||
"integrity": "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/fast-deep-equal": {
|
||||
"version": "3.1.3",
|
||||
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/fast-json-stringify": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-6.0.1.tgz",
|
||||
"integrity": "sha512-s7SJE83QKBZwg54dIbD5rCtzOBVD43V1ReWXXYqBgwCwHLYAAT0RQc/FmrQglXqWPpz6omtryJQOau5jI4Nrvg==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/merge-json-schemas": "^0.2.0",
|
||||
"ajv": "^8.12.0",
|
||||
"ajv-formats": "^3.0.1",
|
||||
"fast-uri": "^3.0.0",
|
||||
"json-schema-ref-resolver": "^2.0.0",
|
||||
"rfdc": "^1.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-querystring": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz",
|
||||
"integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-decode-uri-component": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-redact": {
|
||||
"version": "3.5.0",
|
||||
"resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.5.0.tgz",
|
||||
"integrity": "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-uri": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz",
|
||||
"integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "BSD-3-Clause"
|
||||
},
|
||||
"node_modules/fastify": {
|
||||
"version": "5.6.0",
|
||||
"resolved": "https://registry.npmjs.org/fastify/-/fastify-5.6.0.tgz",
|
||||
"integrity": "sha512-9j2r9TnwNsfGiCKGYT0Voqy244qwcoYM9qvNi/i+F8sNNWDnqUEVuGYNc9GyjldhXmMlJmVPS6gI1LdvjYGRJw==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@fastify/ajv-compiler": "^4.0.0",
|
||||
"@fastify/error": "^4.0.0",
|
||||
"@fastify/fast-json-stringify-compiler": "^5.0.0",
|
||||
"@fastify/proxy-addr": "^5.0.0",
|
||||
"abstract-logging": "^2.0.1",
|
||||
"avvio": "^9.0.0",
|
||||
"fast-json-stringify": "^6.0.0",
|
||||
"find-my-way": "^9.0.0",
|
||||
"light-my-request": "^6.0.0",
|
||||
"pino": "^9.0.0",
|
||||
"process-warning": "^5.0.0",
|
||||
"rfdc": "^1.3.1",
|
||||
"secure-json-parse": "^4.0.0",
|
||||
"semver": "^7.6.0",
|
||||
"toad-cache": "^3.7.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fastify-plugin": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.0.1.tgz",
|
||||
"integrity": "sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/fastq": {
|
||||
"version": "1.19.1",
|
||||
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz",
|
||||
"integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"reusify": "^1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/find-my-way": {
|
||||
"version": "9.3.0",
|
||||
"resolved": "https://registry.npmjs.org/find-my-way/-/find-my-way-9.3.0.tgz",
|
||||
"integrity": "sha512-eRoFWQw+Yv2tuYlK2pjFS2jGXSxSppAs3hSQjfxVKxM5amECzIgYYc1FEI8ZmhSh/Ig+FrKEz43NLRKJjYCZVg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
"fast-querystring": "^1.0.0",
|
||||
"safe-regex2": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/ipaddr.js": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz",
|
||||
"integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/json-schema-ref-resolver": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/json-schema-ref-resolver/-/json-schema-ref-resolver-2.0.1.tgz",
|
||||
"integrity": "sha512-HG0SIB9X4J8bwbxCbnd5FfPEbcXAJYTi1pBJeP/QPON+w8ovSME8iRG+ElHNxZNX2Qh6eYn1GdzJFS4cDFfx0Q==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"dequal": "^2.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/json-schema-traverse": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
|
||||
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/light-my-request": {
|
||||
"version": "6.6.0",
|
||||
"resolved": "https://registry.npmjs.org/light-my-request/-/light-my-request-6.6.0.tgz",
|
||||
"integrity": "sha512-CHYbu8RtboSIoVsHZ6Ye4cj4Aw/yg2oAFimlF7mNvfDV192LR7nDiKtSIfCuLT7KokPSTn/9kfVLm5OGN0A28A==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"cookie": "^1.0.1",
|
||||
"process-warning": "^4.0.0",
|
||||
"set-cookie-parser": "^2.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/light-my-request/node_modules/process-warning": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/process-warning/-/process-warning-4.0.1.tgz",
|
||||
"integrity": "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/memory-pager": {
|
||||
"version": "1.5.0",
|
||||
"resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
|
||||
"integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/mongodb": {
|
||||
"version": "6.20.0",
|
||||
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.20.0.tgz",
|
||||
"integrity": "sha512-Tl6MEIU3K4Rq3TSHd+sZQqRBoGlFsOgNrH5ltAcFBV62Re3Fd+FcaVf8uSEQFOJ51SDowDVttBTONMfoYWrWlQ==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@mongodb-js/saslprep": "^1.3.0",
|
||||
"bson": "^6.10.4",
|
||||
"mongodb-connection-string-url": "^3.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.20.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@aws-sdk/credential-providers": "^3.188.0",
|
||||
"@mongodb-js/zstd": "^1.1.0 || ^2.0.0",
|
||||
"gcp-metadata": "^5.2.0",
|
||||
"kerberos": "^2.0.1",
|
||||
"mongodb-client-encryption": ">=6.0.0 <7",
|
||||
"snappy": "^7.3.2",
|
||||
"socks": "^2.7.1"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@aws-sdk/credential-providers": {
|
||||
"optional": true
|
||||
},
|
||||
"@mongodb-js/zstd": {
|
||||
"optional": true
|
||||
},
|
||||
"gcp-metadata": {
|
||||
"optional": true
|
||||
},
|
||||
"kerberos": {
|
||||
"optional": true
|
||||
},
|
||||
"mongodb-client-encryption": {
|
||||
"optional": true
|
||||
},
|
||||
"snappy": {
|
||||
"optional": true
|
||||
},
|
||||
"socks": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/mongodb-connection-string-url": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz",
|
||||
"integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@types/whatwg-url": "^11.0.2",
|
||||
"whatwg-url": "^14.1.0 || ^13.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/on-exit-leak-free": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz",
|
||||
"integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pino": {
|
||||
"version": "9.11.0",
|
||||
"resolved": "https://registry.npmjs.org/pino/-/pino-9.11.0.tgz",
|
||||
"integrity": "sha512-+YIodBB9sxcWeR8PrXC2K3gEDyfkUuVEITOcbqrfcj+z5QW4ioIcqZfYFbrLTYLsmAwunbS7nfU/dpBB6PZc1g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"atomic-sleep": "^1.0.0",
|
||||
"fast-redact": "^3.1.1",
|
||||
"on-exit-leak-free": "^2.1.0",
|
||||
"pino-abstract-transport": "^2.0.0",
|
||||
"pino-std-serializers": "^7.0.0",
|
||||
"process-warning": "^5.0.0",
|
||||
"quick-format-unescaped": "^4.0.3",
|
||||
"real-require": "^0.2.0",
|
||||
"safe-stable-stringify": "^2.3.1",
|
||||
"sonic-boom": "^4.0.1",
|
||||
"thread-stream": "^3.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"pino": "bin.js"
|
||||
}
|
||||
},
|
||||
"node_modules/pino-abstract-transport": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz",
|
||||
"integrity": "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"split2": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pino-std-serializers": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz",
|
||||
"integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/process-warning": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz",
|
||||
"integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/punycode": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
|
||||
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/quick-format-unescaped": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz",
|
||||
"integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/real-require": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz",
|
||||
"integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 12.13.0"
|
||||
}
|
||||
},
|
||||
"node_modules/require-from-string": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
|
||||
"integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ret": {
|
||||
"version": "0.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ret/-/ret-0.5.0.tgz",
|
||||
"integrity": "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/reusify": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
|
||||
"integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"iojs": ">=1.0.0",
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/rfdc": {
|
||||
"version": "1.4.1",
|
||||
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
|
||||
"integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/safe-regex2": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/safe-regex2/-/safe-regex2-5.0.0.tgz",
|
||||
"integrity": "sha512-YwJwe5a51WlK7KbOJREPdjNrpViQBI3p4T50lfwPuDhZnE3XGVTlGvi+aolc5+RvxDD6bnUmjVsU9n1eboLUYw==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ret": "~0.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/safe-stable-stringify": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz",
|
||||
"integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/secure-json-parse": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.0.0.tgz",
|
||||
"integrity": "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"license": "BSD-3-Clause"
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "7.7.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
|
||||
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
|
||||
"license": "ISC",
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/set-cookie-parser": {
|
||||
"version": "2.7.1",
|
||||
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz",
|
||||
"integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/sonic-boom": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz",
|
||||
"integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"atomic-sleep": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/sparse-bitfield": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
|
||||
"integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"memory-pager": "^1.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/split2": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
|
||||
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": ">= 10.x"
|
||||
}
|
||||
},
|
||||
"node_modules/thread-stream": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz",
|
||||
"integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"real-require": "^0.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/toad-cache": {
|
||||
"version": "3.7.0",
|
||||
"resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz",
|
||||
"integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/tr46": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
|
||||
"integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"punycode": "^2.3.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/undici": {
|
||||
"version": "7.16.0",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-7.16.0.tgz",
|
||||
"integrity": "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=20.18.1"
|
||||
}
|
||||
},
|
||||
"node_modules/webidl-conversions": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
|
||||
"integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/whatwg-url": {
|
||||
"version": "14.2.0",
|
||||
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
|
||||
"integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tr46": "^5.1.0",
|
||||
"webidl-conversions": "^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
2
node_modules/@fastify/ajv-compiler/.gitattributes
generated
vendored
Normal file
@@ -0,0 +1,2 @@
# Set default behavior to automatically convert line endings
* text=auto eol=lf
21
node_modules/@fastify/ajv-compiler/.github/.stale.yml
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
# Number of days of inactivity before an issue becomes stale
|
||||
daysUntilStale: 15
|
||||
# Number of days of inactivity before a stale issue is closed
|
||||
daysUntilClose: 7
|
||||
# Issues with these labels will never be considered stale
|
||||
exemptLabels:
|
||||
- "discussion"
|
||||
- "feature request"
|
||||
- "bug"
|
||||
- "help wanted"
|
||||
- "plugin suggestion"
|
||||
- "good first issue"
|
||||
# Label to use when marking an issue as stale
|
||||
staleLabel: stale
|
||||
# Comment to post when marking an issue as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
for your contributions.
|
||||
# Comment to post when closing a stale issue. Set to `false` to disable
|
||||
closeComment: false
|
||||
13
node_modules/@fastify/ajv-compiler/.github/dependabot.yml
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
8
node_modules/@fastify/ajv-compiler/.github/tests_checker.yml
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
comment: |
|
||||
Hello! Thank you for contributing!
|
||||
It appears that you have changed the code, but the tests that verify your change are missing. Could you please add them?
|
||||
fileExtensions:
|
||||
- '.ts'
|
||||
- '.js'
|
||||
|
||||
testDir: 'test'
|
||||
26
node_modules/@fastify/ajv-compiler/.github/workflows/ci.yml
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- next
|
||||
- 'v*'
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
|
||||
env:
|
||||
TZ: 'UTC'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
uses: fastify/workflows/.github/workflows/plugins-ci.yml@v5
|
||||
with:
|
||||
lint: true
|
||||
license-check: true
|
||||
2
node_modules/@fastify/ajv-compiler/.taprc
generated
vendored
Normal file
@@ -0,0 +1,2 @@
files:
  - test/**/*.test.js
24
node_modules/@fastify/ajv-compiler/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) The Fastify Team
|
||||
|
||||
The Fastify team members are listed at https://github.com/fastify/fastify#team
|
||||
and in the README file.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
237
node_modules/@fastify/ajv-compiler/README.md
generated
vendored
Normal file
@@ -0,0 +1,237 @@
|
||||
# @fastify/ajv-compiler
|
||||
|
||||
[](https://github.com/fastify/ajv-compiler/actions/workflows/ci.yml)
|
||||
[](https://www.npmjs.com/package/@fastify/ajv-compiler)
|
||||
[](https://github.com/neostandard/neostandard)
|
||||
|
||||
This module manages the [`ajv`](https://www.npmjs.com/package/ajv) instances for the Fastify framework.
|
||||
It isolates the `ajv` dependency so that the AJV version is not tightly coupled to the Fastify version.
|
||||
This allows the user to decide which version of AJV to use in their Fastify-based application.
|
||||
|
||||
|
||||
## Versions
|
||||
|
||||
| `@fastify/ajv-compiler` | `ajv` | Default in `fastify` |
|
||||
|------------------------:|------:|---------------------:|
|
||||
| v4.x | v8.x | ^5.x |
|
||||
| v3.x | v8.x | ^4.x |
|
||||
| v2.x | v8.x | - |
|
||||
| v1.x | v6.x | ^3.14 |
|
||||
|
||||
### AJV Configuration
|
||||
|
||||
The Fastify's default [`ajv` options](https://github.com/ajv-validator/ajv/tree/v6#options) are:
|
||||
|
||||
```js
|
||||
{
|
||||
coerceTypes: 'array',
|
||||
useDefaults: true,
|
||||
removeAdditional: true,
|
||||
uriResolver: require('fast-uri'),
|
||||
addUsedSchema: false,
|
||||
// Explicitly set allErrors to `false`.
|
||||
// When set to `true`, a DoS attack is possible.
|
||||
allErrors: false
|
||||
}
|
||||
```
|
||||
|
||||
Moreover, the [`ajv-formats`](https://www.npmjs.com/package/ajv-formats) module is included by default.
|
||||
If you need to customize it, check the _usage_ section below.
|
||||
|
||||
To customize the `ajv` options, see how in the [Fastify documentation](https://fastify.dev/docs/latest/Reference/Server/#ajv).
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
This module is already used as default by Fastify.
|
||||
If you need to provide your server instance with a different version, refer to [the Fastify docs](https://fastify.dev/docs/latest/Reference/Server/#schemacontroller).
|
||||
|
||||
### Customize the `ajv-formats` plugin
|
||||
|
||||
The `format` keyword is not part of the official `ajv` module since v7. To use it, you need to install the `ajv-formats` module and this module
|
||||
does it for you with the default configuration.
|
||||
|
||||
If you need to configure the `ajv-formats` plugin you can do it using the standard Fastify configuration:
|
||||
|
||||
```js
|
||||
const app = fastify({
|
||||
ajv: {
|
||||
plugins: [[require('ajv-formats'), { mode: 'fast' }]]
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
In this way, your setup will have precedence over the `@fastify/ajv-compiler` default configuration.
|
||||
|
||||
### Customize the `ajv` instance
|
||||
|
||||
If you need to customize the `ajv` instance and take full control of its configuration, you can do it by
|
||||
using the `onCreate` option in the Fastify configuration that accepts a synchronous function that receives the `ajv` instance:
|
||||
|
||||
```js
|
||||
const app = fastify({
|
||||
ajv: {
|
||||
onCreate: (ajv) => {
|
||||
// Modify the ajv instance as you need.
|
||||
ajv.addFormat('myFormat', (data) => typeof data === 'string')
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
### Fastify with JTD
|
||||
|
||||
The [JSON Type Definition](https://jsontypedef.com/) feature is supported by AJV v8.x and you can benefit from it in your Fastify application.
|
||||
|
||||
With Fastify v3.20.x and higher, you can use the `@fastify/ajv-compiler` module to load JSON Type Definitions like so:
|
||||
|
||||
```js
|
||||
const factory = require('@fastify/ajv-compiler')()
|
||||
|
||||
const app = fastify({
|
||||
jsonShorthand: false,
|
||||
ajv: {
|
||||
customOptions: { }, // additional JTD options
|
||||
mode: 'JTD'
|
||||
},
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildValidator: factory
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
The default AJV JTD options are the same as [Fastify's default options](#AJV-Configuration).
|
||||
|
||||
#### Fastify with JTD and serialization
|
||||
|
||||
You can use JTD Schemas to serialize your response object too:
|
||||
|
||||
```js
|
||||
const factoryValidator = require('@fastify/ajv-compiler')()
|
||||
const factorySerializer = require('@fastify/ajv-compiler')({ jtdSerializer: true })
|
||||
|
||||
const app = fastify({
|
||||
jsonShorthand: false,
|
||||
ajv: {
|
||||
customOptions: { }, // additional JTD options
|
||||
mode: 'JTD'
|
||||
},
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildValidator: factoryValidator,
|
||||
buildSerializer: factorySerializer
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
### AJV Standalone
|
||||
|
||||
AJV v8 introduced a [standalone feature](https://ajv.js.org/standalone.html) that lets you pre-compile your schemas and use them in your application for a faster startup.
|
||||
|
||||
To use this feature, you must be aware of the following:
|
||||
|
||||
1. You must generate and save the application's compiled schemas.
|
||||
2. Read the compiled schemas from the file and provide them back to your Fastify application.
|
||||
|
||||
|
||||
#### Generate and save the compiled schemas
|
||||
|
||||
Fastify helps you to generate the validation schemas functions and it is your choice to save them where you want.
|
||||
To accomplish this, you must use a new compiler: `StandaloneValidator`.
|
||||
|
||||
You must provide 2 parameters to this compiler:
|
||||
|
||||
- `readMode: false`: a boolean to indicate that you want to generate the schemas functions string.
|
||||
- `storeFunction`" a sync function that must store the source code of the schemas functions. You may provide an async function too, but you must manage errors.
|
||||
|
||||
When `readMode: false`, **the compiler is meant to be used in development ONLY**.
|
||||
|
||||
|
||||
```js
|
||||
const { StandaloneValidator } = require('@fastify/ajv-compiler')
|
||||
const factory = StandaloneValidator({
|
||||
readMode: false,
|
||||
storeFunction (routeOpts, schemaValidationCode) {
|
||||
// routeOpts is like: { schema, method, url, httpPart }
|
||||
// schemaValidationCode is a string source code that is the compiled schema function
|
||||
const fileName = generateFileName(routeOpts)
|
||||
fs.writeFileSync(path.join(__dirname, fileName), schemaValidationCode)
|
||||
}
|
||||
})
|
||||
|
||||
const app = fastify({
|
||||
jsonShorthand: false,
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildValidator: factory
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// ... add all your routes with schemas ...
|
||||
|
||||
app.ready().then(() => {
|
||||
// at this stage all your schemas are compiled and stored in the file system
|
||||
// now it is important to turn off the readMode
|
||||
})
|
||||
```
|
||||
|
||||
#### Read the compiled schemas functions
|
||||
|
||||
At this stage, you should have a file for every route's schema.
|
||||
To use them, you must use the `StandaloneValidator` with the parameters:
|
||||
|
||||
- `readMode: true`: a boolean to indicate that you want to read and use the schemas functions string.
|
||||
- `restoreFunction`" a sync function that must return a function to validate the route.
|
||||
|
||||
Important keep away before you continue reading the documentation:
|
||||
|
||||
- when you use the `readMode: true`, the application schemas are not compiled (they are ignored). So, if you change your schemas, you must recompile them!
|
||||
- as you can see, you must relate the route's schema to the file name using the `routeOpts` object. You may use the `routeOpts.schema.$id` field to do so, it is up to you to define a unique schema identifier.
|
||||
|
||||
```js
|
||||
const { StandaloneValidator } = require('@fastify/ajv-compiler')
|
||||
const factory = StandaloneValidator({
|
||||
readMode: true,
|
||||
restoreFunction (routeOpts) {
|
||||
// routeOpts is like: { schema, method, url, httpPart }
|
||||
const fileName = generateFileName(routeOpts)
|
||||
return require(path.join(__dirname, fileName))
|
||||
}
|
||||
})
|
||||
|
||||
const app = fastify({
|
||||
jsonShorthand: false,
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildValidator: factory
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// ... add all your routes with schemas as before...
|
||||
|
||||
app.listen({ port: 3000 })
|
||||
```
|
||||
|
||||
### How it works
|
||||
|
||||
This module provides a factory function to produce [Validator Compilers](https://fastify.dev/docs/latest/Reference/Server/#validatorcompiler) functions.
|
||||
|
||||
The Fastify factory function is just one per server instance and it is called for every encapsulated context created by the application through the `fastify.register()` call.
|
||||
|
||||
Every Validator Compiler produced has a dedicated AJV instance, so this factory will try to produce as less as possible AJV instances to reduce the memory footprint and the startup time.
|
||||
|
||||
The variables involved to choose if a Validator Compiler can be reused are:
|
||||
|
||||
- the AJV configuration: it is [one per server](https://fastify.dev/docs/latest/Reference/Server/#ajv)
|
||||
- the external JSON schemas: once a new schema is added to a fastify's context, calling `fastify.addSchema()`, it will cause a new AJV initialization
|
||||
|
||||
|
||||
## License
|
||||
|
||||
Licensed under [MIT](./LICENSE).
|
||||
37
node_modules/@fastify/ajv-compiler/benchmark/small-object.mjs
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
import cronometro from 'cronometro'
|
||||
|
||||
import fjs from 'fast-json-stringify'
|
||||
import AjvCompiler from '../index.js'
|
||||
|
||||
const fjsSerialize = buildFJSSerializerFunction({
|
||||
type: 'object',
|
||||
properties: {
|
||||
hello: { type: 'string' },
|
||||
name: { type: 'string' }
|
||||
}
|
||||
})
|
||||
const ajvSerialize = buildAJVSerializerFunction({
|
||||
properties: {
|
||||
hello: { type: 'string' },
|
||||
name: { type: 'string' }
|
||||
}
|
||||
})
|
||||
|
||||
await cronometro({
|
||||
'fast-json-stringify': function () {
|
||||
fjsSerialize({ hello: 'Ciao', name: 'Manuel' })
|
||||
},
|
||||
'ajv serializer': function () {
|
||||
ajvSerialize({ hello: 'Ciao', name: 'Manuel' })
|
||||
}
|
||||
})
|
||||
|
||||
function buildFJSSerializerFunction (schema) {
|
||||
return fjs(schema)
|
||||
}
|
||||
|
||||
function buildAJVSerializerFunction (schema) {
|
||||
const factory = AjvCompiler({ jtdSerializer: true })
|
||||
const compiler = factory({}, { customOptions: {} })
|
||||
return compiler({ schema })
|
||||
}
|
||||
6
node_modules/@fastify/ajv-compiler/eslint.config.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
'use strict'

module.exports = require('neostandard')({
  ignores: require('neostandard').resolveIgnoresFromGitignore(),
  ts: true
})
53
node_modules/@fastify/ajv-compiler/index.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
'use strict'
|
||||
|
||||
const AjvReference = Symbol.for('fastify.ajv-compiler.reference')
|
||||
const ValidatorCompiler = require('./lib/validator-compiler')
|
||||
const SerializerCompiler = require('./lib/serializer-compiler')
|
||||
|
||||
function AjvCompiler (opts) {
|
||||
const validatorPool = new Map()
|
||||
const serializerPool = new Map()
|
||||
|
||||
if (opts && opts.jtdSerializer === true) {
|
||||
return function buildSerializerFromPool (externalSchemas, serializerOpts) {
|
||||
const uniqueAjvKey = getPoolKey({}, serializerOpts)
|
||||
if (serializerPool.has(uniqueAjvKey)) {
|
||||
return serializerPool.get(uniqueAjvKey)
|
||||
}
|
||||
|
||||
const compiler = new SerializerCompiler(externalSchemas, serializerOpts)
|
||||
const ret = compiler.buildSerializerFunction.bind(compiler)
|
||||
serializerPool.set(uniqueAjvKey, ret)
|
||||
|
||||
return ret
|
||||
}
|
||||
}
|
||||
|
||||
return function buildCompilerFromPool (externalSchemas, options) {
|
||||
const uniqueAjvKey = getPoolKey(externalSchemas, options.customOptions)
|
||||
if (validatorPool.has(uniqueAjvKey)) {
|
||||
return validatorPool.get(uniqueAjvKey)
|
||||
}
|
||||
|
||||
const compiler = new ValidatorCompiler(externalSchemas, options)
|
||||
const ret = compiler.buildValidatorFunction.bind(compiler)
|
||||
validatorPool.set(uniqueAjvKey, ret)
|
||||
|
||||
if (options.customOptions.code !== undefined) {
|
||||
ret[AjvReference] = compiler
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
}
|
||||
|
||||
function getPoolKey (externalSchemas, options) {
|
||||
const externals = JSON.stringify(externalSchemas)
|
||||
const ajvConfig = JSON.stringify(options)
|
||||
return `${externals}${ajvConfig}`
|
||||
}
|
||||
module.exports = AjvCompiler
|
||||
module.exports.default = AjvCompiler
|
||||
module.exports.AjvCompiler = AjvCompiler
|
||||
module.exports.AjvReference = AjvReference
|
||||
module.exports.StandaloneValidator = require('./standalone')
|
||||
14
node_modules/@fastify/ajv-compiler/lib/default-ajv-options.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const fastUri = require('fast-uri')
|
||||
|
||||
module.exports = Object.freeze({
|
||||
coerceTypes: 'array',
|
||||
useDefaults: true,
|
||||
removeAdditional: true,
|
||||
uriResolver: fastUri,
|
||||
addUsedSchema: false,
|
||||
// Explicitly set allErrors to `false`.
|
||||
// When set to `true`, a DoS attack is possible.
|
||||
allErrors: false
|
||||
})
|
||||
27
node_modules/@fastify/ajv-compiler/lib/serializer-compiler.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
'use strict'
|
||||
|
||||
const AjvJTD = require('ajv/dist/jtd')
|
||||
|
||||
const defaultAjvOptions = require('./default-ajv-options')
|
||||
|
||||
class SerializerCompiler {
|
||||
constructor (_externalSchemas, options) {
|
||||
this.ajv = new AjvJTD(Object.assign({}, defaultAjvOptions, options))
|
||||
|
||||
/**
|
||||
* https://ajv.js.org/json-type-definition.html#ref-form
|
||||
* Unlike JSON Schema, JTD does not allow to reference:
|
||||
* - any schema fragment other than root level definitions member
|
||||
* - root of the schema - there is another way to define a self-recursive schema (see Example 2)
|
||||
* - another schema file (but you can still combine schemas from multiple files using JavaScript).
|
||||
*
|
||||
* So we ignore the externalSchemas parameter.
|
||||
*/
|
||||
}
|
||||
|
||||
buildSerializerFunction ({ schema/*, method, url, httpStatus */ }) {
|
||||
return this.ajv.compileSerializer(schema)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = SerializerCompiler
|
||||
58
node_modules/@fastify/ajv-compiler/lib/validator-compiler.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
'use strict'
|
||||
|
||||
const Ajv = require('ajv').default
|
||||
const AjvJTD = require('ajv/dist/jtd')
|
||||
|
||||
const defaultAjvOptions = require('./default-ajv-options')
|
||||
|
||||
class ValidatorCompiler {
|
||||
constructor (externalSchemas, options) {
|
||||
// This instance of Ajv is private
|
||||
// it should not be customized or used
|
||||
if (options.mode === 'JTD') {
|
||||
this.ajv = new AjvJTD(Object.assign({}, defaultAjvOptions, options.customOptions))
|
||||
} else {
|
||||
this.ajv = new Ajv(Object.assign({}, defaultAjvOptions, options.customOptions))
|
||||
}
|
||||
|
||||
let addFormatPlugin = true
|
||||
if (options.plugins && options.plugins.length > 0) {
|
||||
for (const plugin of options.plugins) {
|
||||
if (Array.isArray(plugin)) {
|
||||
addFormatPlugin = addFormatPlugin && plugin[0].name !== 'formatsPlugin'
|
||||
plugin[0](this.ajv, plugin[1])
|
||||
} else {
|
||||
addFormatPlugin = addFormatPlugin && plugin.name !== 'formatsPlugin'
|
||||
plugin(this.ajv)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (addFormatPlugin) {
|
||||
require('ajv-formats')(this.ajv)
|
||||
}
|
||||
|
||||
options.onCreate?.(this.ajv)
|
||||
|
||||
const sourceSchemas = Object.values(externalSchemas)
|
||||
for (const extSchema of sourceSchemas) {
|
||||
this.ajv.addSchema(extSchema)
|
||||
}
|
||||
}
|
||||
|
||||
buildValidatorFunction ({ schema/*, method, url, httpPart */ }) {
|
||||
// Ajv does not support compiling two schemas with the same
|
||||
// id inside the same instance. Therefore if we have already
|
||||
// compiled the schema with the given id, we just return it.
|
||||
if (schema.$id) {
|
||||
const stored = this.ajv.getSchema(schema.$id)
|
||||
if (stored) {
|
||||
return stored
|
||||
}
|
||||
}
|
||||
|
||||
return this.ajv.compile(schema)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ValidatorCompiler
|
||||
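The plugin loop above accepts either a plain plugin function or a `[plugin, options]` pair, and skips the default `ajv-formats` registration when a formats plugin is already listed. A sketch of passing plugins through Fastify's `ajv` option, based on the package's plugin tests; the option values are illustrative:

const fastify = require('fastify')
const AjvCompiler = require('@fastify/ajv-compiler')
const ajvFormats = require('ajv-formats')
const ajvErrors = require('ajv-errors')

const factory = AjvCompiler()

const app = fastify({
  ajv: {
    // ajv-errors requires allErrors: true to work
    customOptions: { allErrors: true, removeAdditional: false },
    plugins: [
      [ajvFormats, { mode: 'fast' }], // plugin with options
      ajvErrors                       // plugin without options
    ]
  },
  schemaController: {
    compilersFactory: { buildValidator: factory }
  }
})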
84  node_modules/@fastify/ajv-compiler/package.json  generated  vendored  Normal file
44  node_modules/@fastify/ajv-compiler/standalone.js  generated  vendored  Normal file
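The standalone.js module listed above wraps the validator factory for ahead-of-time compilation: in write mode it hands the generated Ajv source code to a user-supplied storeFunction, and in read mode it only calls the user-supplied restoreFunction instead of compiling schemas at startup. A hedged sketch based on the package's standalone tests; the file path is illustrative:

const fs = require('node:fs')
const path = require('node:path')
const { StandaloneValidator } = require('@fastify/ajv-compiler')

// Write mode: compile the schemas once and persist the generated validation code.
const writeFactory = StandaloneValidator({
  readMode: false,
  storeFunction (routeOpts, schemaValidationCode) {
    fs.writeFileSync(path.join(__dirname, 'validate.js'), schemaValidationCode)
  }
})

// Read mode: skip compilation and load the previously stored code.
const readFactory = StandaloneValidator({
  readMode: true,
  restoreFunction (routeOpts) {
    return require(path.join(__dirname, 'validate.js'))
  }
})

Either factory is then passed as `buildValidator` inside `schemaController.compilersFactory`, as shown in the package's standalone tests.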
0  node_modules/@fastify/ajv-compiler/test/.gitkeep  generated  vendored  Normal file
59  node_modules/@fastify/ajv-compiler/test/duplicated-id-compile.test.js  generated  vendored  Normal file
307  node_modules/@fastify/ajv-compiler/test/index.test.js  generated  vendored  Normal file
264  node_modules/@fastify/ajv-compiler/test/plugins.test.js  generated  vendored  Normal file
279  node_modules/@fastify/ajv-compiler/test/serialization.test.js  generated  vendored  Normal file
203  node_modules/@fastify/ajv-compiler/test/standalone.test.js  generated  vendored  Normal file
72  node_modules/@fastify/ajv-compiler/types/index.d.ts  generated  vendored  Normal file
226  node_modules/@fastify/ajv-compiler/types/index.test-d.ts  generated  vendored  Normal file
2  node_modules/@fastify/error/.gitattributes  generated  vendored  Normal file
@@ -0,0 +1,2 @@
# Set default behavior to automatically convert line endings
* text=auto eol=lf
13  node_modules/@fastify/error/.github/dependabot.yml  generated  vendored  Normal file
28  node_modules/@fastify/error/.github/workflows/ci.yml  generated  vendored  Normal file
21  node_modules/@fastify/error/LICENSE  generated  vendored  Normal file
140  node_modules/@fastify/error/README.md  generated  vendored  Normal file
9
node_modules/@fastify/error/benchmarks/create.js
generated
vendored
Normal file
9
node_modules/@fastify/error/benchmarks/create.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
'use strict'
|
||||
|
||||
const benchmark = require('benchmark')
|
||||
const createError = require('..')
|
||||
|
||||
new benchmark.Suite()
|
||||
.add('create FastifyError', function () { createError('CODE', 'Not available') }, { minSamples: 100 })
|
||||
.on('cycle', function onCycle (event) { console.log(String(event.target)) })
|
||||
.run({ async: false })
|
||||
18
node_modules/@fastify/error/benchmarks/instantiate.js
generated
vendored
Normal file
18
node_modules/@fastify/error/benchmarks/instantiate.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
'use strict'
|
||||
|
||||
const benchmark = require('benchmark')
|
||||
const createError = require('..')
|
||||
|
||||
const FastifyError = createError('CODE', 'Not available')
|
||||
const FastifyError1 = createError('CODE', 'Not %s available')
|
||||
const FastifyError2 = createError('CODE', 'Not %s available %s')
|
||||
|
||||
const cause = new Error('cause')
|
||||
new benchmark.Suite()
|
||||
.add('instantiate Error', function () { new Error() }, { minSamples: 100 }) // eslint-disable-line no-new
|
||||
.add('instantiate FastifyError', function () { new FastifyError() }, { minSamples: 100 }) // eslint-disable-line no-new
|
||||
.add('instantiate FastifyError arg 1', function () { new FastifyError1('q') }, { minSamples: 100 }) // eslint-disable-line no-new
|
||||
.add('instantiate FastifyError arg 2', function () { new FastifyError2('qq', 'ss') }, { minSamples: 100 }) // eslint-disable-line no-new
|
||||
.add('instantiate FastifyError cause', function () { new FastifyError2({ cause }) }, { minSamples: 100 }) // eslint-disable-line no-new
|
||||
.on('cycle', function onCycle (event) { console.log(String(event.target)) })
|
||||
.run({ async: false })
|
||||
13
node_modules/@fastify/error/benchmarks/no-stack.js
generated
vendored
Normal file
13
node_modules/@fastify/error/benchmarks/no-stack.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
'use strict'
|
||||
|
||||
const benchmark = require('benchmark')
|
||||
const createError = require('..')
|
||||
|
||||
const FastifyError = createError('CODE', 'Not available')
|
||||
Error.stackTraceLimit = 0
|
||||
|
||||
new benchmark.Suite()
|
||||
.add('no-stack instantiate Error', function () { new Error() }, { minSamples: 100 }) // eslint-disable-line no-new
|
||||
.add('no-stack instantiate FastifyError', function () { new FastifyError() }, { minSamples: 100 }) // eslint-disable-line no-new
|
||||
.on('cycle', function onCycle (event) { console.log(String(event.target)) })
|
||||
.run({ async: false })
|
||||
11
node_modules/@fastify/error/benchmarks/toString.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
'use strict'

const benchmark = require('benchmark')
const createError = require('..')

const FastifyError = createError('CODE', 'Not available')

new benchmark.Suite()
  .add('FastifyError toString', function () { new FastifyError().toString() }, { minSamples: 100 })
  .on('cycle', function onCycle (event) { console.log(String(event.target)) })
  .run({ async: false })
6
node_modules/@fastify/error/eslint.config.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
'use strict'

module.exports = require('neostandard')({
  ignores: require('neostandard').resolveIgnoresFromGitignore(),
  ts: true
})
100
node_modules/@fastify/error/index.js
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
'use strict'
|
||||
|
||||
const { format } = require('node:util')
|
||||
|
||||
function toString () {
|
||||
return `${this.name} [${this.code}]: ${this.message}`
|
||||
}
|
||||
|
||||
const FastifyGenericErrorSymbol = Symbol.for('fastify-error-generic')
|
||||
|
||||
function createError (code, message, statusCode = 500, Base = Error, captureStackTrace = createError.captureStackTrace) {
|
||||
const shouldCreateFastifyGenericError = code === FastifyGenericErrorSymbol
|
||||
|
||||
if (shouldCreateFastifyGenericError) {
|
||||
code = 'FST_ERR'
|
||||
}
|
||||
|
||||
if (!code) throw new Error('Fastify error code must not be empty')
|
||||
if (!message) throw new Error('Fastify error message must not be empty')
|
||||
|
||||
code = code.toUpperCase()
|
||||
!statusCode && (statusCode = undefined)
|
||||
|
||||
const FastifySpecificErrorSymbol = Symbol.for(`fastify-error ${code}`)
|
||||
|
||||
function FastifyError (...args) {
|
||||
if (!new.target) {
|
||||
return new FastifyError(...args)
|
||||
}
|
||||
|
||||
this.code = code
|
||||
this.name = 'FastifyError'
|
||||
this.statusCode = statusCode
|
||||
|
||||
const lastElement = args.length - 1
|
||||
if (lastElement !== -1 && args[lastElement] && typeof args[lastElement] === 'object' && 'cause' in args[lastElement]) {
|
||||
this.cause = args.pop().cause
|
||||
}
|
||||
|
||||
this.message = format(message, ...args)
|
||||
|
||||
Error.stackTraceLimit && captureStackTrace && Error.captureStackTrace(this, FastifyError)
|
||||
}
|
||||
|
||||
FastifyError.prototype = Object.create(Base.prototype, {
|
||||
constructor: {
|
||||
value: FastifyError,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
configurable: true
|
||||
},
|
||||
[FastifyGenericErrorSymbol]: {
|
||||
value: true,
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
configurable: false
|
||||
},
|
||||
[FastifySpecificErrorSymbol]: {
|
||||
value: true,
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
configurable: false
|
||||
}
|
||||
})
|
||||
|
||||
if (shouldCreateFastifyGenericError) {
|
||||
Object.defineProperty(FastifyError, Symbol.hasInstance, {
|
||||
value (instance) {
|
||||
return instance && instance[FastifyGenericErrorSymbol]
|
||||
},
|
||||
configurable: false,
|
||||
writable: false,
|
||||
enumerable: false
|
||||
})
|
||||
} else {
|
||||
Object.defineProperty(FastifyError, Symbol.hasInstance, {
|
||||
value (instance) {
|
||||
return instance && instance[FastifySpecificErrorSymbol]
|
||||
},
|
||||
configurable: false,
|
||||
writable: false,
|
||||
enumerable: false
|
||||
})
|
||||
}
|
||||
|
||||
FastifyError.prototype[Symbol.toStringTag] = 'Error'
|
||||
|
||||
FastifyError.prototype.toString = toString
|
||||
|
||||
return FastifyError
|
||||
}
|
||||
|
||||
createError.captureStackTrace = true
|
||||
|
||||
const FastifyErrorConstructor = createError(FastifyGenericErrorSymbol, 'Fastify Error', 500, Error)
|
||||
|
||||
module.exports = createError
|
||||
module.exports.FastifyError = FastifyErrorConstructor
|
||||
module.exports.default = createError
|
||||
module.exports.createError = createError
|
||||
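The `createError` implementation above pops a trailing object with a `cause` property off the argument list before formatting the message with `util.format`. A minimal usage sketch of that behavior; the error code, status code, and interpolation values below are illustrative, not part of the module:

```js
const createError = require('@fastify/error')

// Hypothetical error class, for illustration only.
const UpstreamError = createError('FST_DEMO_UPSTREAM', 'Upstream %s failed', 502)

const cause = new Error('socket hang up')
const err = new UpstreamError('payments', { cause })

console.log(err.message)         // 'Upstream payments failed'
console.log(err.code)            // 'FST_DEMO_UPSTREAM'
console.log(err.statusCode)      // 502
console.log(err.cause === cause) // true
console.log(err.toString())      // 'FastifyError [FST_DEMO_UPSTREAM]: Upstream payments failed'
```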
75
node_modules/@fastify/error/package.json
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"name": "@fastify/error",
|
||||
"version": "4.2.0",
|
||||
"description": "A small utility, used by Fastify itself, for generating consistent error objects across your codebase and plugins.",
|
||||
"main": "index.js",
|
||||
"type": "commonjs",
|
||||
"types": "types/index.d.ts",
|
||||
"scripts": {
|
||||
"lint": "eslint",
|
||||
"lint:fix": "eslint --fix",
|
||||
"test": "npm run test:unit && npm run test:typescript",
|
||||
"test:unit": "c8 --100 node --test",
|
||||
"test:typescript": "tsd"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/fastify/fastify-error.git"
|
||||
},
|
||||
"keywords": [
|
||||
"fastify",
|
||||
"error",
|
||||
"utility",
|
||||
"plugin"
|
||||
],
|
||||
"author": "Tomas Della Vedova",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Matteo Collina",
|
||||
"email": "hello@matteocollina.com"
|
||||
},
|
||||
{
|
||||
"name": "James Sumners",
|
||||
"url": "https://james.sumners.info"
|
||||
},
|
||||
{
|
||||
"name": "Aras Abbasi",
|
||||
"email": "aras.abbasi@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Frazer Smith",
|
||||
"email": "frazer.dev@icloud.com",
|
||||
"url": "https://github.com/fdawgs"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/fastify/fastify-error/issues"
|
||||
},
|
||||
"homepage": "https://github.com/fastify/fastify-error#readme",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"devDependencies": {
|
||||
"benchmark": "^2.1.4",
|
||||
"c8": "^10.1.2",
|
||||
"eslint": "^9.17.0",
|
||||
"neostandard": "^0.12.0",
|
||||
"tsd": "^0.32.0"
|
||||
},
|
||||
"tsd": {
|
||||
"compilerOptions": {
|
||||
"esModuleInterop": true
|
||||
}
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
}
|
||||
}
|
||||
232
node_modules/@fastify/error/test/index.test.js
generated
vendored
Normal file
@@ -0,0 +1,232 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('node:test')
|
||||
const { createError, FastifyError } = require('..')
|
||||
|
||||
test('Create error with zero parameter', (t) => {
|
||||
t.plan(6)
|
||||
|
||||
const NewError = createError('CODE', 'Not available')
|
||||
const err = new NewError()
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'Not available')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create error with 1 parameter', (t) => {
|
||||
t.plan(6)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s')
|
||||
const err = new NewError('alice')
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.message, 'hey alice')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create error with 1 parameter set to undefined', (t) => {
|
||||
t.plan(1)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s')
|
||||
const err = new NewError(undefined)
|
||||
t.assert.equal(err.message, 'hey undefined')
|
||||
})
|
||||
|
||||
test('Create error with 2 parameters', (t) => {
|
||||
t.plan(6)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s, I like your %s')
|
||||
const err = new NewError('alice', 'attitude')
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'hey alice, I like your attitude')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create error with 2 parameters set to undefined', (t) => {
|
||||
t.plan(1)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s, I like your %s')
|
||||
const err = new NewError(undefined, undefined)
|
||||
t.assert.equal(err.message, 'hey undefined, I like your undefined')
|
||||
})
|
||||
|
||||
test('Create error with 3 parameters', (t) => {
|
||||
t.plan(6)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s, I like your %s %s')
|
||||
const err = new NewError('alice', 'attitude', 'see you')
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'hey alice, I like your attitude see you')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create error with 3 parameters set to undefined', (t) => {
|
||||
t.plan(4)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s, I like your %s %s')
|
||||
const err = new NewError(undefined, undefined, undefined)
|
||||
t.assert.equal(err.message, 'hey undefined, I like your undefined undefined')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create error with 4 parameters set to undefined', (t) => {
|
||||
t.plan(4)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s, I like your %s %s and %s')
|
||||
const err = new NewError(undefined, undefined, undefined, undefined)
|
||||
t.assert.equal(
|
||||
err.message,
|
||||
'hey undefined, I like your undefined undefined and undefined'
|
||||
)
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create error with no statusCode property', (t) => {
|
||||
t.plan(6)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s', 0)
|
||||
const err = new NewError('dude')
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'hey dude')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, undefined)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Should throw when error code has no fastify code', (t) => {
|
||||
t.plan(1)
|
||||
t.assert.throws(
|
||||
() => createError(),
|
||||
new Error('Fastify error code must not be empty')
|
||||
)
|
||||
})
|
||||
|
||||
test('Should throw when error code has no message', (t) => {
|
||||
t.assert.throws(
|
||||
() => createError('code'),
|
||||
new Error('Fastify error message must not be empty')
|
||||
)
|
||||
})
|
||||
|
||||
test('Create error with different base', (t) => {
|
||||
t.plan(7)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s', 500, TypeError)
|
||||
const err = new NewError('dude')
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ok(err instanceof TypeError)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'hey dude')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create error with different base (no stack) (global)', (t) => {
|
||||
t.plan(7)
|
||||
|
||||
createError.captureStackTrace = false
|
||||
const NewError = createError('CODE', 'hey %s', 500, TypeError)
|
||||
const err = new NewError('dude')
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ok(err instanceof TypeError)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'hey dude')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.equal(err.stack, undefined)
|
||||
createError.captureStackTrace = true
|
||||
})
|
||||
|
||||
test('Create error with different base (no stack) (parameter)', (t) => {
|
||||
t.plan(7)
|
||||
|
||||
const NewError = createError('CODE', 'hey %s', 500, TypeError, false)
|
||||
const err = new NewError('dude')
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ok(err instanceof TypeError)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'hey dude')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.equal(err.stack, undefined)
|
||||
})
|
||||
|
||||
test('FastifyError.toString returns code', (t) => {
|
||||
t.plan(1)
|
||||
|
||||
const NewError = createError('CODE', 'foo')
|
||||
const err = new NewError()
|
||||
t.assert.equal(err.toString(), 'FastifyError [CODE]: foo')
|
||||
})
|
||||
|
||||
test('Create the error without the new keyword', (t) => {
|
||||
t.plan(6)
|
||||
|
||||
const NewError = createError('CODE', 'Not available')
|
||||
const err = NewError()
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.name, 'FastifyError')
|
||||
t.assert.equal(err.message, 'Not available')
|
||||
t.assert.equal(err.code, 'CODE')
|
||||
t.assert.equal(err.statusCode, 500)
|
||||
t.assert.ok(err.stack)
|
||||
})
|
||||
|
||||
test('Create an error with cause', (t) => {
|
||||
t.plan(2)
|
||||
|
||||
const cause = new Error('HEY')
|
||||
const NewError = createError('CODE', 'Not available')
|
||||
const err = NewError({ cause })
|
||||
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.cause, cause)
|
||||
})
|
||||
|
||||
test('Create an error with cause and message', (t) => {
|
||||
t.plan(2)
|
||||
|
||||
const cause = new Error('HEY')
|
||||
const NewError = createError('CODE', 'Not available: %s')
|
||||
const err = NewError('foo', { cause })
|
||||
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.equal(err.cause, cause)
|
||||
})
|
||||
|
||||
test('Create an error with last argument null', (t) => {
|
||||
t.plan(2)
|
||||
|
||||
const cause = new Error('HEY')
|
||||
const NewError = createError('CODE', 'Not available')
|
||||
const err = NewError({ cause }, null)
|
||||
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ifError(err.cause)
|
||||
})
|
||||
|
||||
test('check if FastifyError is instantiable', (t) => {
|
||||
t.plan(2)
|
||||
|
||||
const err = new FastifyError()
|
||||
|
||||
t.assert.ok(err instanceof FastifyError)
|
||||
t.assert.ok(err instanceof Error)
|
||||
})
|
||||
263
node_modules/@fastify/error/test/instanceof.test.js
generated
vendored
Normal file
@@ -0,0 +1,263 @@
|
||||
'use strict'
|
||||
|
||||
const cp = require('node:child_process')
|
||||
const fs = require('node:fs')
|
||||
const path = require('node:path')
|
||||
const os = require('node:os')
|
||||
const test = require('node:test')
|
||||
const { createError, FastifyError } = require('..')
|
||||
|
||||
test('Readme: All errors created with `createError` will be instances of the base error constructor you provided, or `Error` if none was provided.', (t) => {
|
||||
t.plan(3)
|
||||
|
||||
const CustomError = createError('ERROR_CODE', 'Hello %s', 500, TypeError)
|
||||
const customError = new CustomError('world')
|
||||
|
||||
t.assert.ok(customError instanceof CustomError)
|
||||
t.assert.ok(customError instanceof TypeError)
|
||||
t.assert.ok(customError instanceof Error)
|
||||
})
|
||||
|
||||
test('Readme: All instantiated errors will be instances of the `FastifyError` class. The `FastifyError` class can be required from the module directly.', (t) => {
|
||||
t.plan(1)
|
||||
|
||||
const CustomError = createError('ERROR_CODE', 'Hello %s', 500, TypeError)
|
||||
const customError = new CustomError('world')
|
||||
|
||||
t.assert.ok(customError instanceof FastifyError)
|
||||
})
|
||||
|
||||
test('Readme: It is possible to create a `FastifyError` that extends another `FastifyError`, created by `createError`, while instanceof working correctly.', (t) => {
|
||||
t.plan(5)
|
||||
|
||||
const CustomError = createError('ERROR_CODE', 'Hello %s', 500, TypeError)
|
||||
const ChildCustomError = createError('CHILD_ERROR_CODE', 'Hello %s', 500, CustomError)
|
||||
|
||||
const customError = new ChildCustomError('world')
|
||||
|
||||
t.assert.ok(customError instanceof ChildCustomError)
|
||||
t.assert.ok(customError instanceof CustomError)
|
||||
t.assert.ok(customError instanceof FastifyError)
|
||||
t.assert.ok(customError instanceof TypeError)
|
||||
t.assert.ok(customError instanceof Error)
|
||||
})
|
||||
|
||||
test('Readme: Changing the code of an instantiated Error will not change the result of the `instanceof` operator.', (t) => {
|
||||
t.plan(3)
|
||||
|
||||
const CustomError = createError('ERROR_CODE', 'Hello %s', 500, TypeError)
|
||||
const AnotherCustomError = createError('ANOTHER_ERROR_CODE', 'Hello %s', 500, CustomError)
|
||||
|
||||
const customError = new CustomError('world')
|
||||
customError.code = 'ANOTHER_ERROR_CODE'
|
||||
|
||||
t.assert.ok(customError instanceof CustomError)
|
||||
t.assert.ok(customError instanceof AnotherCustomError === false)
|
||||
t.assert.ok(customError instanceof FastifyError)
|
||||
})
|
||||
|
||||
test('check if createError creates an Error which is instanceof Error', (t) => {
|
||||
t.plan(3)
|
||||
|
||||
const CustomFastifyError = createError('CODE', 'Not available')
|
||||
const err = CustomFastifyError()
|
||||
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ok(err instanceof SyntaxError === false)
|
||||
t.assert.ok(err instanceof TypeError === false)
|
||||
})
|
||||
|
||||
test('check if createError creates an Error which is instanceof FastifyError', (t) => {
|
||||
t.plan(4)
|
||||
|
||||
const CustomFastifyError = createError('CODE', 'Not available')
|
||||
const err = CustomFastifyError()
|
||||
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ok(err instanceof FastifyError)
|
||||
t.assert.ok(err instanceof SyntaxError === false)
|
||||
t.assert.ok(err instanceof TypeError === false)
|
||||
})
|
||||
|
||||
test('check if createError creates an Error with the right BaseConstructor', (t) => {
|
||||
t.plan(2)
|
||||
|
||||
const CustomFastifyError = createError('CODE', 'Not available', 500, TypeError)
|
||||
const err = CustomFastifyError()
|
||||
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ok(err instanceof TypeError)
|
||||
})
|
||||
|
||||
test('check if createError creates an Error with the right BaseConstructor, which is a FastifyError', (t) => {
|
||||
t.plan(6)
|
||||
|
||||
const BaseFastifyError = createError('CODE', 'Not available', 500, TypeError)
|
||||
const CustomFastifyError = createError('CODE', 'Not available', 500, BaseFastifyError)
|
||||
const err = CustomFastifyError()
|
||||
|
||||
t.assert.ok(err instanceof Error)
|
||||
t.assert.ok(err instanceof TypeError)
|
||||
t.assert.ok(err instanceof FastifyError)
|
||||
t.assert.ok(err instanceof BaseFastifyError)
|
||||
t.assert.ok(err instanceof CustomFastifyError)
|
||||
t.assert.ok(err instanceof SyntaxError === false)
|
||||
})
|
||||
|
||||
// for more information see https://github.com/fastify/fastify-error/pull/86#issuecomment-1301466407
|
||||
test('ensure that instanceof works across different installations of the fastify-error module', async (t) => {
|
||||
const assertsPlanned = 5
|
||||
t.plan(assertsPlanned)
|
||||
|
||||
// We need to create a test environment where fastify-error is installed in two different locations
|
||||
// and then we will check if the error created in one location is instanceof the error created in the other location
|
||||
// This is done by creating a test directory with the following structure:
|
||||
|
||||
// /
|
||||
// ├── index.js
|
||||
// └── node_modules/
|
||||
// ├── fastify-error/
|
||||
// │ └── index.js
|
||||
// └── dep/
|
||||
// ├── index.js
|
||||
// └── node_modules/
|
||||
// └── fastify-error/
|
||||
// └── index.js
|
||||
|
||||
const testDirectoryPrefix = 'fastify-error-instanceof-test-'
|
||||
|
||||
const testCwd = path.resolve(os.tmpdir(), `${testDirectoryPrefix}${Math.random().toString(36).substring(2, 15)}`)
|
||||
fs.mkdirSync(testCwd, { recursive: true })
|
||||
|
||||
// Create the index.js. It will be executed as a forked process, so we need to
|
||||
// use process.send to send messages back to the parent process.
|
||||
fs.writeFileSync(path.resolve(testCwd, 'index.js'), `
|
||||
'use strict'
|
||||
|
||||
const path = require('node:path')
|
||||
const { createError, FastifyError } = require('fastify-error')
|
||||
const { foo } = require('dep')
|
||||
|
||||
const actualPathOfFastifyError = require.resolve('fastify-error')
|
||||
const expectedPathOfFastifyError = path.resolve('node_modules', 'fastify-error', 'index.js')
|
||||
|
||||
// Ensure that fastify-error is required from the node_modules directory of the test-project
|
||||
if (actualPathOfFastifyError !== expectedPathOfFastifyError) {
|
||||
console.error('actualPathOfFastifyError', actualPathOfFastifyError)
|
||||
console.error('expectedPathOfFastifyError', expectedPathOfFastifyError)
|
||||
throw new Error('fastify-error should be required from the node_modules directory of the test-project')
|
||||
}
|
||||
|
||||
const Boom = createError('Boom', 'Boom', 500)
|
||||
const ChildBoom = createError('ChildBoom', 'Boom', 500, Boom)
|
||||
const NotChildBoom = createError('NotChildBoom', 'NotChildBoom', 500, Boom)
|
||||
|
||||
try {
|
||||
foo()
|
||||
} catch (err) {
|
||||
process.send(err instanceof Error)
|
||||
process.send(err instanceof FastifyError)
|
||||
process.send(err instanceof NotChildBoom)
|
||||
process.send(err instanceof Boom)
|
||||
process.send(err instanceof ChildBoom)
|
||||
}
|
||||
`)
|
||||
|
||||
// Create /node_modules/fastify-error directory
|
||||
// Copy the index.js file to the fastify-error directory
|
||||
fs.mkdirSync(path.resolve(testCwd, 'node_modules', 'fastify-error'), { recursive: true })
|
||||
fs.copyFileSync(path.resolve(process.cwd(), 'index.js'), path.resolve(testCwd, 'node_modules', 'fastify-error', 'index.js'))
|
||||
|
||||
// Create /node_modules/dep/node_modules/fastify-error directory
|
||||
// Copy the index.js to the fastify-error directory
|
||||
fs.mkdirSync(path.resolve(testCwd, 'node_modules', 'dep', 'node_modules', 'fastify-error'), { recursive: true })
|
||||
fs.copyFileSync(path.resolve(process.cwd(), 'index.js'), path.resolve(testCwd, 'node_modules', 'dep', 'node_modules', 'fastify-error', 'index.js'))
|
||||
|
||||
// Create /node_modules/dep/index.js. It will export a function foo which will
|
||||
// throw an error when called. The error will be an instance of ChildBoom, created
|
||||
// by the fastify-error module in the node_modules directory of dep.
|
||||
fs.writeFileSync(path.resolve(testCwd, 'node_modules', 'dep', 'index.js'), `
|
||||
'use strict'
|
||||
|
||||
const path = require('node:path')
|
||||
const { createError } = require('fastify-error')
|
||||
|
||||
const actualPathOfFastifyError = require.resolve('fastify-error')
|
||||
const expectedPathOfFastifyError = path.resolve('node_modules', 'dep', 'node_modules', 'fastify-error', 'index.js')
|
||||
|
||||
// Ensure that fastify-error is required from the node_modules directory of the test-project
|
||||
if (actualPathOfFastifyError !== expectedPathOfFastifyError) {
|
||||
console.error('actualPathOfFastifyError', actualPathOfFastifyError)
|
||||
console.error('expectedPathOfFastifyError', expectedPathOfFastifyError)
|
||||
throw new Error('fastify-error should be required from the node_modules directory of dep')
|
||||
}
|
||||
|
||||
const Boom = createError('Boom', 'Boom', 500)
|
||||
const ChildBoom = createError('ChildBoom', 'Boom', 500, Boom)
|
||||
|
||||
module.exports.foo = function foo () {
|
||||
throw new ChildBoom('foo go Boom')
|
||||
}
|
||||
`)
|
||||
|
||||
const finishedPromise = {
|
||||
promise: undefined,
|
||||
reject: undefined,
|
||||
resolve: undefined,
|
||||
}
|
||||
|
||||
finishedPromise.promise = new Promise((resolve, reject) => {
|
||||
finishedPromise.resolve = resolve
|
||||
finishedPromise.reject = reject
|
||||
})
|
||||
|
||||
const child = cp.fork(path.resolve(testCwd, 'index.js'), {
|
||||
cwd: testCwd,
|
||||
stdio: 'inherit',
|
||||
env: {
|
||||
...process.env,
|
||||
NODE_OPTIONS: '--no-warnings'
|
||||
},
|
||||
})
|
||||
|
||||
let messageCount = 0
|
||||
child.on('message', message => {
|
||||
try {
|
||||
switch (messageCount) {
|
||||
case 0:
|
||||
t.assert.strictEqual(message, true, 'instanceof Error')
|
||||
break
|
||||
case 1:
|
||||
t.assert.strictEqual(message, true, 'instanceof FastifyError')
|
||||
break
|
||||
case 2:
|
||||
t.assert.strictEqual(message, false, 'instanceof NotChildBoom')
|
||||
break
|
||||
case 3:
|
||||
t.assert.strictEqual(message, true, 'instanceof Boom')
|
||||
break
|
||||
case 4:
|
||||
t.assert.strictEqual(message, true, 'instanceof ChildBoom')
|
||||
break
|
||||
}
|
||||
if (++messageCount === assertsPlanned) {
|
||||
finishedPromise.resolve()
|
||||
}
|
||||
} catch (err) {
|
||||
finishedPromise.reject(err)
|
||||
}
|
||||
})
|
||||
|
||||
child.on('error', err => {
|
||||
finishedPromise.reject(err)
|
||||
})
|
||||
|
||||
await finishedPromise.promise
|
||||
|
||||
// Cleanup
|
||||
// As we are creating the test-setup on the fly in the /tmp directory, we can remove it
|
||||
// safely when we are done. It is not relevant for the test if the deletion fails.
|
||||
try {
|
||||
fs.rmSync(testCwd, { recursive: true, force: true })
|
||||
} catch {}
|
||||
})
|
||||
49
node_modules/@fastify/error/types/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
declare function createError<C extends string, SC extends number, Arg extends unknown[] = [any?, any?, any?]> (
|
||||
code: C,
|
||||
message: string,
|
||||
statusCode: SC,
|
||||
Base?: ErrorConstructor,
|
||||
captureStackTrace?: boolean
|
||||
): createError.FastifyErrorConstructor<{ code: C, statusCode: SC }, Arg>
|
||||
|
||||
declare function createError<C extends string, Arg extends unknown[] = [any?, any?, any?]> (
|
||||
code: C,
|
||||
message: string,
|
||||
statusCode?: number,
|
||||
Base?: ErrorConstructor,
|
||||
captureStackTrace?: boolean
|
||||
): createError.FastifyErrorConstructor<{ code: C }, Arg>
|
||||
|
||||
declare function createError<Arg extends unknown[] = [any?, any?, any?]> (
|
||||
code: string,
|
||||
message: string,
|
||||
statusCode?: number,
|
||||
Base?: ErrorConstructor,
|
||||
captureStackTrace?: boolean
|
||||
): createError.FastifyErrorConstructor<{ code: string }, Arg>
|
||||
|
||||
type CreateError = typeof createError
|
||||
|
||||
declare namespace createError {
|
||||
export interface FastifyError extends Error {
|
||||
code: string
|
||||
name: string
|
||||
statusCode?: number
|
||||
}
|
||||
|
||||
export interface FastifyErrorConstructor<
|
||||
E extends { code: string, statusCode?: number } = { code: string, statusCode?: number },
|
||||
T extends unknown[] = [any?, any?, any?]
|
||||
> {
|
||||
new(...arg: T): FastifyError & E
|
||||
(...arg: T): FastifyError & E
|
||||
readonly prototype: FastifyError & E
|
||||
}
|
||||
|
||||
export const FastifyError: FastifyErrorConstructor
|
||||
|
||||
export const createError: CreateError
|
||||
export { createError as default }
|
||||
}
|
||||
|
||||
export = createError
|
||||
92
node_modules/@fastify/error/types/index.test-d.ts
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
import createError, { FastifyError, FastifyErrorConstructor } from '..'
|
||||
import { expectType, expectError } from 'tsd'
|
||||
|
||||
const CustomError = createError('ERROR_CODE', 'message')
|
||||
expectType<FastifyErrorConstructor<{ code: 'ERROR_CODE' }>>(CustomError)
|
||||
const err = new CustomError()
|
||||
expectType<FastifyError & { code: 'ERROR_CODE' }>(err)
|
||||
expectType<'ERROR_CODE'>(err.code)
|
||||
expectType<string>(err.message)
|
||||
expectType<number | undefined>(err.statusCode)
|
||||
|
||||
const CustomErrorNoStackTrace = createError('ERROR_CODE', 'message', undefined, undefined, false)
|
||||
expectType<FastifyErrorConstructor<{ code: 'ERROR_CODE' }>>(CustomErrorNoStackTrace)
|
||||
const errNoStackTrace = new CustomErrorNoStackTrace()
|
||||
expectType<FastifyError & { code: 'ERROR_CODE' }>(errNoStackTrace)
|
||||
expectType<'ERROR_CODE'>(errNoStackTrace.code)
|
||||
expectType<string>(errNoStackTrace.message)
|
||||
expectType<number | undefined>(errNoStackTrace.statusCode)
|
||||
|
||||
const CustomTypedError = createError('OTHER_CODE', 'message', 400)
|
||||
expectType<FastifyErrorConstructor<{ code: 'OTHER_CODE', statusCode: 400 }>>(CustomTypedError)
|
||||
const typed = new CustomTypedError()
|
||||
expectType<FastifyError & { code: 'OTHER_CODE', statusCode: 400 }>(typed)
|
||||
expectType<'OTHER_CODE'>(typed.code)
|
||||
expectType<string>(typed.message)
|
||||
expectType<400>(typed.statusCode)
|
||||
|
||||
/* eslint-disable no-new */
|
||||
const CustomTypedArgError = createError<[string]>('OTHER_CODE', 'expect %s message', 400)
|
||||
CustomTypedArgError('a')
|
||||
expectError(CustomTypedArgError('a', 'b'))
|
||||
expectError(new CustomTypedArgError('a', 'b'))
|
||||
expectError(CustomTypedArgError(1))
|
||||
expectError(new CustomTypedArgError(1))
|
||||
|
||||
const CustomTypedArgError2 = createError<string, number, [string]>('OTHER_CODE', 'expect %s message', 400)
|
||||
CustomTypedArgError2('a')
|
||||
expectError(CustomTypedArgError2('a', 'b'))
|
||||
expectError(new CustomTypedArgError2('a', 'b'))
|
||||
expectError(CustomTypedArgError2(1))
|
||||
expectError(new CustomTypedArgError2(1))
|
||||
|
||||
const CustomTypedArgError3 = createError<string, number, [string, string]>('OTHER_CODE', 'expect %s message but got %s', 400)
|
||||
expectError(CustomTypedArgError3('a'))
|
||||
CustomTypedArgError3('a', 'b')
|
||||
new CustomTypedArgError3('a', 'b')
|
||||
expectError(CustomTypedArgError3(1))
|
||||
expectError(new CustomTypedArgError3(1))
|
||||
expectError(new CustomTypedArgError3(1, 2))
|
||||
expectError(new CustomTypedArgError3('1', 2))
|
||||
expectError(new CustomTypedArgError3(1, '2'))
|
||||
|
||||
const CustomTypedArgError4 = createError<string, number, [string, string]>('OTHER_CODE', 'expect %s message but got %s', 400)
|
||||
expectError(CustomTypedArgError4('a'))
|
||||
CustomTypedArgError4('a', 'b')
|
||||
new CustomTypedArgError4('a', 'b')
|
||||
expectError(CustomTypedArgError4(1))
|
||||
expectError(new CustomTypedArgError4(1))
|
||||
expectError(new CustomTypedArgError4(1, 2))
|
||||
expectError(new CustomTypedArgError4('1', 2))
|
||||
expectError(new CustomTypedArgError4(1, '2'))
|
||||
|
||||
const CustomTypedArgError5 = createError<[string, string, string, string]>('OTHER_CODE', 'expect %s message but got %s. Please contact %s by emailing to %s', 400)
|
||||
expectError(CustomTypedArgError5('a'))
|
||||
expectError(new CustomTypedArgError5('a', 'b'))
|
||||
expectError(new CustomTypedArgError5('a', 'b', 'c'))
|
||||
CustomTypedArgError5('a', 'b', 'c', 'd')
|
||||
expectError(new CustomTypedArgError5('a', 'b', 'c', 'd', 'e'))
|
||||
|
||||
const CustomTypedArgError6 = createError<string, number, [string, string, string, string]>('OTHER_CODE', 'expect %s message but got %s. Please contact %s by emailing to %s', 400)
|
||||
expectError(CustomTypedArgError6('a'))
|
||||
expectError(new CustomTypedArgError6('a', 'b'))
|
||||
expectError(new CustomTypedArgError6('a', 'b', 'c'))
|
||||
CustomTypedArgError6('a', 'b', 'c', 'd')
|
||||
expectError(new CustomTypedArgError6('a', 'b', 'c', 'd', 'e'))
|
||||
|
||||
const CustomErrorWithErrorConstructor = createError('ERROR_CODE', 'message', 500, TypeError)
|
||||
expectType<FastifyErrorConstructor<{ code: 'ERROR_CODE', statusCode: 500 }>>(CustomErrorWithErrorConstructor)
|
||||
CustomErrorWithErrorConstructor({ cause: new Error('Error') })
|
||||
const customErrorWithErrorConstructor = CustomErrorWithErrorConstructor()
|
||||
if (customErrorWithErrorConstructor instanceof FastifyError) {
|
||||
expectType<'ERROR_CODE'>(customErrorWithErrorConstructor.code)
|
||||
expectType<string>(customErrorWithErrorConstructor.message)
|
||||
expectType<500>(customErrorWithErrorConstructor.statusCode)
|
||||
}
|
||||
|
||||
const error = new FastifyError('ERROR_CODE', 'message', 500)
|
||||
if (error instanceof FastifyError) {
|
||||
expectType<string>(error.code)
|
||||
expectType<string>(error.message)
|
||||
expectType<number | undefined>(error.statusCode)
|
||||
}
|
||||
1
node_modules/@fastify/fast-json-stringify-compiler/.eslintrc
generated
vendored
Normal file
@@ -0,0 +1 @@
{"extends": "standard"}
2
node_modules/@fastify/fast-json-stringify-compiler/.gitattributes
generated
vendored
Normal file
@@ -0,0 +1,2 @@
# Set default behavior to automatically convert line endings
* text=auto eol=lf
13
node_modules/@fastify/fast-json-stringify-compiler/.github/dependabot.yml
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
28
node_modules/@fastify/fast-json-stringify-compiler/.github/workflows/ci.yml
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- next
|
||||
- 'v*'
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
test:
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
uses: fastify/workflows/.github/workflows/plugins-ci.yml@v5
|
||||
with:
|
||||
license-check: true
|
||||
lint: true
|
||||
21
node_modules/@fastify/fast-json-stringify-compiler/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2022 Fastify
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
128
node_modules/@fastify/fast-json-stringify-compiler/README.md
generated
vendored
Normal file
@@ -0,0 +1,128 @@
|
||||
# @fastify/fast-json-stringify-compiler
|
||||
|
||||
[](https://github.com/fastify/fast-json-stringify-compiler/actions/workflows/ci.yml)
|
||||
[](https://www.npmjs.com/package/@fastify/fast-json-stringify-compiler)
|
||||
[](https://github.com/neostandard/neostandard)
|
||||
|
||||
Build and manage the [`fast-json-stringify`](https://www.npmjs.com/package/fast-json-stringify) instances for the Fastify framework.
|
||||
This package is responsible for compiling the application's `response` JSON schemas into optimized functions to speed up the response time.
|
||||
|
||||
## Versions
|
||||
|
||||
| `@fastify/fast-json-stringify-compiler` | `fast-json-stringify` | Supported `fastify` |
|
||||
|----------------------------------------:|----------------------:|--------------------:|
|
||||
| v1.x | v3.x | ^3.x |
|
||||
| v2.x | v3.x | ^4.x |
|
||||
| v3.x | v4.x | ^4.x |
|
||||
| v4.x | v5.x | ^5.x |
|
||||
|
||||
### fast-json-stringify Configuration
|
||||
|
||||
The `fast-json-stringify` configuration is the default one. You can check the default settings in the [`fast-json-stringify` options](https://github.com/fastify/fast-json-stringify/#options) documentation.

You can also override the default configuration by passing the [`serializerOpts`](https://fastify.dev/docs/latest/Reference/Server/#serializeropts) option to the Fastify instance.
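As a small illustration (not taken from this README), any `fast-json-stringify` option can be forwarded this way; `rounding` is used here only as an example of such an option:

```js
const fastify = require('fastify')

// Every property of serializerOpts is handed to fast-json-stringify as-is.
const app = fastify({
  serializerOpts: {
    rounding: 'ceil'
  }
})
```
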
## Usage
|
||||
|
||||
This module is already used by Fastify as the default serializer compiler.
If you need to provide a different version to your server instance, refer to [the official doc](https://fastify.dev/docs/latest/Reference/Server/#schemacontroller).
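A hedged sketch of what that wiring looks like: the factory exported by this package is handed to Fastify through the `schemaController` option, the same pattern used by the standalone examples below:

```js
const fastify = require('fastify')
const SerializerSelector = require('@fastify/fast-json-stringify-compiler')

// Build the serializer factory and let Fastify use it for response schemas.
const factory = SerializerSelector()

const app = fastify({
  schemaController: {
    compilersFactory: {
      buildSerializer: factory
    }
  }
})
```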
|
||||
|
||||
### fast-json-stringify Standalone
|
||||
|
||||
`fast-json-stringify@v4.1.0` introduces the [standalone feature](https://github.com/fastify/fast-json-stringify#standalone) that lets you pre-compile your schemas and use them in your application for a faster startup.
|
||||
|
||||
To use this feature, you must be aware of the following:
|
||||
|
||||
1. You must generate and save the application's compiled schemas.
|
||||
2. Read the compiled schemas from the file and provide them back to your Fastify application.
|
||||
|
||||
|
||||
#### Generate and save the compiled schemas
|
||||
|
||||
Fastify helps you generate the serialization functions; it is your choice where to save them.
|
||||
To accomplish this, you must use a new compiler: `@fastify/fast-json-stringify-compiler/standalone`.
|
||||
|
||||
You must provide 2 parameters to this compiler:
|
||||
|
||||
- `readMode: false`: a boolean to indicate that you want to generate the source code of the serializer functions.
- `storeFunction`: a sync function that must store the source code of the serializer functions. You may provide an async function too, but you must handle its errors yourself.
|
||||
|
||||
When `readMode: false`, **the compiler is meant to be used in development ONLY**.
|
||||
|
||||
|
||||
```js
|
||||
const { StandaloneSerializer } = require('@fastify/fast-json-stringify-compiler')
|
||||
|
||||
const factory = StandaloneSerializer({
|
||||
readMode: false,
|
||||
storeFunction (routeOpts, schemaSerializationCode) {
|
||||
// routeOpts is like: { schema, method, url, httpStatus }
|
||||
// schemaSerializationCode is a string source code that is the compiled schema function
|
||||
const fileName = generateFileName(routeOpts)
|
||||
fs.writeFileSync(path.join(__dirname, fileName), schemaSerializationCode)
|
||||
}
|
||||
})
|
||||
|
||||
const app = fastify({
|
||||
jsonShorthand: false,
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildSerializer: factory
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// ... add all your routes with schemas ...
|
||||
|
||||
app.ready().then(() => {
|
||||
// at this stage all your schemas are compiled and stored in the file system
|
||||
// now it is important to turn off the readMode
|
||||
})
|
||||
```
|
||||
|
||||
#### Read the compiled schemas functions
|
||||
|
||||
At this stage, you should have a file for every route's schema.
|
||||
To use them, you must use the `@fastify/fast-json-stringify-compiler/standalone` with the parameters:
|
||||
|
||||
- `readMode: true`: a boolean to indicate that you want to read and use the previously generated serializer functions.
- `restoreFunction`: a sync function that must return a function to serialize the route's payload.
|
||||
|
||||
Important notes before you continue reading the documentation:
|
||||
|
||||
- when you use `readMode: true`, the application schemas are not compiled (they are ignored). So, if you change your schemas, you must recompile them!
- as you can see, you must relate the route's schema to the file name using the `routeOpts` object. You may use the `routeOpts.schema.$id` field to do so; it is up to you to define a unique schema identifier (see the sketch just below).
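The snippets in this section call a `generateFileName(routeOpts)` helper without defining it. A minimal sketch, assuming every route schema carries a unique `$id`; the helper and its naming scheme are hypothetical, adapt them to your project:

```js
// Derive a stable file name for a route's compiled serializer.
// Assumes routeOpts.schema.$id is set and unique per route schema.
function generateFileName (routeOpts) {
  const id = String(routeOpts.schema.$id).replace(/[^A-Za-z0-9_-]/g, '_')
  return `fjs-${id}-${routeOpts.method}-${routeOpts.httpStatus}.js`
}
```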
|
||||
|
||||
```js
|
||||
const { StandaloneSerializer } = require('@fastify/fast-json-stringify-compiler')
|
||||
|
||||
const factory = StandaloneSerializer({
|
||||
readMode: true,
|
||||
restoreFunction (routeOpts) {
|
||||
// routeOpts is like: { schema, method, url, httpStatus }
|
||||
const fileName = generateFileName(routeOpts)
|
||||
return require(path.join(__dirname, fileName))
|
||||
}
|
||||
})
|
||||
|
||||
const app = fastify({
|
||||
jsonShorthand: false,
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildSerializer: factory
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// ... add all your routes with schemas as before...
|
||||
|
||||
app.listen({ port: 3000 })
|
||||
```
|
||||
|
||||
### How it works
|
||||
|
||||
This module provides a factory function that produces [serializer compiler](https://fastify.dev/docs/latest/Reference/Server/#serializercompiler) functions.
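To make that contract concrete, a hedged sketch of the shape involved: the factory receives the external schemas and the serializer options, and returns a compiler that is called once per route and must return a `(doc) => string` serializer. The toy compiler below simply falls back to `JSON.stringify`; the real factory returns functions generated by `fast-json-stringify`:

```js
// Shape of a buildSerializer factory, illustrative only.
function buildToySerializer (externalSchemas, serializerOpts) {
  // Called once per route definition.
  return function toyCompiler ({ schema, method, url, httpStatus }) {
    // Must return a serializer: (doc) => string.
    return function serialize (doc) {
      return JSON.stringify(doc)
    }
  }
}
```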
|
||||
|
||||
## License
|
||||
|
||||
Licensed under [MIT](./LICENSE).
|
||||
6
node_modules/@fastify/fast-json-stringify-compiler/eslint.config.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
'use strict'

module.exports = require('neostandard')({
  ignores: require('neostandard').resolveIgnoresFromGitignore(),
  ts: true
})
8
node_modules/@fastify/fast-json-stringify-compiler/index.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
'use strict'

const { SerializerSelector, StandaloneSerializer } = require('./standalone')

module.exports = SerializerSelector
module.exports.default = SerializerSelector
module.exports.SerializerSelector = SerializerSelector
module.exports.StandaloneSerializer = StandaloneSerializer
71
node_modules/@fastify/fast-json-stringify-compiler/package.json
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
"name": "@fastify/fast-json-stringify-compiler",
|
||||
"description": "Build and manage the fast-json-stringify instances for the fastify framework",
|
||||
"version": "5.0.3",
|
||||
"main": "index.js",
|
||||
"type": "commonjs",
|
||||
"types": "types/index.d.ts",
|
||||
"scripts": {
|
||||
"lint": "eslint",
|
||||
"lint:fix": "eslint --fix",
|
||||
"unit": "c8 --100 node --test",
|
||||
"test": "npm run unit && npm run test:typescript",
|
||||
"test:typescript": "tsd"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/fastify/fast-json-stringify-compiler.git"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "Manuel Spigolon <manuel.spigolon@nearform.com> (https://github.com/Eomm)",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Matteo Collina",
|
||||
"email": "hello@matteocollina.com"
|
||||
},
|
||||
{
|
||||
"name": "Aras Abbasi",
|
||||
"email": "aras.abbasi@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "James Sumners",
|
||||
"url": "https://james.sumners.info"
|
||||
},
|
||||
{
|
||||
"name": "Frazer Smith",
|
||||
"email": "frazer.dev@icloud.com",
|
||||
"url": "https://github.com/fdawgs"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/fastify/fast-json-stringify-compiler/issues"
|
||||
},
|
||||
"homepage": "https://github.com/fastify/fast-json-stringify-compiler#readme",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"devDependencies": {
|
||||
"@fastify/pre-commit": "^2.1.0",
|
||||
"c8": "^10.1.3",
|
||||
"eslint": "^9.17.0",
|
||||
"fastify": "^5.0.0",
|
||||
"neostandard": "^0.12.0",
|
||||
"sanitize-filename": "^1.6.3",
|
||||
"tsd": "^0.31.0"
|
||||
},
|
||||
"pre-commit": [
|
||||
"lint",
|
||||
"test"
|
||||
],
|
||||
"dependencies": {
|
||||
"fast-json-stringify": "^6.0.0"
|
||||
}
|
||||
}
|
||||
58
node_modules/@fastify/fast-json-stringify-compiler/standalone.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
'use strict'

const fastJsonStringify = require('fast-json-stringify')

function SerializerSelector () {
  return function buildSerializerFactory (externalSchemas, serializerOpts) {
    const fjsOpts = Object.assign({}, serializerOpts, { schema: externalSchemas })
    return responseSchemaCompiler.bind(null, fjsOpts)
  }
}

function responseSchemaCompiler (fjsOpts, { schema /* method, url, httpStatus */ }) {
  if (fjsOpts.schema && schema.$id && fjsOpts.schema[schema.$id]) {
    fjsOpts.schema = { ...fjsOpts.schema }
    delete fjsOpts.schema[schema.$id]
  }
  return fastJsonStringify(schema, fjsOpts)
}

function StandaloneSerializer (options = { readMode: true }) {
  if (options.readMode === true && typeof options.restoreFunction !== 'function') {
    throw new Error('You must provide a function for the restoreFunction-option when readMode ON')
  }

  if (options.readMode !== true && typeof options.storeFunction !== 'function') {
    throw new Error('You must provide a function for the storeFunction-option when readMode OFF')
  }

  if (options.readMode === true) {
    // READ MODE: it relies solely on the restore function provided by the user
    return function wrapper () {
      return function (opts) {
        return options.restoreFunction(opts)
      }
    }
  }

  // WRITE MODE: it relies on the default SerializerSelector, wrapping the API to run the fast-json-stringify standalone code generation
  const factory = SerializerSelector()
  return function wrapper (externalSchemas, serializerOpts = {}) {
    // to generate the serialization source code, this option is mandatory
    serializerOpts.mode = 'standalone'

    const compiler = factory(externalSchemas, serializerOpts)
    return function (opts) { // { schema/*, method, url, httpPart */ }
      const serializeFuncCode = compiler(opts)

      options.storeFunction(opts, serializeFuncCode)

      // eslint-disable-next-line no-new-func
      return new Function(serializeFuncCode)
    }
  }
}

module.exports.SerializerSelector = SerializerSelector
module.exports.StandaloneSerializer = StandaloneSerializer
module.exports.default = StandaloneSerializer
26
node_modules/@fastify/fast-json-stringify-compiler/test/duplicate-schema.test.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
'use strict'
|
||||
|
||||
const { test } = require('node:test')
|
||||
const FjsCompiler = require('../index')
|
||||
|
||||
test('Use input schema duplicate in the externalSchemas', async t => {
|
||||
t.plan(1)
|
||||
const externalSchemas = {
|
||||
schema1: {
|
||||
$id: 'schema1',
|
||||
type: 'number'
|
||||
},
|
||||
schema2: {
|
||||
$id: 'schema2',
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
const factory = FjsCompiler()
|
||||
const compiler = factory(externalSchemas)
|
||||
|
||||
compiler({ schema: externalSchemas.schema1 })
|
||||
compiler({ schema: externalSchemas.schema2 })
|
||||
|
||||
t.assert.ok(true)
|
||||
})
|
||||
78
node_modules/@fastify/fast-json-stringify-compiler/test/plugin.test.js
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
'use strict'
|
||||
|
||||
const { test } = require('node:test')
|
||||
const fastify = require('fastify')
|
||||
const FjsCompiler = require('../index')
|
||||
|
||||
const echo = async (req) => { return req.body }
|
||||
|
||||
const sampleSchema = Object.freeze({
|
||||
$id: 'example1',
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string' }
|
||||
}
|
||||
})
|
||||
|
||||
const externalSchemas1 = Object.freeze({})
|
||||
const externalSchemas2 = Object.freeze({
|
||||
foo: {
|
||||
$id: 'foo',
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const fastifyFjsOptionsDefault = Object.freeze({})
|
||||
|
||||
test('basic usage', t => {
|
||||
t.plan(1)
|
||||
const factory = FjsCompiler()
|
||||
const compiler = factory(externalSchemas1, fastifyFjsOptionsDefault)
|
||||
const serializeFunc = compiler({ schema: sampleSchema })
|
||||
const result = serializeFunc({ name: 'hello' })
|
||||
t.assert.equal(result, '{"name":"hello"}')
|
||||
})
|
||||
|
||||
test('fastify integration', async t => {
|
||||
const factory = FjsCompiler()
|
||||
|
||||
const app = fastify({
|
||||
serializerOpts: {
|
||||
rounding: 'ceil'
|
||||
},
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildSerializer: factory
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
app.addSchema(externalSchemas2.foo)
|
||||
|
||||
app.post('/', {
|
||||
handler: echo,
|
||||
schema: {
|
||||
response: {
|
||||
200: {
|
||||
$ref: 'foo#'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const res = await app.inject({
|
||||
url: '/',
|
||||
method: 'POST',
|
||||
payload: {
|
||||
version: '1',
|
||||
foo: 'this is not a number',
|
||||
name: 'serialize me'
|
||||
}
|
||||
})
|
||||
|
||||
t.assert.equal(res.statusCode, 200)
|
||||
t.assert.deepStrictEqual(res.json(), { name: 'serialize me' })
|
||||
})
|
||||
230
node_modules/@fastify/fast-json-stringify-compiler/test/standalone.test.js
generated
vendored
Normal file
@@ -0,0 +1,230 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('node:fs')
|
||||
const path = require('node:path')
|
||||
const { test } = require('node:test')
|
||||
const fastify = require('fastify')
|
||||
const sanitize = require('sanitize-filename')
|
||||
|
||||
const { StandaloneSerializer: FjsStandaloneCompiler } = require('../')
|
||||
|
||||
const generatedFileNames = []
|
||||
|
||||
function generateFileName (routeOpts) {
|
||||
const fileName = `/fjs-generated-${sanitize(routeOpts.schema.$id)}-${routeOpts.method}-${routeOpts.httpPart}-${sanitize(routeOpts.url)}.js`
|
||||
generatedFileNames.push(fileName)
|
||||
return fileName
|
||||
}
|
||||
|
||||
test('standalone', async t => {
|
||||
t.plan(5)
|
||||
|
||||
t.after(async () => {
|
||||
for (const fileName of generatedFileNames) {
|
||||
try {
|
||||
await fs.promises.unlink(path.join(__dirname, fileName))
|
||||
} catch {}
|
||||
}
|
||||
})
|
||||
|
||||
t.test('errors', t => {
|
||||
t.plan(2)
|
||||
t.assert.throws(() => {
|
||||
FjsStandaloneCompiler()
|
||||
}, 'missing restoreFunction')
|
||||
t.assert.throws(() => {
|
||||
FjsStandaloneCompiler({ readMode: false })
|
||||
}, 'missing storeFunction')
|
||||
})
|
||||
|
||||
t.test('generate standalone code', t => {
|
||||
t.plan(5)
|
||||
|
||||
const base = {
|
||||
$id: 'urn:schema:base',
|
||||
definitions: {
|
||||
hello: { type: 'string' }
|
||||
},
|
||||
type: 'object',
|
||||
properties: {
|
||||
hello: { $ref: '#/definitions/hello' }
|
||||
}
|
||||
}
|
||||
|
||||
const refSchema = {
|
||||
$id: 'urn:schema:ref',
|
||||
type: 'object',
|
||||
properties: {
|
||||
hello: { $ref: 'urn:schema:base#/definitions/hello' }
|
||||
}
|
||||
}
|
||||
|
||||
const endpointSchema = {
|
||||
schema: {
|
||||
$id: 'urn:schema:endpoint',
|
||||
$ref: 'urn:schema:ref'
|
||||
}
|
||||
}
|
||||
|
||||
const schemaMap = {
|
||||
[base.$id]: base,
|
||||
[refSchema.$id]: refSchema
|
||||
}
|
||||
|
||||
const factory = FjsStandaloneCompiler({
|
||||
readMode: false,
|
||||
storeFunction (routeOpts, schemaSerializerCode) {
|
||||
t.assert.deepStrictEqual(routeOpts, endpointSchema)
|
||||
t.assert.ok(typeof schemaSerializerCode === 'string')
|
||||
fs.writeFileSync(path.join(__dirname, '/fjs-generated.js'), schemaSerializerCode)
|
||||
generatedFileNames.push('/fjs-generated.js')
|
||||
t.assert.ok('stored the serializer function')
|
||||
}
|
||||
})
|
||||
|
||||
const compiler = factory(schemaMap)
|
||||
compiler(endpointSchema)
|
||||
t.assert.ok('compiled the endpoint schema')
|
||||
|
||||
t.test('usage standalone code', t => {
|
||||
t.plan(3)
|
||||
const standaloneSerializer = require('./fjs-generated')
|
||||
t.assert.ok(standaloneSerializer)
|
||||
|
||||
const valid = standaloneSerializer({ hello: 'world' })
|
||||
t.assert.deepStrictEqual(valid, JSON.stringify({ hello: 'world' }))
|
||||
|
||||
const invalid = standaloneSerializer({ hello: [] })
|
||||
t.assert.deepStrictEqual(invalid, '{"hello":""}')
|
||||
})
|
||||
})
|
||||
|
||||
t.test('fastify integration - writeMode', async t => {
|
||||
t.plan(4)
|
||||
|
||||
const factory = FjsStandaloneCompiler({
|
||||
readMode: false,
|
||||
storeFunction (routeOpts, schemaSerializationCode) {
|
||||
const fileName = generateFileName(routeOpts)
|
||||
t.assert.ok(routeOpts)
|
||||
fs.writeFileSync(path.join(__dirname, fileName), schemaSerializationCode)
|
||||
t.assert.ok(`stored the serializer function ${fileName}`)
|
||||
},
|
||||
restoreFunction () {
|
||||
t.fail('write mode ON')
|
||||
}
|
||||
})
|
||||
|
||||
const app = buildApp(factory)
|
||||
await app.ready()
|
||||
})
|
||||
|
||||
await t.test('fastify integration - writeMode forces standalone', async t => {
|
||||
t.plan(4)
|
||||
|
||||
const factory = FjsStandaloneCompiler({
|
||||
readMode: false,
|
||||
storeFunction (routeOpts, schemaSerializationCode) {
|
||||
const fileName = generateFileName(routeOpts)
|
||||
t.assert.ok(routeOpts)
|
||||
fs.writeFileSync(path.join(__dirname, fileName), schemaSerializationCode)
|
||||
t.assert.ok(`stored the serializer function ${fileName}`)
|
||||
},
|
||||
restoreFunction () {
|
||||
t.fail('write mode ON')
|
||||
}
|
||||
})
|
||||
|
||||
const app = buildApp(factory, {
|
||||
mode: 'not-standalone',
|
||||
rounding: 'ceil'
|
||||
})
|
||||
|
||||
await app.ready()
|
||||
})
|
||||
|
||||
await t.test('fastify integration - readMode', async t => {
|
||||
t.plan(6)
|
||||
|
||||
const factory = FjsStandaloneCompiler({
|
||||
readMode: true,
|
||||
storeFunction () {
|
||||
t.fail('read mode ON')
|
||||
},
|
||||
restoreFunction (routeOpts) {
|
||||
const fileName = generateFileName(routeOpts)
|
||||
t.assert.ok(`restore the serializer function ${fileName}`)
|
||||
return require(path.join(__dirname, fileName))
|
||||
}
|
||||
})
|
||||
|
||||
const app = buildApp(factory)
|
||||
await app.ready()
|
||||
|
||||
let res = await app.inject({
|
||||
url: '/foo',
|
||||
method: 'POST'
|
||||
})
|
||||
t.assert.equal(res.statusCode, 200)
|
||||
t.assert.equal(res.payload, JSON.stringify({ hello: 'world' }))
|
||||
|
||||
res = await app.inject({
|
||||
url: '/bar?lang=it',
|
||||
method: 'GET'
|
||||
})
|
||||
t.assert.equal(res.statusCode, 200)
|
||||
t.assert.equal(res.payload, JSON.stringify({ lang: 'en' }))
|
||||
})
|
||||
|
||||
function buildApp (factory, serializerOpts) {
|
||||
const app = fastify({
|
||||
exposeHeadRoutes: false,
|
||||
jsonShorthand: false,
|
||||
schemaController: {
|
||||
compilersFactory: {
|
||||
buildSerializer: factory
|
||||
}
|
||||
},
|
||||
serializerOpts
|
||||
})
|
||||
|
||||
app.addSchema({
|
||||
$id: 'urn:schema:foo',
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string' },
|
||||
id: { type: 'integer' }
|
||||
}
|
||||
})
|
||||
|
||||
app.post('/foo', {
|
||||
schema: {
|
||||
response: {
|
||||
200: {
|
||||
$id: 'urn:schema:response',
|
||||
type: 'object',
|
||||
properties: {
|
||||
hello: { $ref: 'urn:schema:foo#/properties/name' }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}, () => { return { hello: 'world' } })
|
||||
|
||||
app.get('/bar', {
|
||||
schema: {
|
||||
response: {
|
||||
200: {
|
||||
$id: 'urn:schema:response:bar',
|
||||
type: 'object',
|
||||
properties: {
|
||||
lang: { type: 'string', enum: ['it', 'en'] }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}, () => { return { lang: 'en' } })
|
||||
|
||||
return app
|
||||
}
|
||||
})
|
||||
41
node_modules/@fastify/fast-json-stringify-compiler/types/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
import { Options } from 'fast-json-stringify'
|
||||
|
||||
type FastJsonStringifyFactory = () => SerializerSelector.SerializerFactory
|
||||
|
||||
declare namespace SerializerSelector {
|
||||
export type SerializerFactory = (
|
||||
externalSchemas?: unknown,
|
||||
options?: Options
|
||||
) => SerializerCompiler
|
||||
|
||||
export type SerializerCompiler = (routeDef: RouteDefinition) => Serializer
|
||||
export type Serializer = (doc: any) => string
|
||||
|
||||
export type RouteDefinition = {
|
||||
method: string;
|
||||
url: string;
|
||||
httpStatus: string;
|
||||
schema?: unknown;
|
||||
}
|
||||
|
||||
export type StandaloneOptions = StandaloneOptionsReadModeOn | StandaloneOptionsReadModeOff
|
||||
|
||||
export type StandaloneOptionsReadModeOn = {
|
||||
readMode: true;
|
||||
restoreFunction?(opts: RouteDefinition): Serializer;
|
||||
}
|
||||
|
||||
export type StandaloneOptionsReadModeOff = {
|
||||
readMode?: false | undefined;
|
||||
storeFunction?(opts: RouteDefinition, schemaSerializationCode: string): void;
|
||||
}
|
||||
|
||||
export type { Options }
|
||||
export const SerializerSelector: FastJsonStringifyFactory
|
||||
export function StandaloneSerializer (options: StandaloneOptions): SerializerFactory
|
||||
|
||||
export { SerializerSelector as default }
|
||||
}
|
||||
|
||||
declare function SerializerSelector (...params: Parameters<FastJsonStringifyFactory>): ReturnType<FastJsonStringifyFactory>
|
||||
export = SerializerSelector
|
||||
142
node_modules/@fastify/fast-json-stringify-compiler/types/index.test-d.ts
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
import { expectAssignable, expectError, expectType } from 'tsd'
|
||||
import SerializerSelector, {
|
||||
RouteDefinition,
|
||||
Serializer,
|
||||
SerializerCompiler,
|
||||
SerializerFactory,
|
||||
SerializerSelector as SerializerSelectorNamed,
|
||||
StandaloneSerializer,
|
||||
} from '..'
|
||||
|
||||
/**
|
||||
* SerializerSelector
|
||||
*/
|
||||
|
||||
{
|
||||
const compiler = SerializerSelector()
|
||||
expectType<SerializerFactory>(compiler)
|
||||
}
|
||||
|
||||
{
|
||||
const compiler = SerializerSelectorNamed()
|
||||
expectType<SerializerFactory>(compiler)
|
||||
}
|
||||
|
||||
{
|
||||
const sampleSchema = {
|
||||
$id: 'example1',
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string' }
|
||||
}
|
||||
}
|
||||
|
||||
const externalSchemas1 = {}
|
||||
|
||||
const factory = SerializerSelector()
|
||||
expectType<SerializerFactory>(factory)
|
||||
const compiler = factory(externalSchemas1, {})
|
||||
expectType<SerializerCompiler>(compiler)
|
||||
const serializeFunc = compiler({ schema: sampleSchema, method: '', url: '', httpStatus: '' })
|
||||
expectType<Serializer>(serializeFunc)
|
||||
|
||||
expectType<string>(serializeFunc({ name: 'hello' }))
|
||||
}
|
||||
|
||||
/**
|
||||
* StandaloneSerializer
|
||||
*/
|
||||
|
||||
const reader = StandaloneSerializer({
|
||||
readMode: true,
|
||||
restoreFunction: (route: RouteDefinition) => {
|
||||
expectAssignable<RouteDefinition>(route)
|
||||
return {} as Serializer
|
||||
},
|
||||
})
|
||||
expectType<SerializerFactory>(reader)
|
||||
|
||||
const writer = StandaloneSerializer({
|
||||
readMode: false,
|
||||
storeFunction: (route: RouteDefinition, code: string) => {
|
||||
expectAssignable<RouteDefinition>(route)
|
||||
expectAssignable<string>(code)
|
||||
},
|
||||
})
|
||||
expectType<SerializerFactory>(writer)
|
||||
|
||||
{
|
||||
const base = {
|
||||
$id: 'urn:schema:base',
|
||||
definitions: {
|
||||
hello: { type: 'string' }
|
||||
},
|
||||
type: 'object',
|
||||
properties: {
|
||||
hello: { $ref: '#/definitions/hello' }
|
||||
}
|
||||
}
|
||||
|
||||
const refSchema = {
|
||||
$id: 'urn:schema:ref',
|
||||
type: 'object',
|
||||
properties: {
|
||||
hello: { $ref: 'urn:schema:base#/definitions/hello' }
|
||||
}
|
||||
}
|
||||
|
||||
const endpointSchema = {
|
||||
method: '',
|
||||
url: '',
|
||||
httpStatus: '',
|
||||
schema: {
|
||||
$id: 'urn:schema:endpoint',
|
||||
$ref: 'urn:schema:ref'
|
||||
}
|
||||
}
|
||||
|
||||
const schemaMap = {
|
||||
[base.$id]: base,
|
||||
[refSchema.$id]: refSchema
|
||||
}
|
||||
|
||||
expectError(StandaloneSerializer({
|
||||
readMode: true,
|
||||
storeFunction () { }
|
||||
}))
|
||||
expectError(StandaloneSerializer({
|
||||
readMode: false,
|
||||
restoreFunction () {}
|
||||
}))
|
||||
expectError(StandaloneSerializer({
|
||||
restoreFunction () {}
|
||||
}))
|
||||
|
||||
expectType<SerializerFactory>(StandaloneSerializer({
|
||||
storeFunction (routeOpts, schemaSerializerCode) {
|
||||
expectType<RouteDefinition>(routeOpts)
|
||||
expectType<string>(schemaSerializerCode)
|
||||
}
|
||||
}))
|
||||
|
||||
expectType<SerializerFactory>(StandaloneSerializer({
|
||||
readMode: true,
|
||||
restoreFunction (routeOpts) {
|
||||
expectType<RouteDefinition>(routeOpts)
|
||||
return {} as Serializer
|
||||
}
|
||||
}))
|
||||
|
||||
const factory = StandaloneSerializer({
|
||||
readMode: false,
|
||||
storeFunction (routeOpts, schemaSerializerCode) {
|
||||
expectType<RouteDefinition>(routeOpts)
|
||||
expectType<string>(schemaSerializerCode)
|
||||
}
|
||||
})
|
||||
expectType<SerializerFactory>(factory)
|
||||
|
||||
const compiler = factory(schemaMap)
|
||||
expectType<SerializerCompiler>(compiler)
|
||||
expectType<Serializer>(compiler(endpointSchema))
|
||||
}
|
||||
23
node_modules/@fastify/forwarded/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2021 Fastify collaborators
|
||||
Copyright (c) 2014-2017 Douglas Christopher Wilson
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
43
node_modules/@fastify/forwarded/README.md
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
# @fastify/forwarded
|
||||
|
||||

|
||||
[](https://www.npmjs.com/package/@fastify/forwarded)
|
||||
[](https://standardjs.com/)
|
||||
|
||||
Parse HTTP X-Forwarded-For header.
|
||||
|
||||
Updated version of the great https://github.com/jshttp/forwarded.
|
||||
Implements https://github.com/jshttp/forwarded/pull/9.
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
$ npm i @fastify/forwarded
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```js
|
||||
var forwarded = require('@fastify/forwarded')
|
||||
```
|
||||
|
||||
### forwarded(req)
|
||||
|
||||
```js
|
||||
var addresses = forwarded(req)
|
||||
```
|
||||
|
||||
Parse the `X-Forwarded-For` header from the request. Returns an array
|
||||
of the addresses, including the socket address for the `req`, in reverse
|
||||
order (i.e. index `0` is the socket address and the last index is the
|
||||
furthest address, typically the end-user).
|
||||
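For example, a minimal sketch with a made-up request object (only the fields `forwarded()` actually reads are stubbed; all addresses are invented):

```js
const forwarded = require('@fastify/forwarded')

// Minimal stand-in for an http.IncomingMessage; addresses are invented.
const req = {
  headers: { 'x-forwarded-for': '203.0.113.7, 198.51.100.2' },
  socket: { remoteAddress: '127.0.0.1' }
}

// Socket address first, then each hop in reverse header order,
// ending with the furthest (client) address.
console.log(forwarded(req))
// => [ '127.0.0.1', '198.51.100.2', '203.0.113.7' ]
```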
|
||||
## Testing
|
||||
|
||||
```sh
|
||||
$ npm test
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE)
|
||||
59
node_modules/@fastify/forwarded/index.js
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
/*!
|
||||
* forwarded
|
||||
* Copyright(c) 2014-2017 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Get all addresses in the request used in the `X-Forwarded-For` header.
|
||||
*/
|
||||
function forwarded (req) {
|
||||
if (!req) {
|
||||
throw new TypeError('argument req is required')
|
||||
}
|
||||
|
||||
const header = req.headers['x-forwarded-for']
|
||||
const socketAddr = req.socket.remoteAddress
|
||||
|
||||
if (!header || typeof header !== 'string') {
|
||||
return [socketAddr]
|
||||
} else if (header.indexOf(',') === -1) {
|
||||
const remote = header.trim()
|
||||
return (remote.length)
|
||||
? [socketAddr, remote]
|
||||
: [socketAddr]
|
||||
} else {
|
||||
return parse(header, socketAddr)
|
||||
}
|
||||
}
|
||||
|
||||
function parse (header, socketAddr) {
|
||||
const result = [socketAddr]
|
||||
|
||||
let end = header.length
|
||||
let start = end
|
||||
let char
|
||||
let i
|
||||
|
||||
for (i = end - 1; i >= 0; --i) {
|
||||
char = header[i]
|
||||
if (char === ' ') {
|
||||
(start === end) && (start = end = i)
|
||||
} else if (char === ',') {
|
||||
(start !== end) && result.push(header.slice(start, end))
|
||||
start = end = i
|
||||
} else {
|
||||
start = i
|
||||
}
|
||||
}
|
||||
|
||||
(start !== end) && result.push(header.substring(start, end))
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = forwarded
|
||||
module.exports.default = forwarded
|
||||
module.exports.forwarded = forwarded
|
||||
48
node_modules/@fastify/forwarded/package.json
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
{
|
||||
"name": "@fastify/forwarded",
|
||||
"description": "Parse HTTP X-Forwarded-For header",
|
||||
"version": "3.0.0",
|
||||
"type": "commonjs",
|
||||
"contributors": [
|
||||
"Matteo Collina <hello@matteocollina.com>",
|
||||
"Douglas Christopher Wilson <doug@somethingdoug.com>",
|
||||
"Aras Abbasi <aras.abbasi@gmail.com"
|
||||
],
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"x-forwarded-for",
|
||||
"http",
|
||||
"req"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/fastify/forwarded.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/fastify/forwarded/issues"
|
||||
},
|
||||
"homepage": "https://github.com/fastify/forwarded#readme",
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.14.9",
|
||||
"benchmark": "2.1.4",
|
||||
"standard": "^17.1.0",
|
||||
"tap": "^18.8.0",
|
||||
"tsd": "^0.31.1"
|
||||
},
|
||||
"types": "types/index.d.ts",
|
||||
"files": [
|
||||
"LICENSE",
|
||||
"README.md",
|
||||
"index.js",
|
||||
"types/index.d.ts"
|
||||
],
|
||||
"scripts": {
|
||||
"bench": "node benchmark/index.js",
|
||||
"bench:combined": "node benchmark/combined.js",
|
||||
"lint": "standard",
|
||||
"lint:fix": "standard --fix",
|
||||
"test": "npm run test:unit && npm run test:typescript",
|
||||
"test:unit": "tap",
|
||||
"test:typescript": "tsd"
|
||||
}
|
||||
}
|
||||
14
node_modules/@fastify/forwarded/types/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import { IncomingMessage } from 'http';
|
||||
|
||||
type Forwarded = (req: IncomingMessage) => string[]
|
||||
|
||||
declare namespace forwarded {
|
||||
export const forwarded: Forwarded
|
||||
export { forwarded as default }
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all addresses in the request used in the `X-Forwarded-For` header.
|
||||
*/
|
||||
declare function forwarded(...params: Parameters<Forwarded>): ReturnType<Forwarded>
|
||||
export = forwarded
|
||||
2
node_modules/@fastify/merge-json-schemas/.gitattributes
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
# Set default behavior to automatically convert line endings
|
||||
* text=auto eol=lf
|
||||
13
node_modules/@fastify/merge-json-schemas/.github/dependabot.yml
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
23
node_modules/@fastify/merge-json-schemas/.github/workflows/ci.yml
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- next
|
||||
- 'v*'
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
uses: fastify/workflows/.github/workflows/plugins-ci.yml@v5
|
||||
with:
|
||||
license-check: true
|
||||
lint: true
|
||||
21
node_modules/@fastify/merge-json-schemas/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2024 Fastify
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
119
node_modules/@fastify/merge-json-schemas/README.md
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
# @fastify/merge-json-schemas
|
||||
|
||||
[](https://github.com/fastify/merge-json-schemas/actions/workflows/ci.yml)
|
||||
[](https://www.npmjs.com/package/@fastify/merge-json-schemas)
|
||||
[](https://github.com/neostandard/neostandard)
|
||||
|
||||
__merge-json-schemas__ is a JavaScript library that builds a logical product (AND) for multiple [JSON schemas](https://json-schema.org/draft/2020-12/json-schema-core#name-introduction).
|
||||
|
||||
- [Installation](#installation)
|
||||
- [Usage](#usage)
|
||||
- [API](#api)
|
||||
- [mergeSchemas(schemas, options)](#mergeschemasschemas-options)
|
||||
- [resolvers](#resolvers)
|
||||
- [defaultResolver](#defaultresolver)
|
||||
- [License](#license)
|
||||
|
||||
<a name="installation"></a>
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm i @fastify/merge-json-schemas
|
||||
```
|
||||
|
||||
<a name="usage"></a>
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
const assert = require('node:assert')
|
||||
const { mergeSchemas } = require('@fastify/merge-json-schemas');
|
||||
|
||||
const schema1 = {
|
||||
$id: 'schema1',
|
||||
type: 'object',
|
||||
properties: {
|
||||
foo: { type: 'string', enum: ['foo1', 'foo2'] },
|
||||
bar: { type: 'string', minLength: 3 }
|
||||
}
|
||||
}
|
||||
|
||||
const schema2 = {
|
||||
$id: 'schema1',
|
||||
type: 'object',
|
||||
properties: {
|
||||
foo: { type: 'string', enum: ['foo1', 'foo3'] },
|
||||
bar: { type: 'string', minLength: 5 }
|
||||
},
|
||||
required: ['foo']
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2])
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
$id: 'schema1',
|
||||
type: 'object',
|
||||
properties: {
|
||||
foo: { type: 'string', enum: ['foo1'] },
|
||||
bar: { type: 'string', minLength: 5 }
|
||||
},
|
||||
required: ['foo']
|
||||
})
|
||||
```
|
||||
|
||||
<a name="api"></a>
|
||||
|
||||
## API
|
||||
|
||||
<a name="merge-schemas"></a>
|
||||
|
||||
#### mergeSchemas(schemas, options)
|
||||
|
||||
Builds a logical conjunction (AND) of multiple [JSON schemas](https://json-schema.org/draft/2020-12/json-schema-core#name-introduction).
|
||||
|
||||
- `schemas` __\<objects[]\>__ - list of JSON schemas to merge
|
||||
- `options` __\<object\>__ - optional options
|
||||
- `resolvers` __\<object\>__ - custom resolvers for JSON schema keywords. Each key is the name of a JSON schema keyword. Each value is a resolver function. See [keywordResolver](#keywordresolver-keyword-values-mergedschema-parentschemas-options)
|
||||
- `defaultResolver` __\<function\>__ - custom default resolver for JSON schema keywords. See [keywordResolver](#keywordresolver-keyword-values-mergedschema-parentschemas-options)
|
||||
  - `onConflict` __\<string\>__ - action to take when a conflict is found. Used by the default `defaultResolver`. Default is `throw`. Possible values are (see the example after this list):
    - `throw` - throws an error if multiple different schemas for the same keyword are found
    - `ignore` - does nothing if multiple different schemas for the same keyword are found
    - `first` - uses the value of the first schema if multiple different schemas for the same keyword are found
|
||||
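For instance, a minimal sketch of the `onConflict` behaviour (the two schemas below are invented and use a keyword without a dedicated resolver, so the default resolver decides the outcome):

```javascript
const { mergeSchemas } = require('@fastify/merge-json-schemas')

// `description` has no dedicated resolver, so the default resolver
// (and therefore `onConflict`) decides what happens on a conflict.
const schemaA = { type: 'string', description: 'a name' }
const schemaB = { type: 'string', description: 'a label' }

// The default ('throw') would raise an error here; 'first' keeps the
// value from the first schema instead.
const merged = mergeSchemas([schemaA, schemaB], { onConflict: 'first' })
console.log(merged) // { type: 'string', description: 'a name' }
```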
|
||||
#### resolvers
|
||||
|
||||
A list of default resolvers that __merge-json-schema__ uses to merge JSON schemas. You can override the default resolvers by passing a list of custom resolvers in the `options` argument of `mergeSchemas`. See [keywordResolver](#keywordresolver-keyword-values-mergedschema-parentschemas-options).
|
||||
|
||||
#### defaultResolver
|
||||
|
||||
The default resolver that __merge-json-schema__ falls back to when no custom resolver is defined for a JSON schema keyword. By default, it works as follows (a sketch of overriding it follows the list):
|
||||
|
||||
- If only one schema contains the keyword, the value of the keyword is used as the merged value
|
||||
- If multiple schemas contain the exact same value for the keyword, the value of the keyword is used as the merged value
|
||||
- If multiple schemas contain different values for the keyword, it throws an error
|
||||
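A minimal sketch of overriding `defaultResolver` (the `keepAllResolver` function and the `title` values are invented for illustration):

```javascript
const { mergeSchemas } = require('@fastify/merge-json-schemas')

// Illustrative only: instead of throwing on a conflict, keep every
// distinct value in an array. Not the library's built-in behaviour.
function keepAllResolver (keyword, values, mergedSchema) {
  mergedSchema[keyword] = values.length === 1 ? values[0] : values
}

const merged = mergeSchemas(
  [{ title: 'User' }, { title: 'Account' }],
  { defaultResolver: keepAllResolver }
)
console.log(merged) // { title: [ 'User', 'Account' ] }
```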
|
||||
#### keywordResolver (keyword, values, mergedSchema, parentSchemas, options)
|
||||
|
||||
__merge-json-schema__ uses a set of resolvers to merge JSON schemas. Each resolver is associated with a JSON schema keyword. The resolver is called when the keyword is found in the schemas to merge. The resolver is called with the following arguments:
|
||||
|
||||
- `keyword` __\<string\>__ - the name of the keyword to merge
|
||||
- `values` __\<any[]\>__ - the values of the keyword to merge. The length of the array is equal to the number of schemas to merge. If a schema does not contain the keyword, the value is `undefined`
|
||||
- `mergedSchema` __\<object\>__ - an instance of the merged schema
|
||||
- `parentSchemas` __\<object[]\>__ - the list of parent schemas
|
||||
- `options` __\<object\>__ - the options passed to `mergeSchemas`
|
||||
|
||||
The resolver must set the merged value of the `keyword` in the `mergedSchema` object.
|
||||
|
||||
__Example:__ resolver for the `minNumber` keyword.
|
||||
|
||||
```javascript
|
||||
function minNumberResolver (keyword, values, mergedSchema) {
|
||||
mergedSchema[keyword] = Math.min(...values)
|
||||
}
|
||||
```
|
||||
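To plug such a resolver in, it can be passed via the `resolvers` option. A minimal sketch, assuming the `minNumberResolver` function from the example above is in scope and `minNumber` is a hypothetical keyword:

```javascript
const { mergeSchemas } = require('@fastify/merge-json-schemas')

const merged = mergeSchemas(
  [{ minNumber: 3 }, { minNumber: 7 }],
  { resolvers: { minNumber: minNumberResolver } }
)
console.log(merged) // { minNumber: 3 }
```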
|
||||
<a name="license"></a>
|
||||
|
||||
## License
|
||||
|
||||
Licensed under [MIT](./LICENSE).
|
||||
6
node_modules/@fastify/merge-json-schemas/eslint.config.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = require('neostandard')({
|
||||
ignores: require('neostandard').resolveIgnoresFromGitignore(),
|
||||
ts: true
|
||||
})
|
||||
357
node_modules/@fastify/merge-json-schemas/index.js
generated
vendored
Normal file
@@ -0,0 +1,357 @@
|
||||
'use strict'
|
||||
|
||||
const { dequal: deepEqual } = require('dequal')
|
||||
const resolvers = require('./lib/resolvers')
|
||||
const errors = require('./lib/errors')
|
||||
|
||||
const keywordsResolvers = {
|
||||
$id: resolvers.skip,
|
||||
type: resolvers.hybridArraysIntersection,
|
||||
enum: resolvers.arraysIntersection,
|
||||
minLength: resolvers.maxNumber,
|
||||
maxLength: resolvers.minNumber,
|
||||
minimum: resolvers.maxNumber,
|
||||
maximum: resolvers.minNumber,
|
||||
multipleOf: resolvers.commonMultiple,
|
||||
exclusiveMinimum: resolvers.maxNumber,
|
||||
exclusiveMaximum: resolvers.minNumber,
|
||||
minItems: resolvers.maxNumber,
|
||||
maxItems: resolvers.minNumber,
|
||||
maxProperties: resolvers.minNumber,
|
||||
minProperties: resolvers.maxNumber,
|
||||
const: resolvers.allEqual,
|
||||
default: resolvers.allEqual,
|
||||
format: resolvers.allEqual,
|
||||
required: resolvers.arraysUnion,
|
||||
properties: mergeProperties,
|
||||
patternProperties: mergeObjects,
|
||||
additionalProperties: mergeSchemasResolver,
|
||||
items: mergeItems,
|
||||
additionalItems: mergeAdditionalItems,
|
||||
definitions: mergeObjects,
|
||||
$defs: mergeObjects,
|
||||
nullable: resolvers.booleanAnd,
|
||||
oneOf: mergeOneOf,
|
||||
anyOf: mergeOneOf,
|
||||
allOf: resolvers.arraysUnion,
|
||||
not: mergeSchemasResolver,
|
||||
if: mergeIfThenElseSchemas,
|
||||
then: resolvers.skip,
|
||||
else: resolvers.skip,
|
||||
dependencies: mergeDependencies,
|
||||
dependentRequired: mergeDependencies,
|
||||
dependentSchemas: mergeObjects,
|
||||
propertyNames: mergeSchemasResolver,
|
||||
uniqueItems: resolvers.booleanOr,
|
||||
contains: mergeSchemasResolver
|
||||
}
|
||||
|
||||
function mergeSchemasResolver (keyword, values, mergedSchema, _schemas, options) {
|
||||
mergedSchema[keyword] = _mergeSchemas(values, options)
|
||||
}
|
||||
|
||||
function cartesianProduct (arrays) {
|
||||
let result = [[]]
|
||||
|
||||
for (const array of arrays) {
|
||||
const temp = []
|
||||
for (const x of result) {
|
||||
for (const y of array) {
|
||||
temp.push([...x, y])
|
||||
}
|
||||
}
|
||||
result = temp
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
function mergeOneOf (keyword, values, mergedSchema, _schemas, options) {
|
||||
if (values.length === 1) {
|
||||
mergedSchema[keyword] = values[0]
|
||||
return
|
||||
}
|
||||
|
||||
const product = cartesianProduct(values)
|
||||
const mergedOneOf = []
|
||||
for (const combination of product) {
|
||||
try {
|
||||
const mergedSchema = _mergeSchemas(combination, options)
|
||||
if (mergedSchema !== undefined) {
|
||||
mergedOneOf.push(mergedSchema)
|
||||
}
|
||||
} catch (error) {
|
||||
// If this combination is not valid, we can ignore it.
|
||||
if (error instanceof errors.MergeError) continue
|
||||
throw error
|
||||
}
|
||||
}
|
||||
mergedSchema[keyword] = mergedOneOf
|
||||
}
|
||||
|
||||
function getSchemaForItem (schema, index) {
|
||||
const { items, additionalItems } = schema
|
||||
|
||||
if (Array.isArray(items)) {
|
||||
if (index < items.length) {
|
||||
return items[index]
|
||||
}
|
||||
return additionalItems
|
||||
}
|
||||
|
||||
if (items !== undefined) {
|
||||
return items
|
||||
}
|
||||
|
||||
return additionalItems
|
||||
}
|
||||
|
||||
function mergeItems (keyword, values, mergedSchema, schemas, options) {
|
||||
let maxArrayItemsLength = 0
|
||||
for (const itemsSchema of values) {
|
||||
if (Array.isArray(itemsSchema)) {
|
||||
maxArrayItemsLength = Math.max(maxArrayItemsLength, itemsSchema.length)
|
||||
}
|
||||
}
|
||||
|
||||
if (maxArrayItemsLength === 0) {
|
||||
mergedSchema[keyword] = _mergeSchemas(values, options)
|
||||
return
|
||||
}
|
||||
|
||||
const mergedItemsSchemas = []
|
||||
for (let i = 0; i < maxArrayItemsLength; i++) {
|
||||
const indexItemSchemas = []
|
||||
for (const schema of schemas) {
|
||||
const itemSchema = getSchemaForItem(schema, i)
|
||||
if (itemSchema !== undefined) {
|
||||
indexItemSchemas.push(itemSchema)
|
||||
}
|
||||
}
|
||||
mergedItemsSchemas[i] = _mergeSchemas(indexItemSchemas, options)
|
||||
}
|
||||
mergedSchema[keyword] = mergedItemsSchemas
|
||||
}
|
||||
|
||||
function mergeAdditionalItems (keyword, values, mergedSchema, schemas, options) {
|
||||
let hasArrayItems = false
|
||||
for (const schema of schemas) {
|
||||
if (Array.isArray(schema.items)) {
|
||||
hasArrayItems = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (!hasArrayItems) {
|
||||
mergedSchema[keyword] = _mergeSchemas(values, options)
|
||||
return
|
||||
}
|
||||
|
||||
const mergedAdditionalItemsSchemas = []
|
||||
for (const schema of schemas) {
|
||||
let additionalItemsSchema = schema.additionalItems
|
||||
if (
|
||||
additionalItemsSchema === undefined &&
|
||||
!Array.isArray(schema.items)
|
||||
) {
|
||||
additionalItemsSchema = schema.items
|
||||
}
|
||||
if (additionalItemsSchema !== undefined) {
|
||||
mergedAdditionalItemsSchemas.push(additionalItemsSchema)
|
||||
}
|
||||
}
|
||||
|
||||
mergedSchema[keyword] = _mergeSchemas(mergedAdditionalItemsSchemas, options)
|
||||
}
|
||||
|
||||
function getSchemaForProperty (schema, propertyName) {
|
||||
const { properties, patternProperties, additionalProperties } = schema
|
||||
|
||||
if (properties?.[propertyName] !== undefined) {
|
||||
return properties[propertyName]
|
||||
}
|
||||
|
||||
for (const pattern of Object.keys(patternProperties ?? {})) {
|
||||
const regexp = new RegExp(pattern)
|
||||
if (regexp.test(propertyName)) {
|
||||
return patternProperties[pattern]
|
||||
}
|
||||
}
|
||||
|
||||
return additionalProperties
|
||||
}
|
||||
|
||||
function mergeProperties (keyword, _values, mergedSchema, schemas, options) {
|
||||
const foundProperties = {}
|
||||
for (const currentSchema of schemas) {
|
||||
const properties = currentSchema.properties ?? {}
|
||||
for (const propertyName of Object.keys(properties)) {
|
||||
if (foundProperties[propertyName] !== undefined) continue
|
||||
|
||||
const propertySchema = properties[propertyName]
|
||||
foundProperties[propertyName] = [propertySchema]
|
||||
|
||||
for (const anotherSchema of schemas) {
|
||||
if (currentSchema === anotherSchema) continue
|
||||
|
||||
const propertySchema = getSchemaForProperty(anotherSchema, propertyName)
|
||||
if (propertySchema !== undefined) {
|
||||
foundProperties[propertyName].push(propertySchema)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mergedProperties = {}
|
||||
for (const property of Object.keys(foundProperties)) {
|
||||
const propertySchemas = foundProperties[property]
|
||||
mergedProperties[property] = _mergeSchemas(propertySchemas, options)
|
||||
}
|
||||
mergedSchema[keyword] = mergedProperties
|
||||
}
|
||||
|
||||
function mergeObjects (keyword, values, mergedSchema, _schemas, options) {
|
||||
const objectsProperties = {}
|
||||
|
||||
for (const properties of values) {
|
||||
for (const propertyName of Object.keys(properties)) {
|
||||
if (objectsProperties[propertyName] === undefined) {
|
||||
objectsProperties[propertyName] = []
|
||||
}
|
||||
objectsProperties[propertyName].push(properties[propertyName])
|
||||
}
|
||||
}
|
||||
|
||||
const mergedProperties = {}
|
||||
for (const propertyName of Object.keys(objectsProperties)) {
|
||||
const propertySchemas = objectsProperties[propertyName]
|
||||
const mergedPropertySchema = _mergeSchemas(propertySchemas, options)
|
||||
mergedProperties[propertyName] = mergedPropertySchema
|
||||
}
|
||||
|
||||
mergedSchema[keyword] = mergedProperties
|
||||
}
|
||||
|
||||
function mergeIfThenElseSchemas (_keyword, _values, mergedSchema, schemas, options) {
|
||||
for (let i = 0; i < schemas.length; i++) {
|
||||
const subSchema = {
|
||||
if: schemas[i].if,
|
||||
then: schemas[i].then,
|
||||
else: schemas[i].else
|
||||
}
|
||||
|
||||
if (subSchema.if === undefined) continue
|
||||
|
||||
if (mergedSchema.if === undefined) {
|
||||
mergedSchema.if = subSchema.if
|
||||
if (subSchema.then !== undefined) {
|
||||
mergedSchema.then = subSchema.then
|
||||
}
|
||||
if (subSchema.else !== undefined) {
|
||||
mergedSchema.else = subSchema.else
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (mergedSchema.then !== undefined) {
|
||||
mergedSchema.then = _mergeSchemas([mergedSchema.then, subSchema], options)
|
||||
}
|
||||
if (mergedSchema.else !== undefined) {
|
||||
mergedSchema.else = _mergeSchemas([mergedSchema.else, subSchema], options)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function mergeDependencies (keyword, values, mergedSchema) {
|
||||
const mergedDependencies = {}
|
||||
for (const dependencies of values) {
|
||||
for (const propertyName of Object.keys(dependencies)) {
|
||||
if (mergedDependencies[propertyName] === undefined) {
|
||||
mergedDependencies[propertyName] = []
|
||||
}
|
||||
const mergedPropertyDependencies = mergedDependencies[propertyName]
|
||||
for (const propertyDependency of dependencies[propertyName]) {
|
||||
if (!mergedPropertyDependencies.includes(propertyDependency)) {
|
||||
mergedPropertyDependencies.push(propertyDependency)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
mergedSchema[keyword] = mergedDependencies
|
||||
}
|
||||
|
||||
function _mergeSchemas (schemas, options) {
|
||||
if (schemas.length === 0) return {}
|
||||
if (schemas.length === 1) return schemas[0]
|
||||
|
||||
const mergedSchema = {}
|
||||
const keywords = {}
|
||||
|
||||
let allSchemasAreTrue = true
|
||||
|
||||
for (const schema of schemas) {
|
||||
if (schema === false) return false
|
||||
if (schema === true) continue
|
||||
allSchemasAreTrue = false
|
||||
|
||||
for (const keyword of Object.keys(schema)) {
|
||||
if (keywords[keyword] === undefined) {
|
||||
keywords[keyword] = []
|
||||
}
|
||||
keywords[keyword].push(schema[keyword])
|
||||
}
|
||||
}
|
||||
|
||||
if (allSchemasAreTrue) return true
|
||||
|
||||
for (const keyword of Object.keys(keywords)) {
|
||||
const keywordValues = keywords[keyword]
|
||||
const resolver = options.resolvers[keyword] ?? options.defaultResolver
|
||||
resolver(keyword, keywordValues, mergedSchema, schemas, options)
|
||||
}
|
||||
|
||||
return mergedSchema
|
||||
}
|
||||
|
||||
function defaultResolver (keyword, values, mergedSchema, _schemas, options) {
|
||||
const onConflict = options.onConflict ?? 'throw'
|
||||
|
||||
if (values.length === 1 || onConflict === 'first') {
|
||||
mergedSchema[keyword] = values[0]
|
||||
return
|
||||
}
|
||||
|
||||
let allValuesEqual = true
|
||||
for (let i = 1; i < values.length; i++) {
|
||||
if (!deepEqual(values[i], values[0])) {
|
||||
allValuesEqual = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (allValuesEqual) {
|
||||
mergedSchema[keyword] = values[0]
|
||||
return
|
||||
}
|
||||
|
||||
if (onConflict === 'throw') {
|
||||
throw new errors.ResolverNotFoundError(keyword, values)
|
||||
}
|
||||
if (onConflict === 'skip') {
|
||||
return
|
||||
}
|
||||
throw new errors.InvalidOnConflictOptionError(onConflict)
|
||||
}
|
||||
|
||||
function mergeSchemas (schemas, options = {}) {
|
||||
if (options.defaultResolver === undefined) {
|
||||
options.defaultResolver = defaultResolver
|
||||
}
|
||||
|
||||
options.resolvers = { ...keywordsResolvers, ...options.resolvers }
|
||||
|
||||
const mergedSchema = _mergeSchemas(schemas, options)
|
||||
return mergedSchema
|
||||
}
|
||||
|
||||
module.exports = { mergeSchemas, keywordsResolvers, defaultResolver, ...errors }
|
||||
36
node_modules/@fastify/merge-json-schemas/lib/errors.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
'use strict'
|
||||
|
||||
class MergeError extends Error {
|
||||
constructor (keyword, schemas) {
|
||||
super()
|
||||
this.name = 'JsonSchemaMergeError'
|
||||
this.code = 'JSON_SCHEMA_MERGE_ERROR'
|
||||
this.message = `Failed to merge "${keyword}" keyword schemas.`
|
||||
this.schemas = schemas
|
||||
}
|
||||
}
|
||||
|
||||
class ResolverNotFoundError extends Error {
|
||||
constructor (keyword, schemas) {
|
||||
super()
|
||||
this.name = 'JsonSchemaMergeError'
|
||||
this.code = 'JSON_SCHEMA_MERGE_ERROR'
|
||||
this.message = `Resolver for "${keyword}" keyword not found.`
|
||||
this.schemas = schemas
|
||||
}
|
||||
}
|
||||
|
||||
class InvalidOnConflictOptionError extends Error {
|
||||
constructor (onConflict) {
|
||||
super()
|
||||
this.name = 'JsonSchemaMergeError'
|
||||
this.code = 'JSON_SCHEMA_MERGE_ERROR'
|
||||
this.message = `Invalid "onConflict" option: "${onConflict}".`
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
MergeError,
|
||||
ResolverNotFoundError,
|
||||
InvalidOnConflictOptionError
|
||||
}
|
||||
127
node_modules/@fastify/merge-json-schemas/lib/resolvers.js
generated
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
'use strict'
|
||||
|
||||
const { dequal: deepEqual } = require('dequal')
|
||||
const { MergeError } = require('./errors')
|
||||
|
||||
function _arraysIntersection (arrays) {
|
||||
let intersection = arrays[0]
|
||||
for (let i = 1; i < arrays.length; i++) {
|
||||
intersection = intersection.filter(
|
||||
value => arrays[i].includes(value)
|
||||
)
|
||||
}
|
||||
return intersection
|
||||
}
|
||||
|
||||
function arraysIntersection (keyword, values, mergedSchema) {
|
||||
const intersection = _arraysIntersection(values)
|
||||
if (intersection.length === 0) {
|
||||
throw new MergeError(keyword, values)
|
||||
}
|
||||
mergedSchema[keyword] = intersection
|
||||
}
|
||||
|
||||
function hybridArraysIntersection (keyword, values, mergedSchema) {
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
if (!Array.isArray(values[i])) {
|
||||
values[i] = [values[i]]
|
||||
}
|
||||
}
|
||||
|
||||
const intersection = _arraysIntersection(values)
|
||||
if (intersection.length === 0) {
|
||||
throw new MergeError(keyword, values)
|
||||
}
|
||||
|
||||
if (intersection.length === 1) {
|
||||
mergedSchema[keyword] = intersection[0]
|
||||
} else {
|
||||
mergedSchema[keyword] = intersection
|
||||
}
|
||||
}
|
||||
|
||||
function arraysUnion (keyword, values, mergedSchema) {
|
||||
const union = []
|
||||
|
||||
for (const array of values) {
|
||||
for (const value of array) {
|
||||
if (!union.includes(value)) {
|
||||
union.push(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mergedSchema[keyword] = union
|
||||
}
|
||||
|
||||
function minNumber (keyword, values, mergedSchema) {
|
||||
mergedSchema[keyword] = Math.min(...values)
|
||||
}
|
||||
|
||||
function maxNumber (keyword, values, mergedSchema) {
|
||||
mergedSchema[keyword] = Math.max(...values)
|
||||
}
|
||||
|
||||
function commonMultiple (keyword, values, mergedSchema) {
|
||||
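// Scale all values up to integers first (so decimal multipleOf values work), take the LCM of the scaled values, then scale back down.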
const gcd = (a, b) => (!b ? a : gcd(b, a % b))
|
||||
const lcm = (a, b) => (a * b) / gcd(a, b)
|
||||
|
||||
let scale = 1
|
||||
for (const value of values) {
|
||||
while (value * scale % 1 !== 0) {
|
||||
scale *= 10
|
||||
}
|
||||
}
|
||||
|
||||
let multiple = values[0] * scale
|
||||
for (const value of values) {
|
||||
multiple = lcm(multiple, value * scale)
|
||||
}
|
||||
|
||||
mergedSchema[keyword] = multiple / scale
|
||||
}
|
||||
|
||||
function allEqual (keyword, values, mergedSchema) {
|
||||
const firstValue = values[0]
|
||||
for (let i = 1; i < values.length; i++) {
|
||||
if (!deepEqual(values[i], firstValue)) {
|
||||
throw new MergeError(keyword, values)
|
||||
}
|
||||
}
|
||||
mergedSchema[keyword] = firstValue
|
||||
}
|
||||
|
||||
function skip () {}
|
||||
|
||||
function booleanAnd (keyword, values, mergedSchema) {
|
||||
for (const value of values) {
|
||||
if (value === false) {
|
||||
mergedSchema[keyword] = false
|
||||
return
|
||||
}
|
||||
}
|
||||
mergedSchema[keyword] = true
|
||||
}
|
||||
|
||||
function booleanOr (keyword, values, mergedSchema) {
|
||||
for (const value of values) {
|
||||
if (value === true) {
|
||||
mergedSchema[keyword] = true
|
||||
return
|
||||
}
|
||||
}
|
||||
mergedSchema[keyword] = false
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
arraysIntersection,
|
||||
hybridArraysIntersection,
|
||||
arraysUnion,
|
||||
minNumber,
|
||||
maxNumber,
|
||||
commonMultiple,
|
||||
allEqual,
|
||||
booleanAnd,
|
||||
booleanOr,
|
||||
skip
|
||||
}
|
||||
67
node_modules/@fastify/merge-json-schemas/package.json
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
{
|
||||
"name": "@fastify/merge-json-schemas",
|
||||
"version": "0.2.1",
|
||||
"description": "Builds a logical conjunction (AND) of multiple JSON schemas",
|
||||
"main": "index.js",
|
||||
"type": "commonjs",
|
||||
"types": "types/index.d.ts",
|
||||
"scripts": {
|
||||
"lint": "eslint",
|
||||
"lint:fix": "eslint --fix",
|
||||
"test": "npm run test:unit && npm run test:types",
|
||||
"test:unit": "c8 --100 node --test",
|
||||
"test:types": "tsd"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/fastify/merge-json-schemas.git"
|
||||
},
|
||||
"keywords": [
|
||||
"json",
|
||||
"schema",
|
||||
"merge",
|
||||
"allOf"
|
||||
],
|
||||
"author": "Ivan Tymoshenko <ivan@tymoshenko.me>",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Matteo Collina",
|
||||
"email": "hello@matteocollina.com"
|
||||
},
|
||||
{
|
||||
"name": "Frazer Smith",
|
||||
"email": "frazer.dev@icloud.com",
|
||||
"url": "https://github.com/fdawgs"
|
||||
},
|
||||
{
|
||||
"name": "Gürgün Dayıoğlu",
|
||||
"email": "hey@gurgun.day",
|
||||
"url": "https://heyhey.to/G"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/fastify/merge-json-schemas/issues"
|
||||
},
|
||||
"homepage": "https://github.com/fastify/merge-json-schemas#readme",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"devDependencies": {
|
||||
"@fastify/pre-commit": "^2.1.0",
|
||||
"c8": "^10.1.3",
|
||||
"eslint": "^9.17.0",
|
||||
"neostandard": "^0.12.0",
|
||||
"tsd": "^0.31.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"dequal": "^2.0.3"
|
||||
}
|
||||
}
|
||||
164
node_modules/@fastify/merge-json-schemas/test/additional-items.test.js
generated
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert/strict')
|
||||
const { test } = require('node:test')
|
||||
const { mergeSchemas } = require('../index')
|
||||
const { defaultResolver } = require('./utils')
|
||||
|
||||
test('should merge empty schema and additionalItems = false keyword', () => {
|
||||
const schema1 = { type: 'array' }
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
additionalItems: false
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
additionalItems: false
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge two schemas with boolean additionalItems', () => {
|
||||
const schema1 = {
|
||||
type: 'array',
|
||||
additionalItems: true
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
additionalItems: false
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
additionalItems: false
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge additionalItems schema with false value', () => {
|
||||
const schema1 = {
|
||||
type: 'array',
|
||||
additionalItems: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
additionalItems: false
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
additionalItems: false
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge additionalItems schema with true value', () => {
|
||||
const schema1 = {
|
||||
type: 'array',
|
||||
additionalItems: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
additionalItems: true
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
additionalItems: {
|
||||
type: 'string'
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge two additionalItems schemas', () => {
|
||||
const schema1 = {
|
||||
type: 'array',
|
||||
additionalItems: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
additionalItems: {
|
||||
type: 'string', minLength: 1
|
||||
}
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
additionalItems: {
|
||||
type: 'string', minLength: 1
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge additionalItems with items array', () => {
|
||||
const schema1 = {
|
||||
type: 'array',
|
||||
items: [
|
||||
{ type: 'string', const: 'foo1' },
|
||||
{ type: 'string', const: 'foo2' },
|
||||
{ type: 'string', const: 'foo3' }
|
||||
]
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
additionalItems: {
|
||||
type: 'string', minLength: 42
|
||||
}
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
items: [
|
||||
{ type: 'string', const: 'foo1', minLength: 42 },
|
||||
{ type: 'string', const: 'foo2', minLength: 42 },
|
||||
{ type: 'string', const: 'foo3', minLength: 42 }
|
||||
],
|
||||
additionalItems: {
|
||||
type: 'string', minLength: 42
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge items array and additionalItems with items array', () => {
|
||||
const schema1 = {
|
||||
type: 'array',
|
||||
items: [
|
||||
{ type: 'string', const: 'foo1' },
|
||||
{ type: 'string', const: 'foo2' },
|
||||
{ type: 'string', const: 'foo3' }
|
||||
]
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
items: [
|
||||
{ type: 'string', minLength: 1 },
|
||||
{ type: 'string', minLength: 2 }
|
||||
],
|
||||
additionalItems: {
|
||||
type: 'string', minLength: 3
|
||||
}
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
items: [
|
||||
{ type: 'string', const: 'foo1', minLength: 1 },
|
||||
{ type: 'string', const: 'foo2', minLength: 2 },
|
||||
{ type: 'string', const: 'foo3', minLength: 3 }
|
||||
],
|
||||
additionalItems: {
|
||||
type: 'string', minLength: 3
|
||||
}
|
||||
})
|
||||
})
|
||||
129
node_modules/@fastify/merge-json-schemas/test/additional-properties.test.js
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert/strict')
|
||||
const { test } = require('node:test')
|
||||
const { mergeSchemas } = require('../index')
|
||||
const { defaultResolver } = require('./utils')
|
||||
|
||||
test('should merge empty schema and additionalProperties=false keyword', () => {
|
||||
const schema1 = { type: 'object' }
|
||||
const schema2 = {
|
||||
type: 'object',
|
||||
additionalProperties: false
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'object',
|
||||
additionalProperties: false
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge two schemas with boolean additionalProperties', () => {
|
||||
const schema1 = {
|
||||
type: 'object',
|
||||
additionalProperties: true
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'object',
|
||||
additionalProperties: false
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'object',
|
||||
additionalProperties: false
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge additionalProperties schema with false value', () => {
|
||||
const schema1 = {
|
||||
type: 'object',
|
||||
additionalProperties: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'object',
|
||||
additionalProperties: false
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'object',
|
||||
additionalProperties: false
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge additionalProperties schema with true value', () => {
|
||||
const schema1 = {
|
||||
type: 'object',
|
||||
additionalProperties: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'object',
|
||||
additionalProperties: true
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'object',
|
||||
additionalProperties: {
|
||||
type: 'string'
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge two additionalProperties schemas', () => {
|
||||
const schema1 = {
|
||||
type: 'object',
|
||||
additionalProperties: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'object',
|
||||
additionalProperties: {
|
||||
type: 'string', minLength: 1
|
||||
}
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'object',
|
||||
additionalProperties: {
|
||||
type: 'string', minLength: 1
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge two additionalProperties and properties schemas', () => {
|
||||
const schema1 = {
|
||||
type: 'object',
|
||||
additionalProperties: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
foo: { type: ['string', 'number'] }
|
||||
},
|
||||
additionalProperties: {
|
||||
type: 'string', minLength: 1
|
||||
}
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'object',
|
||||
properties: {
|
||||
foo: { type: 'string' }
|
||||
},
|
||||
additionalProperties: {
|
||||
type: 'string', minLength: 1
|
||||
}
|
||||
})
|
||||
})
|
||||
43
node_modules/@fastify/merge-json-schemas/test/all-of.test.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert/strict')
|
||||
const { test } = require('node:test')
|
||||
const { mergeSchemas } = require('../index')
|
||||
const { defaultResolver } = require('./utils')
|
||||
|
||||
test('should merge empty schema and allOf keyword', () => {
|
||||
const schema1 = {}
|
||||
const schema2 = {
|
||||
allOf: [
|
||||
{ type: 'string', const: 'foo' }
|
||||
]
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
allOf: [
|
||||
{ type: 'string', const: 'foo' }
|
||||
]
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge schemas with allOfs schemas', () => {
|
||||
const schema1 = {
|
||||
allOf: [
|
||||
{ type: 'number', minimum: 0 }
|
||||
]
|
||||
}
|
||||
const schema2 = {
|
||||
allOf: [
|
||||
{ type: 'string', const: 'foo' }
|
||||
]
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
allOf: [
|
||||
{ type: 'number', minimum: 0 },
|
||||
{ type: 'string', const: 'foo' }
|
||||
]
|
||||
})
|
||||
})
|
||||
81
node_modules/@fastify/merge-json-schemas/test/any-of.test.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert/strict')
|
||||
const { test } = require('node:test')
|
||||
const { mergeSchemas } = require('../index')
|
||||
const { defaultResolver } = require('./utils')
|
||||
|
||||
test('should merge empty schema and anyOf keyword', () => {
|
||||
const schema1 = {}
|
||||
const schema2 = {
|
||||
anyOf: [
|
||||
{ type: 'string', const: 'foo' }
|
||||
]
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
anyOf: [
|
||||
{ type: 'string', const: 'foo' }
|
||||
]
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge two schemas with anyOfs schemas', () => {
|
||||
const schema1 = {
|
||||
anyOf: [
|
||||
{ type: 'string', enum: ['foo1', 'foo2', 'foo3'] },
|
||||
{ type: 'string', enum: ['foo3', 'foo4', 'foo5'] }
|
||||
]
|
||||
}
|
||||
const schema2 = {
|
||||
anyOf: [
|
||||
{ type: 'string', enum: ['foo2', 'foo3', 'foo4'] },
|
||||
{ type: 'string', enum: ['foo3', 'foo6', 'foo7'] }
|
||||
]
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
anyOf: [
|
||||
{ type: 'string', enum: ['foo2', 'foo3'] },
|
||||
{ type: 'string', enum: ['foo3'] },
|
||||
{ type: 'string', enum: ['foo3', 'foo4'] },
|
||||
{ type: 'string', enum: ['foo3'] }
|
||||
]
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge three schemas with anyOfs schemas', () => {
|
||||
const schema1 = {
|
||||
anyOf: [
|
||||
{ type: 'string', enum: ['foo1', 'foo2', 'foo3', 'foo4'] },
|
||||
{ type: 'string', enum: ['foo3', 'foo4', 'foo5', 'foo7'] }
|
||||
]
|
||||
}
|
||||
const schema2 = {
|
||||
anyOf: [
|
||||
{ type: 'string', enum: ['foo2', 'foo3', 'foo4', 'foo5'] },
|
||||
{ type: 'string', enum: ['foo3', 'foo6', 'foo7', 'foo8'] }
|
||||
]
|
||||
}
|
||||
|
||||
const schema3 = {
|
||||
anyOf: [
|
||||
{ type: 'string', enum: ['foo1', 'foo3', 'foo5', 'foo7'] },
|
||||
{ type: 'string', enum: ['foo2', 'foo4', 'foo6', 'foo8'] }
|
||||
]
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2, schema3], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
anyOf: [
|
||||
{ type: 'string', enum: ['foo3'] },
|
||||
{ type: 'string', enum: ['foo2', 'foo4'] },
|
||||
{ type: 'string', enum: ['foo3'] },
|
||||
{ type: 'string', enum: ['foo3', 'foo5'] },
|
||||
{ type: 'string', enum: ['foo4'] },
|
||||
{ type: 'string', enum: ['foo3', 'foo7'] }
|
||||
]
|
||||
})
|
||||
})
|
||||
58
node_modules/@fastify/merge-json-schemas/test/const.test.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert/strict')
|
||||
const { test } = require('node:test')
|
||||
const { mergeSchemas } = require('../index')
|
||||
const { defaultResolver } = require('./utils')
|
||||
|
||||
test('should merge empty schema and string const keyword', () => {
|
||||
const schema1 = { type: 'string' }
|
||||
const schema2 = { type: 'string', const: 'foo' }
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, { type: 'string', const: 'foo' })
|
||||
})
|
||||
|
||||
test('should merge equal string const keywords', () => {
|
||||
const schema1 = { type: 'string', const: 'foo' }
|
||||
const schema2 = { type: 'string', const: 'foo' }
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, { type: 'string', const: 'foo' })
|
||||
})
|
||||
|
||||
test('should merge equal object const keywords', () => {
|
||||
const schema1 = { type: 'string', const: { foo: 'bar' } }
|
||||
const schema2 = { type: 'string', const: { foo: 'bar' } }
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, { type: 'string', const: { foo: 'bar' } })
|
||||
})
|
||||
|
||||
test('should throw an error if const string values are different', () => {
|
||||
const schema1 = { type: 'string', const: 'foo' }
|
||||
const schema2 = { type: 'string', const: 'bar' }
|
||||
|
||||
assert.throws(() => {
|
||||
mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
}, {
|
||||
name: 'JsonSchemaMergeError',
|
||||
code: 'JSON_SCHEMA_MERGE_ERROR',
|
||||
message: 'Failed to merge "const" keyword schemas.',
|
||||
schemas: ['foo', 'bar']
|
||||
})
|
||||
})
|
||||
|
||||
test('should throw an error if const object values are different', () => {
|
||||
const schema1 = { type: 'object', const: { foo: 'bar' } }
|
||||
const schema2 = { type: 'object', const: { foo: 'baz' } }
|
||||
|
||||
assert.throws(() => {
|
||||
mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
}, {
|
||||
name: 'JsonSchemaMergeError',
|
||||
code: 'JSON_SCHEMA_MERGE_ERROR',
|
||||
message: 'Failed to merge "const" keyword schemas.',
|
||||
schemas: [{ foo: 'bar' }, { foo: 'baz' }]
|
||||
})
|
||||
})
|
||||
55
node_modules/@fastify/merge-json-schemas/test/contains.test.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert/strict')
|
||||
const { test } = require('node:test')
|
||||
const { mergeSchemas } = require('../index')
|
||||
const { defaultResolver } = require('./utils')
|
||||
|
||||
test('should merge empty schema and contains keyword', () => {
|
||||
const schema1 = {}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
contains: {
|
||||
type: 'integer',
|
||||
minimum: 5
|
||||
}
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
contains: {
|
||||
type: 'integer',
|
||||
minimum: 5
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('should merge two contains keyword schemas', () => {
|
||||
const schema1 = {
|
||||
type: 'array',
|
||||
contains: {
|
||||
type: 'integer',
|
||||
minimum: 5,
|
||||
maximum: 14
|
||||
}
|
||||
}
|
||||
const schema2 = {
|
||||
type: 'array',
|
||||
contains: {
|
||||
type: 'integer',
|
||||
minimum: 9,
|
||||
maximum: 10
|
||||
}
|
||||
}
|
||||
|
||||
const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
|
||||
assert.deepStrictEqual(mergedSchema, {
|
||||
type: 'array',
|
||||
contains: {
|
||||
type: 'integer',
|
||||
minimum: 9,
|
||||
maximum: 10
|
||||
}
|
||||
})
|
||||
})
|
||||
50 node_modules/@fastify/merge-json-schemas/test/custom-resolvers.test.js generated vendored Normal file
@@ -0,0 +1,50 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should use a custom resolver instead of default one', () => {
  const schema1 = { type: 'string' }
  const schema2 = { type: 'number' }

  const mergedSchema = mergeSchemas(
    [schema1, schema2],
    {
      resolvers: {
        type: (keyword, values, mergedSchema, schemas) => {
          assert.strictEqual(keyword, 'type')
          assert.deepStrictEqual(values, ['string', 'number'])
          assert.deepStrictEqual(schemas, [schema1, schema2])

          mergedSchema[keyword] = 'custom-type'
        }
      },
      defaultResolver
    }
  )
  assert.deepStrictEqual(mergedSchema, { type: 'custom-type' })
})

test('should use a custom resolver for unknown keyword', () => {
  const schema1 = { customKeyword: 'string' }
  const schema2 = { customKeyword: 'number' }

  const mergedSchema = mergeSchemas(
    [schema1, schema2],
    {
      resolvers: {
        customKeyword: (keyword, values, mergedSchema, schemas) => {
          assert.strictEqual(keyword, 'customKeyword')
          assert.deepStrictEqual(values, ['string', 'number'])
          assert.deepStrictEqual(schemas, [schema1, schema2])

          mergedSchema[keyword] = 'custom-type'
        }
      },
      defaultResolver
    }
  )
  assert.deepStrictEqual(mergedSchema, { customKeyword: 'custom-type' })
})
111 node_modules/@fastify/merge-json-schemas/test/default-resolver.test.js generated vendored Normal file
@@ -0,0 +1,111 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')

test('should merge an unknown keyword with an empty schema', () => {
  const schema1 = {}
  const schema2 = { customKeyword: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2])
  assert.deepStrictEqual(mergedSchema, { customKeyword: 42 })
})

test('should merge two equal unknown keywords', () => {
  const schema1 = { customKeyword: 42 }
  const schema2 = { customKeyword: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2])
  assert.deepStrictEqual(mergedSchema, { customKeyword: 42 })
})

test('should merge two equal unknown object keywords', () => {
  const schema1 = { type: 'string', customKeyword: { foo: 'bar' } }
  const schema2 = { type: 'string', customKeyword: { foo: 'bar' } }

  const mergedSchema = mergeSchemas([schema1, schema2])
  assert.deepStrictEqual(mergedSchema, {
    type: 'string',
    customKeyword: { foo: 'bar' }
  })
})

test('should use custom defaultResolver if passed', () => {
  const schema1 = { type: 'string', customKeyword: 42 }
  const schema2 = { type: 'string', customKeyword: 43 }

  const mergedSchema = mergeSchemas(
    [schema1, schema2],
    {
      defaultResolver: (keyword, values, mergedSchema, schemas) => {
        assert.strictEqual(keyword, 'customKeyword')
        assert.deepStrictEqual(values, [42, 43])
        assert.deepStrictEqual(schemas, [schema1, schema2])

        mergedSchema.customKeyword = 'custom-value-42'
      }
    }
  )
  assert.deepStrictEqual(mergedSchema, {
    type: 'string',
    customKeyword: 'custom-value-42'
  })
})

test('should throw an error when merging two different unknown keywords', () => {
  const schema1 = { customKeyword: 42 }
  const schema2 = { customKeyword: 43 }

  assert.throws(() => {
    mergeSchemas([schema1, schema2])
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Resolver for "customKeyword" keyword not found.',
    schemas: [42, 43]
  })
})

test('should throw an error when merging two different unknown keywords with onConflict = throw', () => {
  const schema1 = { customKeyword: 42 }
  const schema2 = { customKeyword: 43 }

  assert.throws(() => {
    mergeSchemas([schema1, schema2], { onConflict: 'throw' })
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Resolver for "customKeyword" keyword not found.',
    schemas: [42, 43]
  })
})

test('should skip the keyword schemas if onConflict = skip', () => {
  const schema1 = { customKeyword: 42 }
  const schema2 = { customKeyword: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { onConflict: 'skip' })
  assert.deepStrictEqual(mergedSchema, {})
})

test('should pick first schema if onConflict = first', () => {
  const schema1 = { customKeyword: 42 }
  const schema2 = { customKeyword: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { onConflict: 'first' })
  assert.deepStrictEqual(mergedSchema, { customKeyword: 42 })
})

test('should throw an error if passed a wrong onConflict value', () => {
  const schema1 = { customKeyword: 42 }
  const schema2 = { customKeyword: 43 }

  assert.throws(() => {
    mergeSchemas([schema1, schema2], { onConflict: 'foo' })
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Invalid "onConflict" option: "foo".'
  })
})
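Taken together, the default-resolver tests above pin down how mergeSchemas treats a keyword it has no resolver for: the default behaviour is to throw, while the onConflict option can downgrade that to skipping the keyword or keeping the first value. A minimal usage sketch of those modes, assuming the package root exports mergeSchemas the same way these tests' ../index does (the require path and variable names are illustrative):

const { mergeSchemas } = require('@fastify/merge-json-schemas')

// Two schemas that disagree on a keyword without a built-in resolver.
const a = { customKeyword: 42 }
const b = { customKeyword: 43 }

// 'throw' (the default) raises JsonSchemaMergeError; 'skip' drops the keyword;
// 'first' keeps the value from the first schema in the list.
console.log(mergeSchemas([a, b], { onConflict: 'skip' }))  // {}
console.log(mergeSchemas([a, b], { onConflict: 'first' })) // { customKeyword: 42 }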
50 node_modules/@fastify/merge-json-schemas/test/default.test.js generated vendored Normal file
@@ -0,0 +1,50 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and string default keyword', () => {
  const schema1 = { type: 'string' }
  const schema2 = { type: 'string', default: 'foo' }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', default: 'foo' })
})

test('should merge equal string default keywords', () => {
  const schema1 = { type: 'string', default: 'foo' }
  const schema2 = { type: 'string', default: 'foo' }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', default: 'foo' })
})

test('should throw an error if default string values are different', () => {
  const schema1 = { type: 'string', default: 'foo' }
  const schema2 = { type: 'string', default: 'bar' }

  assert.throws(() => {
    mergeSchemas([schema1, schema2], { defaultResolver })
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Failed to merge "default" keyword schemas.',
    schemas: ['foo', 'bar']
  })
})

test('should throw an error if default object values are different', () => {
  const schema1 = { type: 'object', default: { foo: 'bar' } }
  const schema2 = { type: 'object', default: { foo: 'baz' } }

  assert.throws(() => {
    mergeSchemas([schema1, schema2], { defaultResolver })
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Failed to merge "default" keyword schemas.',
    schemas: [{ foo: 'bar' }, { foo: 'baz' }]
  })
})
46 node_modules/@fastify/merge-json-schemas/test/definitions.test.js generated vendored Normal file
@@ -0,0 +1,46 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and definitions keyword', () => {
  const schema1 = {}
  const schema2 = {
    definitions: {
      foo: { type: 'string', const: 'foo' }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    definitions: {
      foo: { type: 'string', const: 'foo' }
    }
  })
})

test('should merge two definition schemas', () => {
  const schema1 = {
    definitions: {
      foo: { type: 'string', enum: ['foo', 'bar'] },
      bar: { type: 'string', enum: ['foo', 'bar'] }
    }
  }
  const schema2 = {
    definitions: {
      foo: { type: 'string', enum: ['foo'] },
      baz: { type: 'string' }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    definitions: {
      foo: { type: 'string', enum: ['foo'] },
      bar: { type: 'string', enum: ['foo', 'bar'] },
      baz: { type: 'string' }
    }
  })
})
46 node_modules/@fastify/merge-json-schemas/test/defs.test.js generated vendored Normal file
@@ -0,0 +1,46 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and $defs keyword', () => {
  const schema1 = {}
  const schema2 = {
    $defs: {
      foo: { type: 'string', const: 'foo' }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    $defs: {
      foo: { type: 'string', const: 'foo' }
    }
  })
})

test('should merge two definition schemas', () => {
  const schema1 = {
    $defs: {
      foo: { type: 'string', enum: ['foo', 'bar'] },
      bar: { type: 'string', enum: ['foo', 'bar'] }
    }
  }
  const schema2 = {
    $defs: {
      foo: { type: 'string', enum: ['foo'] },
      baz: { type: 'string' }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    $defs: {
      foo: { type: 'string', enum: ['foo'] },
      bar: { type: 'string', enum: ['foo', 'bar'] },
      baz: { type: 'string' }
    }
  })
})
75 node_modules/@fastify/merge-json-schemas/test/dependencies.test.js generated vendored Normal file
@@ -0,0 +1,75 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and dependencies keyword', () => {
  const schema1 = {}
  const schema2 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' }
    },
    dependencies: {
      foo: ['bar']
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' }
    },
    dependencies: {
      foo: ['bar']
    }
  })
})

test('should merge two dependencies keyword schemas', () => {
  const schema1 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      que: { type: 'string' }
    },
    dependencies: {
      foo: ['bar', 'que'],
      bar: ['que']
    }
  }
  const schema2 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      baz: { type: 'string' }
    },
    dependencies: {
      foo: ['baz'],
      baz: ['foo']
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      que: { type: 'string' },
      baz: { type: 'string' }
    },
    dependencies: {
      foo: ['bar', 'que', 'baz'],
      bar: ['que'],
      baz: ['foo']
    }
  })
})
75 node_modules/@fastify/merge-json-schemas/test/dependent-required.test.js generated vendored Normal file
@@ -0,0 +1,75 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and dependentRequired keyword', () => {
  const schema1 = {}
  const schema2 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' }
    },
    dependentRequired: {
      foo: ['bar']
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' }
    },
    dependentRequired: {
      foo: ['bar']
    }
  })
})

test('should merge two dependentRequired keyword schemas', () => {
  const schema1 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      que: { type: 'string' }
    },
    dependentRequired: {
      foo: ['bar', 'que'],
      bar: ['que']
    }
  }
  const schema2 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      baz: { type: 'string' }
    },
    dependentRequired: {
      foo: ['baz'],
      baz: ['foo']
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      que: { type: 'string' },
      baz: { type: 'string' }
    },
    dependentRequired: {
      foo: ['bar', 'que', 'baz'],
      bar: ['que'],
      baz: ['foo']
    }
  })
})
76 node_modules/@fastify/merge-json-schemas/test/dependent-schemas.test.js generated vendored Normal file
@@ -0,0 +1,76 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and dependentSchemas keyword', () => {
  const schema1 = {}
  const schema2 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' }
    },
    dependentSchemas: {
      foo: { required: ['bar'] }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' }
    },
    dependentSchemas: {
      foo: { required: ['bar'] }
    }
  })
})

test('should merge two dependentSchemas keyword schemas', () => {
  const schema1 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      que: { type: 'string' }
    },
    dependentSchemas: {
      foo: { required: ['bar', 'que'] },
      bar: { required: ['que'] }
    }
  }

  const schema2 = {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      baz: { type: 'string' }
    },
    dependentSchemas: {
      foo: { required: ['baz'] },
      baz: { required: ['foo'] }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    properties: {
      foo: { type: 'string' },
      bar: { type: 'string' },
      que: { type: 'string' },
      baz: { type: 'string' }
    },
    dependentSchemas: {
      foo: { required: ['bar', 'que', 'baz'] },
      bar: { required: ['que'] },
      baz: { required: ['foo'] }
    }
  })
})
44 node_modules/@fastify/merge-json-schemas/test/enum.test.js generated vendored Normal file
@@ -0,0 +1,44 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and string enum values', () => {
  const schema1 = { type: 'string' }
  const schema2 = { type: 'string', enum: ['foo', 'bar'] }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', enum: ['foo', 'bar'] })
})

test('should merge equal string enum values', () => {
  const schema1 = { type: 'string', enum: ['foo', 'bar'] }
  const schema2 = { type: 'string', enum: ['foo', 'bar'] }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', enum: ['foo', 'bar'] })
})

test('should merge different string enum values', () => {
  const schema1 = { type: 'string', enum: ['foo', 'bar'] }
  const schema2 = { type: 'string', enum: ['foo', 'baz'] }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', enum: ['foo'] })
})

test('should throw an error if can not merge enum values', () => {
  const schema1 = { type: 'string', enum: ['foo', 'bar'] }
  const schema2 = { type: 'string', enum: ['baz', 'qux'] }

  assert.throws(() => {
    mergeSchemas([schema1, schema2], { defaultResolver })
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Failed to merge "enum" keyword schemas.',
    schemas: [['foo', 'bar'], ['baz', 'qux']]
  })
})
30 node_modules/@fastify/merge-json-schemas/test/exclusive-maximum.test.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and exclusiveMaximum keyword', () => {
  const schema1 = { type: 'number' }
  const schema2 = { type: 'number', exclusiveMaximum: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'number', exclusiveMaximum: 42 })
})

test('should merge equal exclusiveMaximum values', () => {
  const schema1 = { type: 'number', exclusiveMaximum: 42 }
  const schema2 = { type: 'number', exclusiveMaximum: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'number', exclusiveMaximum: 42 })
})

test('should merge different exclusiveMaximum values', () => {
  const schema1 = { type: 'integer', exclusiveMaximum: 42 }
  const schema2 = { type: 'integer', exclusiveMaximum: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'integer', exclusiveMaximum: 42 })
})
30 node_modules/@fastify/merge-json-schemas/test/exclusive-minimum.test.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and exclusiveMinimum keyword', () => {
  const schema1 = { type: 'number' }
  const schema2 = { type: 'number', exclusiveMinimum: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'number', exclusiveMinimum: 42 })
})

test('should merge equal exclusiveMinimum values', () => {
  const schema1 = { type: 'number', exclusiveMinimum: 42 }
  const schema2 = { type: 'number', exclusiveMinimum: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'number', exclusiveMinimum: 42 })
})

test('should merge different exclusiveMinimum values', () => {
  const schema1 = { type: 'integer', exclusiveMinimum: 42 }
  const schema2 = { type: 'integer', exclusiveMinimum: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'integer', exclusiveMinimum: 43 })
})
36 node_modules/@fastify/merge-json-schemas/test/format.test.js generated vendored Normal file
@@ -0,0 +1,36 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and string format keyword', () => {
  const schema1 = { type: 'string' }
  const schema2 = { type: 'string', format: 'date-time' }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', format: 'date-time' })
})

test('should merge equal string format keywords', () => {
  const schema1 = { type: 'string', format: 'date-time' }
  const schema2 = { type: 'string', format: 'date-time' }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', format: 'date-time' })
})

test('should throw an error if format keyword values are different', () => {
  const schema1 = { type: 'string', format: 'date-time' }
  const schema2 = { type: 'string', format: 'date' }

  assert.throws(() => {
    mergeSchemas([schema1, schema2], { defaultResolver })
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Failed to merge "format" keyword schemas.',
    schemas: ['date-time', 'date']
  })
})
22 node_modules/@fastify/merge-json-schemas/test/id.test.js generated vendored Normal file
@@ -0,0 +1,22 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should skip $id keyword if they are equal', () => {
  const schema1 = { $id: 'foo', type: 'string' }
  const schema2 = { $id: 'foo', type: 'string' }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string' })
})

test('should skip $id keyword if they are different', () => {
  const schema1 = { $id: 'foo', type: 'string' }
  const schema2 = { $id: 'bar', type: 'string' }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string' })
})
550 node_modules/@fastify/merge-json-schemas/test/if-then-else.test.js generated vendored Normal file
@@ -0,0 +1,550 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and if/then/else keywords', () => {
  const schema1 = {}
  const schema2 = {
    if: {
      type: 'string',
      const: 'foo'
    },
    then: {
      type: 'string',
      const: 'bar'
    },
    else: {
      type: 'string',
      const: 'baz'
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    if: {
      type: 'string',
      const: 'foo'
    },
    then: {
      type: 'string',
      const: 'bar'
    },
    else: {
      type: 'string',
      const: 'baz'
    }
  })
})

test('should merge if/then/else schema with an empty schema', () => {
  const schema1 = {
    if: {
      type: 'string',
      const: 'foo'
    },
    then: {
      type: 'string',
      const: 'bar'
    },
    else: {
      type: 'string',
      const: 'baz'
    }
  }
  const schema2 = {}

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    if: {
      type: 'string',
      const: 'foo'
    },
    then: {
      type: 'string',
      const: 'bar'
    },
    else: {
      type: 'string',
      const: 'baz'
    }
  })
})

test('should merge two if/then/else schemas', () => {
  const schema1 = {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      }
    },
    else: {
      properties: {
        baz1: { type: 'string', const: 'baz1' }
      }
    }
  }
  const schema2 = {
    type: 'object',
    if: {
      properties: {
        foo2: { type: 'string', const: 'foo2' }
      }
    },
    then: {
      properties: {
        bar2: { type: 'string', const: 'bar2' }
      }
    },
    else: {
      properties: {
        baz2: { type: 'string', const: 'baz2' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      then: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        }
      },
      else: {
        properties: {
          baz2: { type: 'string', const: 'baz2' }
        }
      }
    },
    else: {
      properties: {
        baz1: { type: 'string', const: 'baz1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      then: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        }
      },
      else: {
        properties: {
          baz2: { type: 'string', const: 'baz2' }
        }
      }
    }
  })
})

test('should merge three if/then/else schemas', () => {
  const schema1 = {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      }
    },
    else: {
      properties: {
        baz1: { type: 'string', const: 'baz1' }
      }
    }
  }
  const schema2 = {
    type: 'object',
    if: {
      properties: {
        foo2: { type: 'string', const: 'foo2' }
      }
    },
    then: {
      properties: {
        bar2: { type: 'string', const: 'bar2' }
      }
    },
    else: {
      properties: {
        baz2: { type: 'string', const: 'baz2' }
      }
    }
  }
  const schema3 = {
    type: 'object',
    if: {
      properties: {
        foo3: { type: 'string', const: 'foo3' }
      }
    },
    then: {
      properties: {
        bar3: { type: 'string', const: 'bar3' }
      }
    },
    else: {
      properties: {
        baz3: { type: 'string', const: 'baz3' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2, schema3], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      then: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        },
        if: {
          properties: {
            foo3: { type: 'string', const: 'foo3' }
          }
        },
        then: {
          properties: {
            bar3: { type: 'string', const: 'bar3' }
          }
        },
        else: {
          properties: {
            baz3: { type: 'string', const: 'baz3' }
          }
        }
      },
      else: {
        properties: {
          baz2: { type: 'string', const: 'baz2' }
        },
        if: {
          properties: {
            foo3: { type: 'string', const: 'foo3' }
          }
        },
        then: {
          properties: {
            bar3: { type: 'string', const: 'bar3' }
          }
        },
        else: {
          properties: {
            baz3: { type: 'string', const: 'baz3' }
          }
        }
      }
    },
    else: {
      properties: {
        baz1: { type: 'string', const: 'baz1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      then: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        },
        if: {
          properties: {
            foo3: { type: 'string', const: 'foo3' }
          }
        },
        then: {
          properties: {
            bar3: { type: 'string', const: 'bar3' }
          }
        },
        else: {
          properties: {
            baz3: { type: 'string', const: 'baz3' }
          }
        }
      },
      else: {
        properties: {
          baz2: { type: 'string', const: 'baz2' }
        },
        if: {
          properties: {
            foo3: { type: 'string', const: 'foo3' }
          }
        },
        then: {
          properties: {
            bar3: { type: 'string', const: 'bar3' }
          }
        },
        else: {
          properties: {
            baz3: { type: 'string', const: 'baz3' }
          }
        }
      }
    }
  })
})

test('should merge two if/then keyword schemas', () => {
  const schema1 = {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      }
    }
  }

  const schema2 = {
    type: 'object',
    if: {
      properties: {
        foo2: { type: 'string', const: 'foo2' }
      }
    },
    then: {
      properties: {
        bar2: { type: 'string', const: 'bar2' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      then: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        }
      }
    }
  })
})

test('should merge two if/else keyword schemas', () => {
  const schema1 = {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    else: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      }
    }
  }

  const schema2 = {
    type: 'object',
    if: {
      properties: {
        foo2: { type: 'string', const: 'foo2' }
      }
    },
    else: {
      properties: {
        bar2: { type: 'string', const: 'bar2' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    else: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      else: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        }
      }
    }
  })
})

test('should merge two if/then and if/else keyword schemas', () => {
  const schema1 = {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      }
    }
  }

  const schema2 = {
    type: 'object',
    if: {
      properties: {
        foo2: { type: 'string', const: 'foo2' }
      }
    },
    else: {
      properties: {
        bar2: { type: 'string', const: 'bar2' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    then: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      else: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        }
      }
    }
  })
})

test('should merge two if/else and if/then keyword schemas', () => {
  const schema1 = {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    else: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      }
    }
  }

  const schema2 = {
    type: 'object',
    if: {
      properties: {
        foo2: { type: 'string', const: 'foo2' }
      }
    },
    then: {
      properties: {
        bar2: { type: 'string', const: 'bar2' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'object',
    if: {
      properties: {
        foo1: { type: 'string', const: 'foo1' }
      }
    },
    else: {
      properties: {
        bar1: { type: 'string', const: 'bar1' }
      },
      if: {
        properties: {
          foo2: { type: 'string', const: 'foo2' }
        }
      },
      then: {
        properties: {
          bar2: { type: 'string', const: 'bar2' }
        }
      }
    }
  })
})
152 node_modules/@fastify/merge-json-schemas/test/items.test.js generated vendored Normal file
@@ -0,0 +1,152 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and items keyword', () => {
  const schema1 = { type: 'array' }
  const schema2 = {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' }
      }
    }
  })
})

test('should merge two equal item schemas', () => {
  const schema1 = {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' }
      }
    }
  }

  const schema2 = {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' }
      }
    }
  })
})

test('should merge two different sets of item schemas', () => {
  const schema1 = {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' },
        bar: { type: 'number' }
      }
    }
  }

  const schema2 = {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' },
        baz: { type: 'boolean' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' },
        bar: { type: 'number' },
        baz: { type: 'boolean' }
      }
    }
  })
})

test('should merge two different sets of item schemas with additionalItems', () => {
  const schema1 = {
    type: 'array',
    items: [
      {
        type: 'object',
        properties: {
          foo: { type: 'string', const: 'foo' }
        }
      }
    ],
    additionalItems: {
      type: 'object',
      properties: {
        baz: { type: 'string', const: 'baz' }
      }
    }
  }

  const schema2 = {
    type: 'array',
    items: {
      type: 'object',
      properties: {
        foo: { type: 'string' },
        baz: { type: 'string' }
      }
    }
  }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {
    type: 'array',
    items: [
      {
        type: 'object',
        properties: {
          foo: { type: 'string', const: 'foo' },
          baz: { type: 'string' }
        }
      }
    ],
    additionalItems: {
      type: 'object',
      properties: {
        foo: { type: 'string' },
        baz: { type: 'string', const: 'baz' }
      }
    }
  })
})
30 node_modules/@fastify/merge-json-schemas/test/max-items.test.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and maxItems keyword', () => {
  const schema1 = { type: 'array' }
  const schema2 = { type: 'array', maxItems: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'array', maxItems: 42 })
})

test('should merge equal maxItems values', () => {
  const schema1 = { type: 'array', maxItems: 42 }
  const schema2 = { type: 'array', maxItems: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'array', maxItems: 42 })
})

test('should merge different maxItems values', () => {
  const schema1 = { type: 'array', maxItems: 42 }
  const schema2 = { type: 'array', maxItems: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'array', maxItems: 42 })
})
30 node_modules/@fastify/merge-json-schemas/test/max-length.test.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and maxLength keyword', () => {
  const schema1 = { type: 'string' }
  const schema2 = { type: 'string', maxLength: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', maxLength: 42 })
})

test('should merge equal maxLength values', () => {
  const schema1 = { type: 'string', maxLength: 42 }
  const schema2 = { type: 'string', maxLength: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', maxLength: 42 })
})

test('should merge different maxLength values', () => {
  const schema1 = { type: 'string', maxLength: 42 }
  const schema2 = { type: 'string', maxLength: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', maxLength: 42 })
})
30 node_modules/@fastify/merge-json-schemas/test/max-properties.test.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and maxProperties keyword', () => {
  const schema1 = { type: 'object' }
  const schema2 = { type: 'object', maxProperties: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'object', maxProperties: 42 })
})

test('should merge equal maxProperties values', () => {
  const schema1 = { type: 'object', maxProperties: 42 }
  const schema2 = { type: 'object', maxProperties: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'object', maxProperties: 42 })
})

test('should merge different maxProperties values', () => {
  const schema1 = { type: 'object', maxProperties: 42 }
  const schema2 = { type: 'object', maxProperties: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'object', maxProperties: 42 })
})
30 node_modules/@fastify/merge-json-schemas/test/maximum.test.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should merge empty schema and maximum keyword', () => {
  const schema1 = { type: 'number' }
  const schema2 = { type: 'number', maximum: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'number', maximum: 42 })
})

test('should merge equal maximum values', () => {
  const schema1 = { type: 'number', maximum: 42 }
  const schema2 = { type: 'number', maximum: 42 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'number', maximum: 42 })
})

test('should merge different maximum values', () => {
  const schema1 = { type: 'integer', maximum: 42 }
  const schema2 = { type: 'integer', maximum: 43 }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'integer', maximum: 42 })
})
29 node_modules/@fastify/merge-json-schemas/test/merge-schema.test.js generated vendored Normal file
@@ -0,0 +1,29 @@
'use strict'

const assert = require('node:assert/strict')
const { test } = require('node:test')
const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

test('should return an empty schema if passing an empty array', () => {
  const mergedSchema = mergeSchemas([], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, {})
})

test('should return true if passing all true values', () => {
  const mergedSchema = mergeSchemas([true, true, true], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, true)
})

test('should return false if passing all false values', () => {
  const mergedSchema = mergeSchemas([false, false, false], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, false)
})

test('should return false if passing at least one false schema', () => {
  const schema1 = { type: 'string' }
  const schema2 = { type: 'number' }

  const mergedSchema = mergeSchemas([schema1, schema2, false], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, false)
})
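The vendored tests above also document the resolver contract used throughout this package: a resolver receives the keyword name, the collected values, the merged schema being built, and the original input schemas, and writes its result onto the merged schema. A short sketch of a custom resolver under that contract, assuming the package root exposes the same mergeSchemas export as the tests' ../index; the 'description' keyword and the joining strategy are illustrative, not part of the library:

const { mergeSchemas } = require('@fastify/merge-json-schemas')

const merged = mergeSchemas(
  [{ description: 'a' }, { description: 'b' }],
  {
    resolvers: {
      // Resolve conflicting descriptions by concatenating them instead of throwing.
      description: (keyword, values, mergedSchema) => {
        mergedSchema[keyword] = values.filter(Boolean).join('; ')
      }
    }
  }
)

console.log(merged) // { description: 'a; b' }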
Some files were not shown because too many files have changed in this diff.