fastify core functionality example tests
285  node_modules/thread-stream/test/base.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,285 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const { readFile } = require('fs')
const { file } = require('./helper')
const ThreadStream = require('..')
const { MessageChannel } = require('worker_threads')
const { once } = require('events')

test('base sync=true', function (t) {
  t.plan(15)

  const dest = file()
  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: true
  })

  t.same(stream.writableObjectMode, false)

  t.same(stream.writableFinished, false)
  stream.on('finish', () => {
    t.same(stream.writableFinished, true)
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })

  t.same(stream.closed, false)
  stream.on('close', () => {
    t.same(stream.closed, true)
    t.notOk(stream.writable)
    t.pass('close emitted')
  })

  t.same(stream.writableNeedDrain, false)
  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))
  t.ok(stream.writable)

  t.same(stream.writableEnded, false)
  stream.end()
  t.same(stream.writableEnded, true)
})

test('overflow sync=true', function (t) {
  t.plan(3)

  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 128,
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: true
  })

  let count = 0

  // Write 10 chars, 20 times
  function write () {
    if (count++ === 20) {
      stream.end()
      return
    }

    stream.write('aaaaaaaaaa')
    // do not wait for drain event
    setImmediate(write)
  }

  write()

  stream.on('finish', () => {
    t.pass('finish emitted')
  })

  stream.on('close', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data.length, 200)
    })
  })
})

test('overflow sync=false', function (t) {
  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 128,
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: false
  })

  let count = 0

  t.same(stream.writableNeedDrain, false)

  // Write 10 chars, 20 times
  function write () {
    if (count++ === 20) {
      t.pass('end sent')
      stream.end()
      return
    }

    if (!stream.write('aaaaaaaaaa')) {
      t.same(stream.writableNeedDrain, true)
    }
    // do not wait for drain event
    setImmediate(write)
  }

  write()

  stream.on('drain', () => {
    t.same(stream.writableNeedDrain, false)
    t.pass('drain')
  })

  stream.on('finish', () => {
    t.pass('finish emitted')
  })

  stream.on('close', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data.length, 200)
      t.end()
    })
  })
})

test('over the bufferSize at startup', function (t) {
  t.plan(6)

  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 10,
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: true
  })

  stream.on('finish', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })

  stream.on('close', () => {
    t.pass('close emitted')
  })

  t.ok(stream.write('hello'))
  t.ok(stream.write(' world\n'))
  t.ok(stream.write('something else\n'))

  stream.end()
})

test('over the bufferSize at startup (async)', function (t) {
  t.plan(6)

  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 10,
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: false
  })

  t.ok(stream.write('hello'))
  t.notOk(stream.write(' world\n'))
  t.notOk(stream.write('something else\n'))

  stream.end()

  stream.on('finish', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })

  stream.on('close', () => {
    t.pass('close emitted')
  })
})

test('flushSync sync=false', function (t) {
  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 128,
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: false
  })

  stream.on('drain', () => {
    t.pass('drain')
    stream.end()
  })

  stream.on('finish', () => {
    t.pass('finish emitted')
  })

  stream.on('close', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data.length, 200)
      t.end()
    })
  })

  for (let count = 0; count < 20; count++) {
    stream.write('aaaaaaaaaa')
  }
  stream.flushSync()
})

test('pass down MessagePorts', async function (t) {
  t.plan(3)

  const { port1, port2 } = new MessageChannel()
  const stream = new ThreadStream({
    filename: join(__dirname, 'port.js'),
    workerData: { port: port1 },
    workerOpts: {
      transferList: [port1]
    },
    sync: false
  })
  t.teardown(() => {
    stream.end()
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  const [strings] = await once(port2, 'message')

  t.equal(strings, 'hello world\nsomething else\n')
})

test('destroy does not error', function (t) {
  t.plan(5)

  const dest = file()
  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: false
  })

  stream.on('ready', () => {
    t.pass('ready emitted')
    stream.worker.terminate()
  })

  stream.on('error', (err) => {
    t.equal(err.message, 'the worker thread exited')
    stream.flush((err) => {
      t.equal(err.message, 'the worker has exited')
    })
    t.doesNotThrow(() => stream.flushSync())
    t.doesNotThrow(() => stream.end())
  })
})

test('syntax error', function (t) {
  t.plan(1)

  const stream = new ThreadStream({
    filename: join(__dirname, 'syntax-error.mjs')
  })

  stream.on('error', (err) => {
    t.equal(err.message, 'Unexpected end of input')
  })
})
38  node_modules/thread-stream/test/bench.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,38 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const ThreadStream = require('..')
const { file } = require('./helper')

const MAX = 1000

let str = ''

for (let i = 0; i < 10; i++) {
  str += 'hello'
}

test('base', function (t) {
  const dest = file()
  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest }
  })
  let runs = 0
  function benchThreadStream () {
    if (++runs === 1000) {
      stream.end()
      return
    }

    for (let i = 0; i < MAX; i++) {
      stream.write(str)
    }
    setImmediate(benchThreadStream)
  }
  benchThreadStream()
  stream.on('finish', function () {
    t.end()
  })
})
60  node_modules/thread-stream/test/bundlers.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,60 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const { file } = require('./helper')
const ThreadStream = require('..')

test('bundlers support with .js file', function (t) {
  t.plan(1)

  globalThis.__bundlerPathsOverrides = {
    'thread-stream-worker': join(__dirname, 'custom-worker.js')
  }

  const dest = file()

  process.on('uncaughtException', error => {
    console.log(error)
  })

  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest },
    sync: true
  })

  stream.worker.removeAllListeners('message')
  stream.worker.once('message', message => {
    t.equal(message.code, 'CUSTOM-WORKER-CALLED')
  })

  stream.end()
})

test('bundlers support with .mjs file', function (t) {
  t.plan(1)

  globalThis.__bundlerPathsOverrides = {
    'thread-stream-worker': join(__dirname, 'custom-worker.js')
  }

  const dest = file()

  process.on('uncaughtException', error => {
    console.log(error)
  })

  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file.mjs'),
    workerData: { dest },
    sync: true
  })

  stream.worker.removeAllListeners('message')
  stream.worker.once('message', message => {
    t.equal(message.code, 'CUSTOM-WORKER-CALLED')
  })

  stream.end()
})
37  node_modules/thread-stream/test/close-on-gc.js  (generated, vendored, Normal file)
@@ -0,0 +1,37 @@
'use strict'

const { join } = require('path')
const ThreadStream = require('..')
const assert = require('assert')

let worker = null

function setup () {
  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file.js'),
    workerData: { dest: process.argv[2] },
    sync: true
  })

  worker = stream.worker

  stream.write('hello')
  stream.write(' ')
  stream.write('world\n')
  stream.flushSync()
  stream.unref()

  // the stream object goes out of scope here
  setImmediate(gc) // eslint-disable-line
}

setup()

let exitEmitted = false
worker.on('exit', function () {
  exitEmitted = true
})

process.on('exit', function () {
  assert.strictEqual(exitEmitted, true)
})
80  node_modules/thread-stream/test/commonjs-fallback.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,80 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const { MessageChannel } = require('worker_threads')
const { once } = require('events')
const ThreadStream = require('..')

const isYarnPnp = process.versions.pnp !== undefined

test('yarn module resolution', { skip: !isYarnPnp }, t => {
  t.plan(6)

  const modulePath = require.resolve('pino-elasticsearch')
  t.match(modulePath, /.*\.zip.*/)

  const stream = new ThreadStream({
    filename: modulePath,
    workerData: { node: null },
    sync: true
  })

  t.same(stream.writableErrored, null)
  stream.on('error', (err) => {
    t.same(stream.writableErrored, err)
    t.pass('error emitted')
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.writable)
  stream.end()
})

test('yarn module resolution for directories with special characters', { skip: !isYarnPnp }, async t => {
  t.plan(3)

  const { port1, port2 } = new MessageChannel()
  const stream = new ThreadStream({
    filename: join(__dirname, 'dir with spaces', 'test-package.zip', 'worker.js'),
    workerData: { port: port1 },
    workerOpts: {
      transferList: [port1]
    },
    sync: false
  })
  t.teardown(() => {
    stream.end()
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  const [strings] = await once(port2, 'message')

  t.equal(strings, 'hello world\nsomething else\n')
})

test('yarn module resolution for typescript commonjs modules', { skip: !isYarnPnp }, async t => {
  t.plan(3)

  const { port1, port2 } = new MessageChannel()
  const stream = new ThreadStream({
    filename: join(__dirname, 'ts-commonjs-default-export.zip', 'worker.js'),
    workerData: { port: port1 },
    workerOpts: {
      transferList: [port1]
    },
    sync: false
  })
  t.teardown(() => {
    stream.end()
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  const [strings] = await once(port2, 'message')

  t.equal(strings, 'hello world\nsomething else\n')
})
21  node_modules/thread-stream/test/context.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,21 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const ThreadStream = require('..')
const { version } = require('../package.json')
require('why-is-node-running')

test('get context', (t) => {
  const stream = new ThreadStream({
    filename: join(__dirname, 'get-context.js'),
    workerData: {},
    sync: true
  })
  t.on('end', () => stream.end())
  stream.on('context', (ctx) => {
    t.same(ctx.threadStreamVersion, version)
    t.end()
  })
  stream.write('hello')
})
16  node_modules/thread-stream/test/create-and-exit.js  (generated, vendored, Normal file)
@@ -0,0 +1,16 @@
'use strict'

const { join } = require('path')
const ThreadStream = require('..')

const stream = new ThreadStream({
  filename: join(__dirname, 'to-file.js'),
  workerData: { dest: process.argv[2] },
  sync: true
})

stream.write('hello')
stream.write(' ')
stream.write('world\n')
stream.flushSync()
stream.unref()
9  node_modules/thread-stream/test/custom-worker.js  (generated, vendored, Normal file)
@@ -0,0 +1,9 @@
'use strict'

const { parentPort } = require('worker_threads')

parentPort.postMessage({
  code: 'CUSTOM-WORKER-CALLED'
})

require('../lib/worker')
BIN  node_modules/thread-stream/test/dir with spaces/test-package.zip  (generated, vendored, Normal file)
Binary file not shown.
22  node_modules/thread-stream/test/emit-event.js  (generated, vendored, Normal file)
@@ -0,0 +1,22 @@
'use strict'

const { Writable } = require('stream')
const parentPort = require('worker_threads').parentPort

async function run () {
  return new Writable({
    autoDestroy: true,
    write (chunk, enc, cb) {
      if (parentPort) {
        parentPort.postMessage({
          code: 'EVENT',
          name: 'socketError',
          args: ['list', 'of', 'args', 123, new Error('unable to write data to the TCP socket')]
        })
      }
      cb()
    }
  })
}

module.exports = run
61  node_modules/thread-stream/test/end.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,61 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const { readFile } = require('fs')
const { file } = require('./helper')
const ThreadStream = require('..')

test('destroy support', function (t) {
  t.plan(7)

  const dest = file()
  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file-on-destroy.js'),
    workerData: { dest },
    sync: true
  })

  stream.on('close', () => {
    t.notOk(stream.writable)
    t.pass('close emitted')
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))
  t.ok(stream.writable)

  stream.end()

  readFile(dest, 'utf8', (err, data) => {
    t.error(err)
    t.equal(data, 'hello world\nsomething else\n')
  })
})

test('synchronous _final support', function (t) {
  t.plan(7)

  const dest = file()
  const stream = new ThreadStream({
    filename: join(__dirname, 'to-file-on-final.js'),
    workerData: { dest },
    sync: true
  })

  stream.on('close', () => {
    t.notOk(stream.writable)
    t.pass('close emitted')
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))
  t.ok(stream.writable)

  stream.end()

  readFile(dest, 'utf8', (err, data) => {
    t.error(err)
    t.equal(data, 'hello world\nsomething else\n')
  })
})
14  node_modules/thread-stream/test/error.js  (generated, vendored, Normal file)
@@ -0,0 +1,14 @@
'use strict'

const { Writable } = require('stream')

async function run (opts) {
  const stream = new Writable({
    write (chunk, enc, cb) {
      cb(new Error('kaboom'))
    }
  })
  return stream
}

module.exports = run
47  node_modules/thread-stream/test/esm.test.mjs  (generated, vendored, Normal file)
@@ -0,0 +1,47 @@
import { test } from 'tap'
import { readFile } from 'fs'
import ThreadStream from '../index.js'
import { join } from 'desm'
import { pathToFileURL } from 'url'
import { file } from './helper.js'

function basic (text, filename) {
  test(text, function (t) {
    t.plan(5)

    const dest = file()
    const stream = new ThreadStream({
      filename,
      workerData: { dest },
      sync: true
    })

    stream.on('finish', () => {
      readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'hello world\nsomething else\n')
      })
    })

    stream.on('close', () => {
      t.pass('close emitted')
    })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    stream.end()
  })
}

basic('esm with path', join(import.meta.url, 'to-file.mjs'))
basic('esm with file URL', pathToFileURL(join(import.meta.url, 'to-file.mjs')).href)

basic('(ts -> es6) esm with path', join(import.meta.url, 'ts', 'to-file.es6.mjs'))
basic('(ts -> es6) esm with file URL', pathToFileURL(join(import.meta.url, 'ts', 'to-file.es6.mjs')).href)

basic('(ts -> es2017) esm with path', join(import.meta.url, 'ts', 'to-file.es2017.mjs'))
basic('(ts -> es2017) esm with file URL', pathToFileURL(join(import.meta.url, 'ts', 'to-file.es2017.mjs')).href)

basic('(ts -> esnext) esm with path', join(import.meta.url, 'ts', 'to-file.esnext.mjs'))
basic('(ts -> esnext) esm with file URL', pathToFileURL(join(import.meta.url, 'ts', 'to-file.esnext.mjs')).href)
23  node_modules/thread-stream/test/event.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,23 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const ThreadStream = require('..')

test('event propagate', t => {
  const stream = new ThreadStream({
    filename: join(__dirname, 'emit-event.js'),
    workerData: {},
    sync: true
  })
  t.on('end', () => stream.end())
  stream.on('socketError', function (a, b, c, n, error) {
    t.same(a, 'list')
    t.same(b, 'of')
    t.same(c, 'args')
    t.same(n, 123)
    t.same(error, new Error('unable to write data to the TCP socket'))
    t.end()
  })
  stream.write('hello')
})
14  node_modules/thread-stream/test/exit.js  (generated, vendored, Normal file)
@@ -0,0 +1,14 @@
'use strict'

const { Writable } = require('stream')

async function run (opts) {
  const stream = new Writable({
    write (chunk, enc, cb) {
      process.exit(1)
    }
  })
  return stream
}

module.exports = run
22  node_modules/thread-stream/test/get-context.js  (generated, vendored, Normal file)
@@ -0,0 +1,22 @@
'use strict'

const { Writable } = require('stream')
const parentPort = require('worker_threads').parentPort

async function run (opts) {
  return new Writable({
    autoDestroy: true,
    write (chunk, enc, cb) {
      if (parentPort) {
        parentPort.postMessage({
          code: 'EVENT',
          name: 'context',
          args: opts.$context
        })
      }
      cb()
    }
  })
}

module.exports = run
1  node_modules/thread-stream/test/helper.d.ts  (generated, vendored, Normal file)
@@ -0,0 +1 @@
export declare function file(): string
35  node_modules/thread-stream/test/helper.js  (generated, vendored, Normal file)
@@ -0,0 +1,35 @@
'use strict'

const { join } = require('path')
const { tmpdir } = require('os')
const { unlinkSync } = require('fs')
const t = require('tap')

const files = []
let count = 0

function file () {
  const file = join(tmpdir(), `thread-stream-${process.pid}-${count++}`)
  files.push(file)
  return file
}

process.on('beforeExit', () => {
  t.comment('unlink files')
  for (const file of files) {
    try {
      t.comment(`unliking ${file}`)
      unlinkSync(file)
    } catch (e) {
      console.log(e)
    }
  }
  t.comment('unlink completed')
})

module.exports.file = file

if (process.env.SKIP_PROCESS_EXIT_CHECK !== 'true') {
  const why = require('why-is-node-running')
  setInterval(why, 10000).unref()
}
11  node_modules/thread-stream/test/indexes.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,11 @@
'use strict'

const { test } = require('tap')
const indexes = require('../lib/indexes')

for (const index of Object.keys(indexes)) {
  test(`${index} is lock free`, function (t) {
    t.equal(Atomics.isLockFree(indexes[index]), true)
    t.end()
  })
}
74  node_modules/thread-stream/test/multibyte-chars.test.mjs  (generated, vendored, Normal file)
@@ -0,0 +1,74 @@
import { test } from 'tap'
import { readFile } from 'fs'
import ThreadStream from '../index.js'
import { join } from 'desm'
import { file } from './helper.js'

test('break up utf8 multibyte (sync)', (t) => {
  t.plan(2)
  const longString = '\u03A3'.repeat(16)

  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 15, // this must be odd
    filename: join(import.meta.url, 'to-file.js'),
    workerData: { dest },
    sync: true
  })

  stream.on('finish', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, longString)
    })
  })

  stream.write(longString)
  stream.end()
})

test('break up utf8 multibyte (async)', (t) => {
  t.plan(2)
  const longString = '\u03A3'.repeat(16)

  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 15, // this must be odd
    filename: join(import.meta.url, 'to-file.js'),
    workerData: { dest },
    sync: false
  })

  stream.on('finish', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, longString)
    })
  })

  stream.write(longString)
  stream.end()
})

test('break up utf8 multibyte several times bigger than write buffer', (t) => {
  t.plan(2)
  const longString = '\u03A3'.repeat(32)

  const dest = file()
  const stream = new ThreadStream({
    bufferSize: 15, // this must be odd
    filename: join(import.meta.url, 'to-file.js'),
    workerData: { dest },
    sync: false
  })

  stream.on('finish', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, longString)
    })
  })

  stream.write(longString)
  stream.end()
})
57  node_modules/thread-stream/test/never-drain.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,57 @@
const { test } = require('tap')
const ThreadStream = require('../index')
const { join } = require('path')

function retryUntilTimeout (fn, timeout) {
  const start = Date.now()
  return new Promise((resolve, reject) => {
    async function run () {
      if (fn()) {
        resolve()
        return
      }

      if (Date.now() - start >= timeout) {
        reject(new Error('timeout'))
        return
      }
      setTimeout(run, 10)
    }

    run()
  })
}

const isNode18 = process.version.indexOf('v18') === 0

test('emit warning when the worker gracefully exit without the stream ended', { skip: !isNode18 }, async function (t) {
  const expectedWarning = 'ThreadStream: process exited before destination stream was drained. this may indicate that the destination stream try to write to a another missing stream'
  const stream = new ThreadStream({
    filename: join(__dirname, 'to-next.js')
  })
  stream.unref()

  let streamWarning
  function saveWarning (e) {
    if (e.message === expectedWarning) {
      streamWarning = e
    }
  }
  process.on('warning', saveWarning)

  const data = 'hello'.repeat(10)
  for (let i = 0; i < 1000; i++) {
    if (streamWarning?.message === expectedWarning) {
      break
    }
    stream.write(data)
    await new Promise((resolve) => {
      setTimeout(resolve, 1)
    })
  }

  process.off('warning', saveWarning)
  t.equal(streamWarning?.message, expectedWarning)

  await retryUntilTimeout(() => stream.worker.exited === true, 3000)
})
18  node_modules/thread-stream/test/on-message.js  (generated, vendored, Normal file)
@@ -0,0 +1,18 @@
'use strict'

const { parentPort } = require('worker_threads')
const { Writable } = require('stream')

function run () {
  parentPort.once('message', function ({ text, takeThisPortPlease }) {
    takeThisPortPlease.postMessage(`received: ${text}`)
  })
  return new Writable({
    autoDestroy: true,
    write (chunk, enc, cb) {
      cb()
    }
  })
}

module.exports = run
37  node_modules/thread-stream/test/pkg/index.js  (generated, vendored, Normal file)
@@ -0,0 +1,37 @@
'use strict'

/**
 * This file is packaged using pkg in order to test if worker.js works in that context
 */

const { test } = require('tap')
const { join } = require('path')
const { file } = require('../helper')
const ThreadStream = require('../..')

test('bundlers support with .js file', function (t) {
  t.plan(1)

  globalThis.__bundlerPathsOverrides = {
    'thread-stream-worker': join(__dirname, '..', 'custom-worker.js')
  }

  const dest = file()

  process.on('uncaughtException', (error) => {
    console.log(error)
  })

  const stream = new ThreadStream({
    filename: join(__dirname, '..', 'to-file.js'),
    workerData: { dest },
    sync: true
  })

  stream.worker.removeAllListeners('message')
  stream.worker.once('message', (message) => {
    t.equal(message.code, 'CUSTOM-WORKER-CALLED')
  })

  stream.end()
})
15  node_modules/thread-stream/test/pkg/pkg.config.json  (generated, vendored, Normal file)
@@ -0,0 +1,15 @@
{
  "pkg": {
    "assets": [
      "../custom-worker.js",
      "../to-file.js"
    ],
    "targets": [
      "node14",
      "node16",
      "node18",
      "node20"
    ],
    "outputPath": "test/pkg"
  }
}
46  node_modules/thread-stream/test/pkg/pkg.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,46 @@
'use strict'

const { test } = require('tap')
const config = require('./pkg.config.json')
const { promisify } = require('util')
const { unlink } = require('fs/promises')
const { join } = require('path')
const { platform } = require('process')
const exec = promisify(require('child_process').exec)

test('worker test when packaged into executable using pkg', async (t) => {
  const packageName = 'index'

  // package the app into several node versions, check config for more info
  const filePath = `${join(__dirname, packageName)}.js`
  const configPath = join(__dirname, 'pkg.config.json')
  process.env.NODE_OPTIONS ||= ''
  process.env.NODE_OPTIONS = '--no-warnings'
  const { stderr } = await exec(`npx pkg ${filePath} --config ${configPath}`)

  // there should be no error when packaging
  t.equal(stderr, '')

  // pkg outputs files in the following format by default: {filename}-{node version}
  for (const target of config.pkg.targets) {
    // execute the packaged test
    let executablePath = `${join(config.pkg.outputPath, packageName)}-${target}`

    // when on windows, we need the .exe extension
    if (platform === 'win32') {
      executablePath = `${executablePath}.exe`
    } else {
      executablePath = `./${executablePath}`
    }

    const { stderr } = await exec(executablePath)

    // check if there were no errors
    t.equal(stderr, '')

    // clean up afterwards
    await unlink(executablePath)
  }

  t.end()
})
16  node_modules/thread-stream/test/port.js  (generated, vendored, Normal file)
@@ -0,0 +1,16 @@
'use strict'

const { Writable } = require('stream')

function run (opts) {
  const { port } = opts
  return new Writable({
    autoDestroy: true,
    write (chunk, enc, cb) {
      port.postMessage(chunk.toString())
      cb()
    }
  })
}

module.exports = run
24  node_modules/thread-stream/test/post-message.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,24 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const { once } = require('events')
const { MessageChannel } = require('worker_threads')
const ThreadStream = require('..')

test('message events emitted on the stream are posted to the worker', async function (t) {
  t.plan(1)

  const { port1, port2 } = new MessageChannel()
  const stream = new ThreadStream({
    filename: join(__dirname, 'on-message.js'),
    sync: false
  })
  t.teardown(() => {
    stream.end()
  })

  stream.emit('message', { text: 'hello', takeThisPortPlease: port1 }, [port1])
  const [confirmation] = await once(port2, 'message')
  t.equal(confirmation, 'received: hello')
})
41  node_modules/thread-stream/test/string-limit-2.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,41 @@
'use strict'

const t = require('tap')

if (process.env.CI) {
  t.skip('skip on CI')
  process.exit(0)
}

const { join } = require('path')
const { file } = require('./helper')
const { createReadStream } = require('fs')
const ThreadStream = require('..')
const buffer = require('buffer')

const MAX_STRING = buffer.constants.MAX_STRING_LENGTH

t.plan(1)

const dest = file()
const stream = new ThreadStream({
  filename: join(__dirname, 'to-file.js'),
  workerData: { dest },
  sync: false
})

stream.on('close', async () => {
  t.comment('close emitted')
  let buf
  for await (const chunk of createReadStream(dest)) {
    buf = chunk
  }
  t.equal('asd', buf.toString().slice(-3))
})

stream.on('ready', () => {
  t.comment('open emitted')
  stream.write('a'.repeat(MAX_STRING - 2))
  stream.write('asd')
  stream.end()
})
42  node_modules/thread-stream/test/string-limit.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,42 @@
'use strict'

const t = require('tap')

if (process.env.CI) {
  t.skip('skip on CI')
  process.exit(0)
}

const { join } = require('path')
const { file } = require('./helper')
const { stat } = require('fs')
const ThreadStream = require('..')

t.setTimeout(30000)

const dest = file()
const stream = new ThreadStream({
  filename: join(__dirname, 'to-file.js'),
  workerData: { dest },
  sync: false
})

let length = 0

stream.on('close', () => {
  stat(dest, (err, f) => {
    t.error(err)
    t.equal(f.size, length)
    t.end()
  })
})

const buf = Buffer.alloc(1024).fill('x').toString() // 1 KB

// This writes 1 GB of data
for (let i = 0; i < 1024 * 1024; i++) {
  length += buf.length
  stream.write(buf)
}

stream.end()
2  node_modules/thread-stream/test/syntax-error.mjs  (generated, vendored, Normal file)
@@ -0,0 +1,2 @@
// this is a syntax error
import
121  node_modules/thread-stream/test/thread-management.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,121 @@
'use strict'

const { test } = require('tap')
const { fork } = require('child_process')
const { join } = require('path')
const { readFile } = require('fs').promises
const { file } = require('./helper')
const { once } = require('events')
const ThreadStream = require('..')

test('exits with 0', async function (t) {
  const dest = file()
  const child = fork(join(__dirname, 'create-and-exit.js'), [dest])

  const [code] = await once(child, 'exit')
  t.equal(code, 0)

  const data = await readFile(dest, 'utf8')
  t.equal(data, 'hello world\n')
})

test('emit error if thread exits', async function (t) {
  const stream = new ThreadStream({
    filename: join(__dirname, 'exit.js'),
    sync: true
  })

  stream.on('ready', () => {
    stream.write('hello world\n')
  })

  let [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker thread exited')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')
})

test('emit error if thread have unhandledRejection', async function (t) {
  const stream = new ThreadStream({
    filename: join(__dirname, 'unhandledRejection.js'),
    sync: true
  })

  stream.on('ready', () => {
    stream.write('hello world\n')
  })

  let [err] = await once(stream, 'error')
  t.equal(err.message, 'kaboom')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')
})

test('emit error if worker stream emit error', async function (t) {
  const stream = new ThreadStream({
    filename: join(__dirname, 'error.js'),
    sync: true
  })

  stream.on('ready', () => {
    stream.write('hello world\n')
  })

  let [err] = await once(stream, 'error')
  t.equal(err.message, 'kaboom')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')
})

test('emit error if thread have uncaughtException', async function (t) {
  const stream = new ThreadStream({
    filename: join(__dirname, 'uncaughtException.js'),
    sync: true
  })

  stream.on('ready', () => {
    stream.write('hello world\n')
  })

  let [err] = await once(stream, 'error')
  t.equal(err.message, 'kaboom')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')

  stream.write('noop');
  [err] = await once(stream, 'error')
  t.equal(err.message, 'the worker has exited')
})

test('close the work if out of scope on gc', { skip: !global.WeakRef }, async function (t) {
  const dest = file()
  const child = fork(join(__dirname, 'close-on-gc.js'), [dest], {
    execArgv: ['--expose-gc']
  })

  const [code] = await once(child, 'exit')
  t.equal(code, 0)

  const data = await readFile(dest, 'utf8')
  t.equal(data, 'hello world\n')
})
23  node_modules/thread-stream/test/to-file-on-destroy.js  (generated, vendored, Normal file)
@@ -0,0 +1,23 @@
'use strict'

const fs = require('fs')
const { Writable } = require('stream')

function run (opts) {
  let data = ''
  return new Writable({
    autoDestroy: true,
    write (chunk, enc, cb) {
      data += chunk.toString()
      cb()
    },
    destroy (err, cb) {
      // process._rawDebug('destroy called')
      fs.writeFile(opts.dest, data, function (err2) {
        cb(err2 || err)
      })
    }
  })
}

module.exports = run
24  node_modules/thread-stream/test/to-file-on-final.js  (generated, vendored, Normal file)
@@ -0,0 +1,24 @@
'use strict'

const fs = require('fs')
const { Writable } = require('stream')

function run (opts) {
  let data = ''
  return new Writable({
    autoDestroy: true,
    write (chunk, enc, cb) {
      data += chunk.toString()
      cb()
    },
    final (cb) {
      setTimeout(function () {
        fs.writeFile(opts.dest, data, function (err) {
          cb(err)
        })
      }, 100)
    }
  })
}

module.exports = run
12  node_modules/thread-stream/test/to-file.js  (generated, vendored, Normal file)
@@ -0,0 +1,12 @@
'use strict'

const fs = require('fs')
const { once } = require('events')

async function run (opts) {
  const stream = fs.createWriteStream(opts.dest)
  await once(stream, 'open')
  return stream
}

module.exports = run
8  node_modules/thread-stream/test/to-file.mjs  (generated, vendored, Normal file)
@@ -0,0 +1,8 @@
import { createWriteStream } from 'fs'
import { once } from 'events'

export default async function run (opts) {
  const stream = createWriteStream(opts.dest)
  await once(stream, 'open')
  return stream
}
9  node_modules/thread-stream/test/to-next.js  (generated, vendored, Normal file)
@@ -0,0 +1,9 @@
'use strict'

const { PassThrough } = require('stream')

async function run (opts) {
  return new PassThrough({})
}

module.exports = run
30  node_modules/thread-stream/test/transpiled.test.js  (generated, vendored, Normal file)
@@ -0,0 +1,30 @@
'use strict'

const { test } = require('tap')
const { join } = require('path')
const { file } = require('./helper')
const ThreadStream = require('..')

function basic (esVersion) {
  test(`transpiled-ts-to-${esVersion}`, function (t) {
    t.plan(2)

    const dest = file()
    const stream = new ThreadStream({
      filename: join(__dirname, 'ts', `to-file.${esVersion}.cjs`),
      workerData: { dest },
      sync: true
    })

    // There are arbitrary checks, the important aspect of this test is to ensure
    // that we can properly load the transpiled file into our worker thread.
    t.same(stream.writableEnded, false)
    stream.end()
    t.same(stream.writableEnded, true)
  })
}

basic('es5')
basic('es6')
basic('es2017')
basic('esnext')
BIN  node_modules/thread-stream/test/ts-commonjs-default-export.zip  (generated, vendored, Normal file)
Binary file not shown.
33  node_modules/thread-stream/test/ts.test.ts  (generated, vendored, Normal file)
@@ -0,0 +1,33 @@
import { test } from 'tap'
import { readFile } from 'fs'
import ThreadStream from '../index.js'
import { join } from 'path'
import { file } from './helper.js'


test('typescript module', function (t) {
  t.plan(5)

  const dest = file()
  const stream = new ThreadStream({
    filename: join(__dirname, 'ts', 'to-file.ts'),
    workerData: { dest },
    sync: true
  })

  stream.on('finish', () => {
    readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })

  stream.on('close', () => {
    t.pass('close emitted')
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  stream.end()
})
10  node_modules/thread-stream/test/ts/to-file.ts  (generated, vendored, Normal file)
@@ -0,0 +1,10 @@
import { type PathLike, type WriteStream, createWriteStream } from 'fs'
import { once } from 'events'

export default async function run (
  opts: { dest: PathLike },
): Promise<WriteStream> {
  const stream = createWriteStream(opts.dest)
  await once(stream, 'open')
  return stream
}
19  node_modules/thread-stream/test/ts/transpile.sh  (generated, vendored, Executable file)
@@ -0,0 +1,19 @@
#!/bin/sh

set -e

cd ./test/ts;

if (echo "${npm_config_user_agent}" | grep "yarn"); then
  export RUNNER="yarn";
else
  export RUNNER="npx";
fi

test ./to-file.ts -ot ./to-file.es5.cjs || ("${RUNNER}" tsc --skipLibCheck --target es5 ./to-file.ts && mv ./to-file.js ./to-file.es5.cjs);
test ./to-file.ts -ot ./to-file.es6.mjs || ("${RUNNER}" tsc --skipLibCheck --target es6 ./to-file.ts && mv ./to-file.js ./to-file.es6.mjs);
test ./to-file.ts -ot ./to-file.es6.cjs || ("${RUNNER}" tsc --skipLibCheck --target es6 --module commonjs ./to-file.ts && mv ./to-file.js ./to-file.es6.cjs);
test ./to-file.ts -ot ./to-file.es2017.mjs || ("${RUNNER}" tsc --skipLibCheck --target es2017 ./to-file.ts && mv ./to-file.js ./to-file.es2017.mjs);
test ./to-file.ts -ot ./to-file.es2017.cjs || ("${RUNNER}" tsc --skipLibCheck --target es2017 --module commonjs ./to-file.ts && mv ./to-file.js ./to-file.es2017.cjs);
test ./to-file.ts -ot ./to-file.esnext.mjs || ("${RUNNER}" tsc --skipLibCheck --target esnext --module esnext ./to-file.ts && mv ./to-file.js ./to-file.esnext.mjs);
test ./to-file.ts -ot ./to-file.esnext.cjs || ("${RUNNER}" tsc --skipLibCheck --target esnext --module commonjs ./to-file.ts && mv ./to-file.js ./to-file.esnext.cjs);
21  node_modules/thread-stream/test/uncaughtException.js  (generated, vendored, Normal file)
@@ -0,0 +1,21 @@
'use strict'

const { Writable } = require('stream')

// Nop console.error to avoid printing things out
console.error = () => {}

setImmediate(function () {
  throw new Error('kaboom')
})

async function run (opts) {
  const stream = new Writable({
    write (chunk, enc, cb) {
      cb()
    }
  })
  return stream
}

module.exports = run
21  node_modules/thread-stream/test/unhandledRejection.js  (generated, vendored, Normal file)
@@ -0,0 +1,21 @@
'use strict'

const { Writable } = require('stream')

// Nop console.error to avoid printing things out
console.error = () => {}

setImmediate(function () {
  Promise.reject(new Error('kaboom'))
})

async function run (opts) {
  const stream = new Writable({
    write (chunk, enc, cb) {
      cb()
    }
  })
  return stream
}

module.exports = run
7  node_modules/thread-stream/test/yarnrc.yml  (generated, vendored, Normal file)
@@ -0,0 +1,7 @@
nodeLinker: pnp
pnpMode: loose
pnpEnableEsmLoader: false
packageExtensions:
  debug@*:
    dependencies:
      supports-color: '*'