2024-01-05 09:39:59 +00:00
|
|
|
/**
 * Replace bigint, date, etc with legal JSON types.
 *
 * @param {any} obj object to convert
 * @returns {unknown} converted object
 */
export function toJson(obj) {
  if (obj === undefined) return null
  if (typeof obj === 'bigint') return Number(obj)
  if (Array.isArray(obj)) return obj.map(toJson)
  if (obj instanceof Uint8Array) return Array.from(obj)
  if (obj instanceof Date) return obj.toISOString()
  if (obj instanceof Object) {
    /** @type {Record<string, unknown>} */
    const converted = {}
    // drop undefined-valued keys entirely, convert the rest recursively
    for (const [key, value] of Object.entries(obj)) {
      if (value === undefined) continue
      converted[key] = toJson(value)
    }
    return converted
  }
  // primitives (string, number, boolean, null) are already legal JSON
  return obj
}
|
2024-04-07 16:33:57 +00:00
|
|
|
|
|
|
|
|
/**
 * Concatenate two arrays fast.
 *
 * Appends bbb onto aaa in place, in bounded chunks so the spread never
 * exceeds the engine's maximum argument count for push.
 *
 * @typedef {import('./types.js').DecodedArray} DecodedArray
 * @param {any[]} aaa first array
 * @param {DecodedArray} bbb second array
 */
export function concat(aaa, bbb) {
  const chunkSize = 10000
  let offset = 0
  while (offset < bbb.length) {
    aaa.push(...bbb.slice(offset, offset + chunkSize))
    offset += chunkSize
  }
}
|
2024-07-26 21:08:57 +00:00
|
|
|
|
|
|
|
|
/**
 * Get the byte length of a URL using a HEAD request.
 *
 * @param {string} url
 * @returns {Promise<number>} remote resource size in bytes
 * @throws {Error} if the request fails or the Content-Length header is missing
 */
export async function byteLengthFromUrl(url) {
  const res = await fetch(url, { method: 'HEAD' })
  if (!res.ok) throw new Error(`fetch head failed ${res.status}`)
  const length = res.headers.get('Content-Length')
  if (!length) throw new Error('missing content length')
  // always pass an explicit radix to parseInt
  return parseInt(length, 10)
}
|
|
|
|
|
|
|
|
|
|
/**
 * Construct an AsyncBuffer for a URL.
 *
 * @typedef {import('./types.js').AsyncBuffer} AsyncBuffer
 * @param {string} url
 * @param {number} [byteLength] known size in bytes; skips the HEAD request when provided
 * @returns {Promise<AsyncBuffer>}
 */
export async function asyncBufferFromUrl(url, byteLength) {
  // byte length from HEAD request, only when not supplied by the caller.
  // ??= (not ||=) so a legitimate zero-length resource does not trigger a refetch.
  byteLength ??= await byteLengthFromUrl(url)
  return {
    byteLength,
    async slice(start, end) {
      // fetch byte range from url
      const headers = new Headers()
      // HTTP Range end is inclusive; slice end is exclusive, hence end - 1
      const endStr = end === undefined ? '' : end - 1
      headers.set('Range', `bytes=${start}-${endStr}`)
      const res = await fetch(url, { headers })
      if (!res.ok || !res.body) throw new Error(`fetch failed ${res.status}`)
      return res.arrayBuffer()
    },
  }
}
|
2024-07-26 22:01:01 +00:00
|
|
|
|
|
|
|
|
/**
 * Construct an AsyncBuffer for a local file using node fs package.
 *
 * @param {string} filename
 * @returns {Promise<AsyncBuffer>}
 */
export async function asyncBufferFromFile(filename) {
  const fsPackage = 'fs' // webpack no include
  const fs = await import(fsPackage)
  const stat = await fs.promises.stat(filename)
  return {
    byteLength: stat.size,
    async slice(start, end) {
      // Read exactly the byte range [start, end).
      // Note: fs.createReadStream's `end` option is INCLUSIVE, which would
      // return one extra byte for an exclusive-end slice, so read explicitly.
      const stop = end ?? stat.size
      const length = Math.max(0, stop - start)
      const buffer = Buffer.alloc(length)
      const file = await fs.promises.open(filename, 'r')
      try {
        // loop because read() may return fewer bytes than requested
        let offset = 0
        while (offset < length) {
          const { bytesRead } = await file.read(buffer, offset, length - offset, start + offset)
          if (bytesRead === 0) break // EOF
          offset += bytesRead
        }
      } finally {
        await file.close()
      }
      return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + length)
    },
  }
}
|
|
|
|
|
|
|
|
|
|
/**
 * Convert a node ReadStream to ArrayBuffer.
 *
 * Buffers every data chunk, then copies the joined bytes out as a
 * standalone ArrayBuffer once the stream ends.
 *
 * @param {import('stream').Readable} input
 * @returns {Promise<ArrayBuffer>}
 */
function readStreamToArrayBuffer(input) {
  return new Promise((resolve, reject) => {
    /** @type {Buffer[]} */
    const parts = []
    input.on('data', part => parts.push(part))
    input.on('error', reject)
    input.on('end', () => {
      const joined = Buffer.concat(parts)
      const { buffer, byteOffset, byteLength } = joined
      // slice out only this Buffer's view of the backing ArrayBuffer
      resolve(buffer.slice(byteOffset, byteOffset + byteLength))
    })
  })
}
|