mirror of
https://github.com/asadbek064/hyparquet-writer.git
synced 2025-12-05 23:31:54 +00:00
Export node by default for better next.js support.
Also move fileWriter into node.js file.
This commit is contained in:
parent
37508f69aa
commit
9d0ab4faa8
10
package.json
10
package.json
@@ -28,13 +28,13 @@
|
||||
"types": "types/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"node": {
|
||||
"types": "./types/node.d.ts",
|
||||
"import": "./src/node.js"
|
||||
},
|
||||
"default": {
|
||||
"browser": {
|
||||
"types": "./types/index.d.ts",
|
||||
"import": "./src/index.js"
|
||||
},
|
||||
"default": {
|
||||
"types": "./types/node.d.ts",
|
||||
"import": "./src/node.js"
|
||||
}
|
||||
},
|
||||
"./src/*.js": {
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
import fs from 'fs'
|
||||
import { ByteWriter } from './bytewriter.js'
|
||||
|
||||
/**
|
||||
* Buffered file writer.
|
||||
* Writes data to a local file in chunks using node fs.
|
||||
*
|
||||
* @import {Writer} from '../src/types.js'
|
||||
* @param {string} filename
|
||||
* @returns {Writer}
|
||||
*/
|
||||
/**
 * Buffered file writer.
 * Writes data to a local file in chunks using node fs.
 *
 * The returned writer keeps bytes in an in-memory ByteWriter and spills
 * them to disk once roughly 1mb has accumulated; finish() flushes the tail.
 *
 * @import {Writer} from '../src/types.js'
 * @param {string} filename
 * @returns {Writer}
 */
export function fileWriter(filename) {
  const flushThreshold = 1_000_000 // spill to disk once ~1mb is buffered
  const writer = new ByteWriter()

  // create a new file or overwrite existing one
  fs.writeFileSync(filename, '', { flag: 'w' })

  // append everything buffered so far, then rewind the write cursor
  const flush = () => {
    const pending = writer.buffer.slice(0, writer.index)
    // TODO: async
    fs.writeFileSync(filename, new Uint8Array(pending), { flag: 'a' })
    writer.index = 0
  }

  /**
   * Override the ensure method: flush past the threshold, then grow the
   * backing buffer if the requested bytes still do not fit.
   * @param {number} size
   */
  writer.ensure = (size) => {
    if (writer.index > flushThreshold) flush()
    const required = writer.index + size
    if (required <= writer.buffer.byteLength) return
    // grow geometrically, but never less than what is required right now
    const grown = new ArrayBuffer(Math.max(writer.buffer.byteLength * 2, required))
    new Uint8Array(grown).set(new Uint8Array(writer.buffer))
    writer.buffer = grown
    writer.view = new DataView(writer.buffer)
  }

  // a file-backed writer has no single in-memory result to hand back
  writer.getBuffer = () => {
    throw new Error('getBuffer not supported for FileWriter')
  }

  // write out whatever remains in the buffer
  writer.finish = () => flush()

  return writer
}
|
||||
56
src/node.js
56
src/node.js
@@ -1,15 +1,13 @@
|
||||
import { fileWriter } from './filewriter.js'
|
||||
import fs from 'fs'
|
||||
import { ByteWriter } from './bytewriter.js'
|
||||
import { parquetWrite } from './write.js'
|
||||
|
||||
export { parquetWrite, parquetWriteBuffer } from './write.js'
|
||||
export { ByteWriter } from './bytewriter.js'
|
||||
export { ParquetWriter } from './parquet-writer.js'
|
||||
export { fileWriter }
|
||||
export * from './index.js'
|
||||
|
||||
/**
|
||||
* Write data as parquet to a local file.
|
||||
*
|
||||
* @import {ParquetWriteOptions} from '../src/types.js'
|
||||
* @import {ParquetWriteOptions, Writer} from '../src/types.js'
|
||||
* @param {Omit<ParquetWriteOptions, 'writer'> & {filename: string}} options
|
||||
*/
|
||||
export function parquetWriteFile(options) {
|
||||
@@ -17,3 +15,49 @@ export function parquetWriteFile(options) {
|
||||
const writer = fileWriter(filename)
|
||||
parquetWrite({ ...rest, writer })
|
||||
}
|
||||
|
||||
/**
|
||||
* Buffered file writer.
|
||||
* Writes data to a local file in chunks using node fs.
|
||||
*
|
||||
* @param {string} filename
|
||||
* @returns {Writer}
|
||||
*/
|
||||
/**
 * Buffered file writer.
 * Writes data to a local file in chunks using node fs.
 *
 * The file is opened once and written through a single file descriptor,
 * instead of re-opening it with { flag: 'a' } on every flush (one
 * open/close pair per ~1mb chunk in the previous version). finish()
 * writes the remaining bytes and closes the descriptor; until finish()
 * is called the descriptor stays open.
 *
 * @import {Writer} from '../src/types.js'
 * @param {string} filename
 * @returns {Writer}
 */
export function fileWriter(filename) {
  const writer = new ByteWriter()
  const chunkSize = 1_000_000 // 1mb

  // create a new file or overwrite existing one, and keep it open for writing
  const fd = fs.openSync(filename, 'w')
  let closed = false

  function flush() {
    if (closed) return // finish() already ran; nothing left to write to
    // TODO: async
    // write a view over the filled prefix — no copy needed
    fs.writeSync(fd, new Uint8Array(writer.buffer, 0, writer.index))
    writer.index = 0
  }

  /**
   * Override the ensure method: flush once past the chunk threshold,
   * then grow the backing buffer if the requested bytes still do not fit.
   * @param {number} size
   */
  writer.ensure = function(size) {
    if (writer.index > chunkSize) {
      flush()
    }
    if (writer.index + size > writer.buffer.byteLength) {
      // grow geometrically, but never less than what is required right now
      const newSize = Math.max(writer.buffer.byteLength * 2, writer.index + size)
      const newBuffer = new ArrayBuffer(newSize)
      new Uint8Array(newBuffer).set(new Uint8Array(writer.buffer))
      writer.buffer = newBuffer
      writer.view = new DataView(writer.buffer)
    }
  }

  // a file-backed writer has no single in-memory result to hand back
  writer.getBuffer = function () {
    throw new Error('getBuffer not supported for FileWriter')
  }

  // write out whatever remains, then release the descriptor (idempotent)
  writer.finish = function() {
    flush()
    if (!closed) {
      fs.closeSync(fd)
      closed = true
    }
  }
  return writer
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import fs from 'fs'
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest'
|
||||
import { fileWriter } from '../src/filewriter.js'
|
||||
import { fileWriter } from '../src/node.js'
|
||||
|
||||
const filedir = 'data/'
|
||||
const filename = 'data/filewriter.test.bin'
|
||||
|
||||
@@ -30,8 +30,8 @@ describe('package.json', () => {
|
||||
const { exports } = packageJson
|
||||
expect(Object.keys(exports)).toEqual(['.', './src/*.js'])
|
||||
// node vs default (browser)
|
||||
expect(Object.keys(exports['.'])).toEqual(['node', 'default'])
|
||||
expect(Object.keys(exports['.'].node)).toEqual(['types', 'import'])
|
||||
expect(Object.keys(exports['.'])).toEqual(['browser', 'default'])
|
||||
expect(Object.keys(exports['.'].browser)).toEqual(['types', 'import'])
|
||||
expect(Object.keys(exports['.'].default)).toEqual(['types', 'import'])
|
||||
// deep imports
|
||||
expect(Object.keys(exports['./src/*.js'])).toEqual(['types', 'import'])
|
||||
|
||||
Loading…
Reference in New Issue
Block a user