hyparquet-writer/src/write.js

79 lines
2.0 KiB
JavaScript
Raw Normal View History

2025-03-26 04:06:43 +00:00
import { getParquetTypeForValues, writeColumn } from './column.js'
import { Writer } from './writer.js'
import { writeMetadata } from './metadata.js'
/**
* Write data as parquet to an ArrayBuffer
*
* @import {ColumnChunk, DecodedArray, FileMetaData, SchemaElement} from 'hyparquet'
* @param {Record<string, DecodedArray>} columnData
* @returns {ArrayBuffer}
*/
export function parquetWrite(columnData) {
const writer = new Writer()
// Check if all columns have the same length
const columnNames = Object.keys(columnData)
const num_rows = columnNames.length ? BigInt(columnData[columnNames[0]].length) : 0n
for (const name of columnNames) {
if (BigInt(columnData[name].length) !== num_rows) {
throw new Error('parquetWrite: all columns must have the same length')
}
}
// Write header PAR1
writer.appendUint32(0x31524150)
// schema
/** @type {SchemaElement[]} */
const schema = [{
name: 'root',
num_children: columnNames.length,
repetition_type: 'REQUIRED',
}]
// row group columns
/** @type {ColumnChunk[]} */
const columns = []
// Write columns
for (const name of columnNames) {
const values = columnData[name]
const type = getParquetTypeForValues(values)
const file_offset = BigInt(writer.offset)
const meta_data = writeColumn(writer, name, values, type)
const repetition_type = 'REQUIRED'
// save metadata
schema.push({ type, name, repetition_type })
columns.push({
file_path: name,
file_offset,
meta_data,
})
}
// Write metadata
/** @type {FileMetaData} */
const metadata = {
version: 2,
created_by: 'hyparquet',
schema,
num_rows,
row_groups: [{
columns,
total_byte_size: BigInt(writer.offset - 4),
num_rows,
}],
metadata_length: 0,
}
// @ts-ignore don't want to actually serialize metadata_length
delete metadata.metadata_length
writeMetadata(writer, metadata)
// Write footer PAR1
writer.appendUint32(0x31524150)
return writer.getBuffer()
}