mirror of
https://github.com/asadbek064/hyparquet-writer.git
synced 2025-12-05 23:31:54 +00:00
Round-trip tests
This commit is contained in:
parent
5ecc4ff52e
commit
4494260015
@ -6,7 +6,7 @@
|
||||
[](https://www.npmjs.com/package/hyparquet-writer)
|
||||
[](https://github.com/hyparam/hyparquet-writer/actions)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||

|
||||

|
||||
[](https://www.npmjs.com/package/hyparquet-writer?activeTab=dependencies)
|
||||
|
||||
Hyparquet Writer is a JavaScript library for writing [Apache Parquet](https://parquet.apache.org) files. It is designed to be lightweight, fast and store data very efficiently. It is a companion to the [hyparquet](https://github.com/hyparam/hyparquet) library, which is a JavaScript library for reading parquet files.
|
||||
|
||||
@ -43,7 +43,7 @@ export default [
|
||||
'jsdoc/require-returns-type': 'error',
|
||||
'jsdoc/sort-tags': 'error',
|
||||
'no-constant-condition': 'off',
|
||||
'no-extra-parens': 'error',
|
||||
'no-extra-parens': 'warn',
|
||||
'no-multi-spaces': 'error',
|
||||
'no-trailing-spaces': 'error',
|
||||
'no-undef': 'error',
|
||||
|
||||
BIN
test/files/fixed_length_decimal.parquet
Normal file
BIN
test/files/fixed_length_decimal.parquet
Normal file
Binary file not shown.
BIN
test/files/signs.parquet
Normal file
BIN
test/files/signs.parquet
Normal file
Binary file not shown.
34
test/write.roundtrip.test.js
Normal file
34
test/write.roundtrip.test.js
Normal file
@ -0,0 +1,34 @@
|
||||
import fs from 'fs'
import { asyncBufferFromFile, parquetMetadataAsync, parquetReadObjects, parquetSchema } from 'hyparquet'
import { describe, expect, it } from 'vitest'
import { parquetWriteBuffer } from '../src/index.js'

/**
 * Round-trip tests: for every parquet fixture in test/files, read it with
 * hyparquet, re-write it with parquetWriteBuffer, read the result back, and
 * assert the row data survives unchanged.
 */
describe('parquetWrite round-trip', () => {
  const files = fs.readdirSync('test/files').filter(f => f.endsWith('.parquet'))

  files.forEach(filename => {
    // Fixed: scraper had mangled these template literals to `$(unknown)`;
    // they must interpolate the fixture filename.
    it(`round-trips data from ${filename}`, async () => {
      const file = await asyncBufferFromFile(`test/files/${filename}`)
      const metadata = await parquetMetadataAsync(file)
      const rows = await parquetReadObjects({ file })

      // Transpose row-oriented data into the column-oriented shape the
      // writer expects: one { ...schemaElement, data } entry per column.
      const schema = parquetSchema(metadata)
      const columnData = schema.children.map(({ element }) => ({
        ...element,
        data: /** @type {any[]} */ ([]),
      }))
      for (const row of rows) {
        for (const { name, data } of columnData) {
          data.push(row[name])
        }
      }

      // Write the columns to an in-memory parquet buffer and re-read it.
      const buffer = parquetWriteBuffer({ columnData })
      const output = await parquetReadObjects({ file: buffer })

      expect(output.length).toBe(rows.length)
      expect(output).toEqual(rows)
    })
  })
})
|
||||
Loading…
Reference in New Issue
Block a user