mirror of https://github.com/asadbek064/hyparquet.git
factor tests with it.for() (#55)

parent 90f8a3b775
commit cb639a0b45
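This commit collapses four near-identical readColumn tests into a single parameterized test using vitest's it.for(). Unlike it.each(), it.for() does not spread array cases: each entry of the case table is handed to the callback as one value (the test context comes second), which is why the new test can destructure ({ rowLimit, expected }) directly, and the %p placeholder in the title pretty-prints each case so the four generated tests get distinct names. A minimal standalone sketch of the pattern (the doubling cases are illustrative, not from this repo):

import { describe, expect, it } from 'vitest'

describe('it.for sketch', () => {
  // each object below becomes one generated test;
  // %p pretty-prints the case object into the test title
  it.for([
    { input: 2, expected: 4 },
    { input: 3, expected: 6 },
  ])('doubles %p', ({ input, expected }) => {
    expect(input * 2).toBe(expected)
  })
})

The diff follows.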
@@ -1,12 +1,19 @@
 import { compressors } from 'hyparquet-compressors'
 import { describe, expect, it } from 'vitest'
+import { getColumnRange, readColumn } from '../src/column.js'
 import { parquetMetadata } from '../src/hyparquet.js'
 import { getSchemaPath } from '../src/schema.js'
-import { getColumnRange, readColumn } from '../src/column.js'
 import { asyncBufferFromFile } from '../src/utils.js'
 
+const values = [null, 1, -2, NaN, 0, -1, -0, 2]
+
 describe('readColumn', () => {
-  it('read columns when rowLimit is undefined', async () => {
+  it.for([
+    { rowLimit: undefined, expected: values },
+    { rowLimit: Infinity, expected: values },
+    { rowLimit: 2, expected: values.slice(0, 2) },
+    { rowLimit: 0, expected: [] },
+  ])('readColumn with rowLimit %p', async ({ rowLimit, expected }) => {
     const testFile = 'test/files/float16_nonzeros_and_nans.parquet'
     const asyncBuffer = await asyncBufferFromFile(testFile)
     const arrayBuffer = await asyncBuffer.slice(0)
@@ -19,65 +26,7 @@ describe('readColumn', () => {
     const schemaPath = getSchemaPath(metadata.schema, column.meta_data?.path_in_schema ?? [])
     const reader = { view: new DataView(columnArrayBuffer), offset: 0 }
 
-    const rowLimit = undefined
     const result = readColumn(reader, rowLimit, column.meta_data, schemaPath, { file: asyncBuffer, compressors })
-    const expected = [null, 1, -2, NaN, 0, -1, -0, 2]
     expect(result).toEqual(expected)
   })
-
-  it('read columns when rowLimit is Infinity', async () => {
-    const testFile = 'test/files/float16_nonzeros_and_nans.parquet'
-    const asyncBuffer = await asyncBufferFromFile(testFile)
-    const arrayBuffer = await asyncBuffer.slice(0)
-    const metadata = parquetMetadata(arrayBuffer)
-
-    const column = metadata.row_groups[0].columns[0]
-    if (!column.meta_data) throw new Error(`No column metadata for ${testFile}`)
-    const [columnStartByte, columnEndByte] = getColumnRange(column.meta_data).map(Number)
-    const columnArrayBuffer = arrayBuffer.slice(columnStartByte, columnEndByte)
-    const schemaPath = getSchemaPath(metadata.schema, column.meta_data?.path_in_schema ?? [])
-    const reader = { view: new DataView(columnArrayBuffer), offset: 0 }
-
-    const rowLimit = Infinity
-    const result = readColumn(reader, rowLimit, column.meta_data, schemaPath, { file: asyncBuffer, compressors })
-    const expected = [null, 1, -2, NaN, 0, -1, -0, 2]
-    expect(result).toEqual(expected)
-  })
-
-  it('read columns when rowLimit is defined', async () => {
-    const testFile = 'test/files/float16_nonzeros_and_nans.parquet'
-    const asyncBuffer = await asyncBufferFromFile(testFile)
-    const arrayBuffer = await asyncBuffer.slice(0)
-    const metadata = parquetMetadata(arrayBuffer)
-
-    const column = metadata.row_groups[0].columns[0]
-    if (!column.meta_data) throw new Error(`No column metadata for ${testFile}`)
-    const [columnStartByte, columnEndByte] = getColumnRange(column.meta_data).map(Number)
-    const columnArrayBuffer = arrayBuffer.slice(columnStartByte, columnEndByte)
-    const schemaPath = getSchemaPath(metadata.schema, column.meta_data?.path_in_schema ?? [])
-    const reader = { view: new DataView(columnArrayBuffer), offset: 0 }
-
-    const rowLimit = 2
-    const result = readColumn(reader, rowLimit, column.meta_data, schemaPath, { file: asyncBuffer, compressors })
-    expect(result.length).toBe(rowLimit)
-  })
-
-  it('read columns when rowLimit is 0', async () => {
-    const testFile = 'test/files/float16_nonzeros_and_nans.parquet'
-    const asyncBuffer = await asyncBufferFromFile(testFile)
-    const arrayBuffer = await asyncBuffer.slice(0)
-    const metadata = parquetMetadata(arrayBuffer)
-
-    const column = metadata.row_groups[0].columns[0]
-    if (!column.meta_data) throw new Error(`No column metadata for ${testFile}`)
-    const [columnStartByte, columnEndByte] = getColumnRange(column.meta_data).map(Number)
-    const columnArrayBuffer = arrayBuffer.slice(columnStartByte, columnEndByte)
-    const schemaPath = getSchemaPath(metadata.schema, column.meta_data?.path_in_schema ?? [])
-    const reader = { view: new DataView(columnArrayBuffer), offset: 0 }
-
-    const rowLimit = 0
-    const result = readColumn(reader, rowLimit, column.meta_data, schemaPath, { file: asyncBuffer, compressors })
-    expect(result.length).toBe(rowLimit)
-  })
-
 })
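A note on the case table: every expected value is a prefix of the shared values fixture, matching what the four cases assert, namely that undefined and Infinity both mean "no row limit" while a finite rowLimit returns at most that many rows. The fixture deliberately includes awkward float16 values (NaN and -0); toEqual compares primitives with Object.is semantics, so the NaN entry compares equal to itself inside the assertion. A quick sanity check of the prefix relationship (a sketch, runnable with plain Node; values copied from the diff above):

// values fixture copied from the diff
const values = [null, 1, -2, NaN, 0, -1, -0, 2]
console.log(values.slice(0, 2)) // [ null, 1 ] -> the rowLimit: 2 expectation
console.log(values.slice(0, 0)) // [] -> same as the literal [] used for rowLimit: 0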