diff --git a/src/assemble.js b/src/assemble.js
index b0376d5..d13be90 100644
--- a/src/assemble.js
+++ b/src/assemble.js
@@ -6,7 +6,7 @@ import { isListLike, isMapLike } from './schema.js'
  * Reconstructs a complex nested structure from flat arrays of definition and repetition levels,
  * according to Dremel encoding.
  *
- * @import {DecodedArray, FieldRepetitionType} from '../src/types.d.ts'
+ * @import {DecodedArray, FieldRepetitionType} from './types.d.ts'
  * @param {any[]} output
  * @param {number[] | undefined} definitionLevels
  * @param {number[]} repetitionLevels
@@ -103,7 +103,7 @@ export function assembleLists(
  * Assemble a nested structure from subcolumn data.
  * https://github.com/apache/parquet-format/blob/apache-parquet-format-2.10.0/LogicalTypes.md#nested-types
  *
- * @import {SchemaTree} from '../src/types.d.ts'
+ * @import {SchemaTree} from './types.d.ts'
  * @param {Map} subcolumnData
  * @param {SchemaTree} schema top-level schema element
  * @param {number} [depth] depth of nested structure
diff --git a/src/column.js b/src/column.js
index 9b76a68..9d4d174 100644
--- a/src/column.js
+++ b/src/column.js
@@ -123,7 +123,7 @@ export function getColumnRange({ dictionary_page_offset, data_page_offset, total
 /**
  * Read parquet header from a buffer.
  *
- * @import {ColumnMetaData, DecodedArray, DataReader, PageHeader, ParquetReadOptions, SchemaTree} from '../src/types.d.ts'
+ * @import {ColumnMetaData, DecodedArray, DataReader, PageHeader, ParquetReadOptions, SchemaTree} from './types.d.ts'
  * @param {DataReader} reader - parquet file reader
  * @returns {PageHeader} metadata object and bytes read
  */
diff --git a/src/constants.js b/src/constants.js
index bc38702..d8e9587 100644
--- a/src/constants.js
+++ b/src/constants.js
@@ -1,4 +1,4 @@
-/** @type {import('../src/types.d.ts').ParquetType[]} */
+/** @type {import('./types.d.ts').ParquetType[]} */
 export const ParquetType = [
   'BOOLEAN',
   'INT32',
@@ -29,7 +29,7 @@ export const FieldRepetitionType = [
   'REPEATED',
 ]
 
-/** @type {import('../src/types.d.ts').ConvertedType[]} */
+/** @type {import('./types.d.ts').ConvertedType[]} */
 export const ConvertedType = [
   'UTF8',
   'MAP',
@@ -55,7 +55,7 @@ export const ConvertedType = [
   'INTERVAL',
 ]
 
-/** @type {import('../src/types.d.ts').LogicalTypeType[]} */
+/** @type {import('./types.d.ts').LogicalTypeType[]} */
 export const logicalTypeType = [
   'NULL',
   'STRING',
@@ -85,7 +85,7 @@ export const CompressionCodec = [
   'LZ4_RAW',
 ]
 
-/** @type {import('../src/types.d.ts').PageType[]} */
+/** @type {import('./types.d.ts').PageType[]} */
 export const PageType = [
   'DATA_PAGE',
   'INDEX_PAGE',
@@ -93,7 +93,7 @@ export const PageType = [
   'DATA_PAGE_V2',
 ]
 
-/** @type {import('../src/types.d.ts').BoundaryOrder[]} */
+/** @type {import('./types.d.ts').BoundaryOrder[]} */
 export const BoundaryOrder = [
   'UNORDERED',
   'ASCENDING',
diff --git a/src/convert.js b/src/convert.js
index 3db468e..cd49afa 100644
--- a/src/convert.js
+++ b/src/convert.js
@@ -3,7 +3,7 @@ const dayMillis = 86400000 // 1 day in milliseconds
 /**
  * Convert known types from primitive to rich, and dereference dictionary.
  *
- * @import {DecodedArray, Encoding, SchemaElement} from '../src/types.d.ts'
+ * @import {DecodedArray, Encoding, SchemaElement} from './types.d.ts'
  * @param {DecodedArray} data series of primitive types
  * @param {DecodedArray | undefined} dictionary
  * @param {SchemaElement} schemaElement
diff --git a/src/datapage.js b/src/datapage.js
index 1fcbd27..c46780c 100644
--- a/src/datapage.js
+++ b/src/datapage.js
@@ -71,7 +71,7 @@ export function readDictionaryPage(bytes, diph, columnMetadata, typeLength) {
 }
 
 /**
- * @import {ColumnMetaData, CompressionCodec, Compressors, DataPage, DataPageHeader, DataPageHeaderV2, DataReader, DecodedArray, DictionaryPageHeader, PageHeader, SchemaTree} from '../src/types.d.ts'
+ * @import {ColumnMetaData, CompressionCodec, Compressors, DataPage, DataPageHeader, DataPageHeaderV2, DataReader, DecodedArray, DictionaryPageHeader, PageHeader, SchemaTree} from './types.d.ts'
  * @param {DataReader} reader data view for the page
  * @param {DataPageHeader} daph data page header
  * @param {SchemaTree[]} schemaPath
diff --git a/src/delta.js b/src/delta.js
index ab63b0d..3ab7d1d 100644
--- a/src/delta.js
+++ b/src/delta.js
@@ -1,7 +1,7 @@
 import { readVarInt, readZigZagBigInt } from './thrift.js'
 
 /**
- * @import {DataReader} from '../src/types.d.ts'
+ * @import {DataReader} from './types.d.ts'
  * @param {DataReader} reader
  * @param {number} count number of values to read
  * @param {Int32Array | BigInt64Array} output
diff --git a/src/encoding.js b/src/encoding.js
index 6f1f4ab..1770897 100644
--- a/src/encoding.js
+++ b/src/encoding.js
@@ -146,7 +146,7 @@ export function byteStreamSplit(reader, count, type, typeLength) {
 }
 
 /**
- * @import {DataReader, DecodedArray, ParquetType} from '../src/types.d.ts'
+ * @import {DataReader, DecodedArray, ParquetType} from './types.d.ts'
  * @param {ParquetType} type
  * @param {number | undefined} typeLength
  * @returns {number}
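Note: the `@import` path rewrite above is mechanical but meaningful. TypeScript resolves these JSDoc specifiers relative to the importing file, so from a module already inside `src/` the old `'../src/types.d.ts'` only resolved because the `../src` hop cancels itself out; `'./types.d.ts'` names the sibling declaration file directly and survives any re-rooting of the tree. A minimal sketch of the corrected pattern, using a hypothetical `src/example.js` that is not part of this patch (the `@import` tag is understood by TypeScript 5.5 and later):

```js
// src/example.js (hypothetical). The specifier is resolved relative to
// this file, so the sibling declaration file is './types.d.ts',
// not '../src/types.d.ts'.
/**
 * @import {DataReader} from './types.d.ts'
 * @param {DataReader} reader
 * @returns {number} number of bytes consumed so far
 */
export function bytesRead(reader) {
  return reader.offset
}
```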
diff --git a/src/hyparquet.js b/src/hyparquet.js
index 94e4027..c79d55f 100644
--- a/src/hyparquet.js
+++ b/src/hyparquet.js
@@ -27,36 +27,36 @@ export function parquetReadObjects(options) {
  * `import { ParquetReadOptions } from 'hyparquet'` for example.
  *
  * @template {any} T
- * @typedef {import('../src/types.d.ts').Awaitable<T>} Awaitable
+ * @typedef {import('./types.d.ts').Awaitable<T>} Awaitable
  */
 
 /**
- * @typedef {import('../src/types.d.ts').AsyncBuffer} AsyncBuffer
- * @typedef {import('../src/types.d.ts').DataReader} DataReader
- * @typedef {import('../src/types.d.ts').FileMetaData} FileMetaData
- * @typedef {import('../src/types.d.ts').SchemaTree} SchemaTree
- * @typedef {import('../src/types.d.ts').SchemaElement} SchemaElement
- * @typedef {import('../src/types.d.ts').ParquetType} ParquetType
- * @typedef {import('../src/types.d.ts').FieldRepetitionType} FieldRepetitionType
- * @typedef {import('../src/types.d.ts').ConvertedType} ConvertedType
- * @typedef {import('../src/types.d.ts').TimeUnit} TimeUnit
- * @typedef {import('../src/types.d.ts').LogicalType} LogicalType
- * @typedef {import('../src/types.d.ts').LogicalTypeType} LogicalTypeType
- * @typedef {import('../src/types.d.ts').RowGroup} RowGroup
- * @typedef {import('../src/types.d.ts').ColumnChunk} ColumnChunk
- * @typedef {import('../src/types.d.ts').ColumnMetaData} ColumnMetaData
- * @typedef {import('../src/types.d.ts').Encoding} Encoding
- * @typedef {import('../src/types.d.ts').CompressionCodec} CompressionCodec
- * @typedef {import('../src/types.d.ts').Compressors} Compressors
- * @typedef {import('../src/types.d.ts').Statistics} Statistics
- * @typedef {import('../src/types.d.ts').PageType} PageType
- * @typedef {import('../src/types.d.ts').PageHeader} PageHeader
- * @typedef {import('../src/types.d.ts').DataPageHeader} DataPageHeader
- * @typedef {import('../src/types.d.ts').DictionaryPageHeader} DictionaryPageHeader
- * @typedef {import('../src/types.d.ts').DecodedArray} DecodedArray
- * @typedef {import('../src/types.d.ts').OffsetIndex} OffsetIndex
- * @typedef {import('../src/types.d.ts').ColumnIndex} ColumnIndex
- * @typedef {import('../src/types.d.ts').BoundaryOrder} BoundaryOrder
- * @typedef {import('../src/types.d.ts').ColumnData} ColumnData
- * @typedef {import('../src/types.d.ts').ParquetReadOptions} ParquetReadOptions
+ * @typedef {import('./types.d.ts').AsyncBuffer} AsyncBuffer
+ * @typedef {import('./types.d.ts').DataReader} DataReader
+ * @typedef {import('./types.d.ts').FileMetaData} FileMetaData
+ * @typedef {import('./types.d.ts').SchemaTree} SchemaTree
+ * @typedef {import('./types.d.ts').SchemaElement} SchemaElement
+ * @typedef {import('./types.d.ts').ParquetType} ParquetType
+ * @typedef {import('./types.d.ts').FieldRepetitionType} FieldRepetitionType
+ * @typedef {import('./types.d.ts').ConvertedType} ConvertedType
+ * @typedef {import('./types.d.ts').TimeUnit} TimeUnit
+ * @typedef {import('./types.d.ts').LogicalType} LogicalType
+ * @typedef {import('./types.d.ts').LogicalTypeType} LogicalTypeType
+ * @typedef {import('./types.d.ts').RowGroup} RowGroup
+ * @typedef {import('./types.d.ts').ColumnChunk} ColumnChunk
+ * @typedef {import('./types.d.ts').ColumnMetaData} ColumnMetaData
+ * @typedef {import('./types.d.ts').Encoding} Encoding
+ * @typedef {import('./types.d.ts').CompressionCodec} CompressionCodec
+ * @typedef {import('./types.d.ts').Compressors} Compressors
+ * @typedef {import('./types.d.ts').Statistics} Statistics
+ * @typedef {import('./types.d.ts').PageType} PageType
+ * @typedef {import('./types.d.ts').PageHeader} PageHeader
+ * @typedef {import('./types.d.ts').DataPageHeader} DataPageHeader
+ * @typedef {import('./types.d.ts').DictionaryPageHeader} DictionaryPageHeader
+ * @typedef {import('./types.d.ts').DecodedArray} DecodedArray
+ * @typedef {import('./types.d.ts').OffsetIndex} OffsetIndex
+ * @typedef {import('./types.d.ts').ColumnIndex} ColumnIndex
+ * @typedef {import('./types.d.ts').BoundaryOrder} BoundaryOrder
+ * @typedef {import('./types.d.ts').ColumnData} ColumnData
+ * @typedef {import('./types.d.ts').ParquetReadOptions} ParquetReadOptions
  */
diff --git a/src/indexes.js b/src/indexes.js
index 8118bf1..c8d0b9a 100644
--- a/src/indexes.js
+++ b/src/indexes.js
@@ -33,7 +33,7 @@ export function readOffsetIndex(reader) {
 }
 
 /**
- * @import {ColumnIndex, DataReader, OffsetIndex, PageLocation, SchemaElement} from '../src/types.d.ts'
+ * @import {ColumnIndex, DataReader, OffsetIndex, PageLocation, SchemaElement} from './types.d.ts'
  * @param {any} loc
  * @returns {PageLocation}
  */
diff --git a/src/metadata.js b/src/metadata.js
index adefe61..52457d9 100644
--- a/src/metadata.js
+++ b/src/metadata.js
@@ -244,7 +244,7 @@ function timeUnit(unit) {
 /**
  * Convert column statistics based on column type.
  *
- * @import {AsyncBuffer, FileMetaData, LogicalType, MinMaxType, SchemaElement, SchemaTree, Statistics, TimeUnit} from '../src/types.d.ts'
+ * @import {AsyncBuffer, FileMetaData, LogicalType, MinMaxType, SchemaElement, SchemaTree, Statistics, TimeUnit} from './types.d.ts'
  * @param {any} stats
  * @param {SchemaElement} schema
  * @returns {Statistics}
diff --git a/src/plain.js b/src/plain.js
index b1a21da..e3e3148 100644
--- a/src/plain.js
+++ b/src/plain.js
@@ -1,7 +1,7 @@
 /**
  * Read `count` values of the given type from the reader.view.
  *
- * @import {DataReader, DecodedArray, ParquetType} from '../src/types.d.ts'
+ * @import {DataReader, DecodedArray, ParquetType} from './types.d.ts'
  * @param {DataReader} reader - buffer to read data from
  * @param {ParquetType} type - parquet type of the data
  * @param {number} count - number of values to read
diff --git a/src/query.js b/src/query.js
index 72c6bdf..5315ebb 100644
--- a/src/query.js
+++ b/src/query.js
@@ -8,7 +8,7 @@ import { equals } from './utils.js'
  * Accepts optional filter object to filter the results and orderBy column name to sort the results.
  * Note that using orderBy may SIGNIFICANTLY increase the query time.
  *
- * @import {ParquetQueryFilter} from '../src/types.d.ts'
+ * @import {ParquetQueryFilter} from './types.d.ts'
  * @param {ParquetReadOptions & { filter?: ParquetQueryFilter, orderBy?: string }} options
  * @returns {Promise<Record<string, any>[]>} resolves when all requested rows and columns are parsed
  */
@@ -48,7 +48,7 @@ export async function parquetQuery(options) {
 /**
  * Reads a list rows from a parquet file, reading only the row groups that contain the rows.
  * Returns a sparse array of rows.
- * @import {ParquetReadOptions} from '../src/types.d.ts'
+ * @import {ParquetReadOptions} from './types.d.ts'
  * @param {ParquetReadOptions & { rows: number[] }} options
  * @returns {Promise<Record<string, any>[]>}
 */
diff --git a/src/read.js b/src/read.js
index 78d68ed..fad053e 100644
--- a/src/read.js
+++ b/src/read.js
@@ -198,7 +198,7 @@ export async function readRowGroup(options, rowGroup, groupStart, rowLimit) {
 /**
  * Return a list of sub-columns needed to construct a top-level column.
  *
- * @import {ParquetReadOptions, RowGroup, SchemaTree} from '../src/types.d.ts'
+ * @import {ParquetReadOptions, RowGroup, SchemaTree} from './types.d.ts'
  * @param {SchemaTree} schema
  * @param {string[]} output
  * @returns {string[]}
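Note: the `hyparquet.js` hunk above is the package's public type surface. Re-exporting every declaration-file type through `@typedef` is what lets downstream JSDoc name them from the package root rather than reaching into `src/types.d.ts`. A hedged sketch of the consumer side (hypothetical module; `readAll` is not a hyparquet export, but `parquetReadObjects` is):

```js
// Hypothetical consumer module: the typedef block re-exported from
// hyparquet.js makes this package-root type reference possible.
import { parquetReadObjects } from 'hyparquet'

/**
 * @param {import('hyparquet').ParquetReadOptions} options
 * @returns {Promise<Record<string, any>[]>} all rows as objects
 */
export function readAll(options) {
  return parquetReadObjects(options)
}
```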
diff --git a/src/schema.js b/src/schema.js
index 2917e5b..a5f8bf9 100644
--- a/src/schema.js
+++ b/src/schema.js
@@ -1,7 +1,7 @@
 /**
  * Build a tree from the schema elements.
  *
- * @import {SchemaElement, SchemaTree} from '../src/types.d.ts'
+ * @import {SchemaElement, SchemaTree} from './types.d.ts'
  * @param {SchemaElement[]} schema
  * @param {number} rootIndex index of the root element
  * @param {string[]} path path to the element
diff --git a/src/thrift.js b/src/thrift.js
index 819eecc..f79d634 100644
--- a/src/thrift.js
+++ b/src/thrift.js
@@ -19,7 +19,7 @@ const CompactType = {
 /**
  * Parse TCompactProtocol
  *
- * @import {DataReader} from '../src/types.d.ts'
+ * @import {DataReader} from './types.d.ts'
  * @param {DataReader} reader
  * @returns {Record}
  */
diff --git a/src/utils.js b/src/utils.js
index f408421..447a985 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -194,7 +194,7 @@ export function cachedAsyncBuffer({ byteLength, slice }) {
  * Returns canonical cache key for a byte range 'start,end'.
  * Normalize int-range and suffix-range requests to the same key.
  *
- * @import {AsyncBuffer, Awaitable, DecodedArray} from '../src/types.d.ts'
+ * @import {AsyncBuffer, Awaitable, DecodedArray} from './types.d.ts'
  * @param {number} start start byte of range
  * @param {number} [end] end byte of range, or undefined for suffix range
  * @param {number} [size] size of file, or undefined for suffix range
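Note on the test changes below: `mockResolvedValueOnce` queues exactly one resolution, so a mock configured in one `it` block cannot leak into the next test, and an unexpected extra `fetch` call falls through to the mock's default `undefined` instead of silently reusing stale data. A standalone vitest sketch of the difference (not from this repo):

```js
import { expect, it, vi } from 'vitest'

it('mockResolvedValueOnce answers exactly one call', async () => {
  const fetchMock = vi.fn().mockResolvedValueOnce({ ok: true })
  // The first call consumes the queued value...
  await expect(fetchMock()).resolves.toEqual({ ok: true })
  // ...later calls hit the default implementation, which returns
  // undefined, so an unexpected extra request fails loudly.
  expect(fetchMock()).toBeUndefined()
})
```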
diff --git a/test/utils.test.js b/test/utils.test.js
index eb3f59c..13577ef 100644
--- a/test/utils.test.js
+++ b/test/utils.test.js
@@ -40,7 +40,7 @@ describe('toJson', () => {
 
 describe('byteLengthFromUrl', () => {
   it('returns the byte length from Content-Length header', async () => {
-    global.fetch = vi.fn().mockResolvedValue({
+    global.fetch = vi.fn().mockResolvedValueOnce({
       ok: true,
       headers: new Map([['Content-Length', '1024']]),
     })
@@ -51,16 +51,13 @@
   })
 
   it('throws an error if the response is not ok', async () => {
-    global.fetch = vi.fn().mockResolvedValue({
-      ok: false,
-      status: 404,
-    })
+    global.fetch = vi.fn().mockResolvedValueOnce({ ok: false, status: 404 })
 
     await expect(byteLengthFromUrl('https://example.com')).rejects.toThrow('fetch head failed 404')
   })
 
   it('throws an error if Content-Length header is missing', async () => {
-    global.fetch = vi.fn().mockResolvedValue({
+    global.fetch = vi.fn().mockResolvedValueOnce({
       ok: true,
       headers: new Map(),
     })
@@ -70,12 +67,9 @@
 
 
   it ('passes authentication headers', async () => {
-    global.fetch = vi.fn().mockImplementation((url, options) => {
+    global.fetch = vi.fn().mockImplementation((_url, options) => {
       if (new Headers(options.headers).get('Authorization') !== 'Bearer token') {
-        return Promise.resolve({
-          ok: false,
-          status: 401,
-        })}
+        return Promise.resolve({ ok: false, status: 401 })}
       return Promise.resolve({
         ok: true,
         headers: new Map([['Content-Length', '1024']]),
@@ -111,7 +105,7 @@ describe('asyncBufferFromUrl', () => {
 
   it('slice method fetches correct byte range', async () => {
     const mockArrayBuffer = new ArrayBuffer(100)
-    global.fetch = vi.fn().mockResolvedValue({
+    global.fetch = vi.fn().mockResolvedValueOnce({
       ok: true,
       body: {},
       status: 206,
@@ -129,7 +123,7 @@
 
   it('slice method handles undefined end parameter', async () => {
     const mockArrayBuffer = new ArrayBuffer(100)
-    global.fetch = vi.fn().mockResolvedValue({
+    global.fetch = vi.fn().mockResolvedValueOnce({
       ok: true,
       body: {},
       status: 206,
@@ -145,22 +139,16 @@
   })
 
   it('slice method throws an error if fetch fails', async () => {
-    global.fetch = vi.fn().mockResolvedValue({
-      ok: false,
-      status: 404,
-    })
+    global.fetch = vi.fn().mockResolvedValueOnce({ ok: false, status: 404 })
 
     const buffer = await asyncBufferFromUrl({ url: 'https://example.com', byteLength: 1024 })
     await expect(buffer.slice(0, 100)).rejects.toThrow('fetch failed 404')
   })
 
   it('passes authentication headers to get the byteLength', async () => {
-    global.fetch = vi.fn().mockImplementation((url, options) => {
+    global.fetch = vi.fn().mockImplementation((_url, options) => {
       if (new Headers(options.headers).get('Authorization') !== 'Bearer token') {
-        return Promise.resolve({
-          ok: false,
-          status: 401,
-        })
+        return Promise.resolve({ ok: false, status: 401 })
       }
       return Promise.resolve({
         ok: true,
@@ -177,18 +165,12 @@
 
   it ('passes authentication headers to fetch byte range', async () => {
     const mockArrayBuffer = new ArrayBuffer(100)
-    global.fetch = vi.fn().mockImplementation((url, options) => {
+    global.fetch = vi.fn().mockImplementation((_url, options) => {
       if (new Headers(options.headers).get('Authorization') !== 'Bearer token') {
-        return Promise.resolve({
-          ok: false,
-          status: 401,
-        })
+        return Promise.resolve({ ok: false, status: 401 })
       }
       if (options.headers.get('Range') !== 'bytes=0-99') {
-        return Promise.resolve({
-          ok: false,
-          status: 404,
-        })
+        return Promise.resolve({ ok: false, status: 404 })
       }
       return Promise.resolve({
         ok: true,
@@ -210,7 +192,7 @@ describe('asyncBufferFromUrl', () => {
   describe('when range requests are unsupported', () => {
     it('creates an AsyncBuffer with the correct byte length', async () => {
       const mockArrayBuffer = new ArrayBuffer(1024)
-      global.fetch = vi.fn().mockResolvedValue({
+      global.fetch = vi.fn().mockResolvedValueOnce({
         ok: true,
         status: 200,
         body: {},
@@ -229,7 +211,7 @@
 
     it('does not make multiple requests for multiple slices', async () => {
       const mockArrayBuffer = new ArrayBuffer(1024)
-      global.fetch = vi.fn().mockResolvedValue({
+      global.fetch = vi.fn().mockResolvedValueOnce({
         ok: true,
         status: 200,
         body: {},
diff --git a/tsconfig.json b/tsconfig.json
index 04d5724..b62627c 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -5,6 +5,7 @@
     "lib": ["esnext", "dom"],
     "module": "nodenext",
     "noEmit": true,
+    "noUnusedParameters": true,
     "resolveJsonModule": true,
     "strict": true
   },
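Note: the new `noUnusedParameters` compiler option is what motivates the `url` to `_url` renames in the mocks above. With it enabled, tsc reports parameters that are declared but never read, in checked JavaScript as well as in TypeScript, and a leading underscore exempts a parameter from the check. A minimal sketch of the convention (hypothetical snippet, not part of the patch):

```js
import { vi } from 'vitest'

// Under "noUnusedParameters": true, naming this parameter url would be a
// compile error because the mock never reads it; the underscore prefix
// tells tsc the omission is intentional.
const fetchMock = vi.fn().mockImplementation((_url, options) => {
  const ok = new Headers(options.headers).get('Authorization') === 'Bearer token'
  return Promise.resolve({ ok, status: ok ? 200 : 401 })
})
```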