Mirror of https://github.com/asadbek064/hyparquet.git
Remove unnecessary parens

parent 3583aeb549
commit f389e30565
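
Note (illustrative, not part of the commit): the paren removals below are behavior-preserving because JavaScript's shift operators (<<, >>, >>>) bind tighter than the bitwise &, ^ and | operators. A quick self-contained check with made-up values:

    // shift binds tighter than bitwise AND/OR, so the removed parens were redundant
    const a = 0x12, b = 4, c = 0x0f
    console.assert((a << b | c) === ((a << b) | c)) // both are 0x12f
    console.assert((a >> b & c) === ((a >> b) & c)) // both are 0x1
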
@@ -39,6 +39,7 @@
   "jsdoc/require-returns-type": "error",
   "jsdoc/sort-tags": "error",
   "no-constant-condition": "off",
+  "no-extra-parens": "error",
   "no-multi-spaces": "error",
   "no-trailing-spaces": "error",
   "no-var": "error",
@@ -89,9 +89,9 @@ function parseInt96Date(value) {
  */
 export function parseFloat16(bytes) {
   if (!bytes) return undefined
-  const int16 = (bytes[1] << 8) | bytes[0]
+  const int16 = bytes[1] << 8 | bytes[0]
   const sign = int16 >> 15 ? -1 : 1
-  const exp = (int16 >> 10) & 0x1f
+  const exp = int16 >> 10 & 0x1f
   const frac = int16 & 0x3ff
   if (exp === 0) return sign * Math.pow(2, -14) * (frac / 1024) // subnormals
   if (exp === 0x1f) return frac ? NaN : sign * Infinity
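
Illustrative check (values not from the repo): decoding the half-precision value 1.0 (little-endian bytes 0x00 0x3c) with the unparenthesized expressions yields the same fields, combined here with the standard IEEE 754 half-precision formula for the normal case:

    const bytes = new Uint8Array([0x00, 0x3c]) // float16 1.0
    const int16 = bytes[1] << 8 | bytes[0]     // 0x3c00, same as (bytes[1] << 8) | bytes[0]
    const sign = int16 >> 15 ? -1 : 1          // 1
    const exp = int16 >> 10 & 0x1f             // 15, same as (int16 >> 10) & 0x1f
    const frac = int16 & 0x3ff                 // 0
    console.assert(sign * 2 ** (exp - 15) * (1 + frac / 1024) === 1)
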
@@ -42,26 +42,25 @@ export function readDataPageV2(compressedBytes, ph, schemaPath, columnMetadata,
     page = decompressPage(page, uncompressedPageSize, columnMetadata.codec, compressors)
   }
   const pageView = new DataView(page.buffer, page.byteOffset, page.byteLength)
+  const pageReader = { view: pageView, offset: 0 }

   // read values based on encoding
   /** @type {import('./types.d.ts').DecodedArray} */
   let dataPage = []
   const nValues = daph2.num_values - daph2.num_nulls
   if (daph2.encoding === 'PLAIN') {
-    const pageReader = { view: pageView, offset: 0 }
     const { type_length } = schemaPath[schemaPath.length - 1].element
     dataPage = readPlain(pageReader, columnMetadata.type, nValues, type_length)
   } else if (daph2.encoding === 'RLE') {
-    const bitWidth = 1
-    const pageReader = { view: pageView, offset: 4 }
+    pageReader.offset = 4
     dataPage = new Array(nValues)
-    readRleBitPackedHybrid(pageReader, bitWidth, uncompressedPageSize, dataPage)
+    readRleBitPackedHybrid(pageReader, 1, uncompressedPageSize, dataPage)
   } else if (
     daph2.encoding === 'PLAIN_DICTIONARY' ||
     daph2.encoding === 'RLE_DICTIONARY'
   ) {
     const bitWidth = pageView.getUint8(0)
-    const pageReader = { view: pageView, offset: 1 }
+    pageReader.offset = 1
     dataPage = new Array(nValues)
     readRleBitPackedHybrid(pageReader, bitWidth, uncompressedPageSize, dataPage)
   } else if (daph2.encoding === 'DELTA_BINARY_PACKED') {
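
Background note (illustrative, not part of the diff): hoisting pageReader works because the decoders share one mutable { view, offset } object and advance or reset its offset in place. A minimal sketch of that pattern, using a hypothetical readByte helper:

    const bytes = new Uint8Array([5, 6, 7, 8])
    const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength)
    const reader = { view, offset: 0 }
    // hypothetical decoder: reads one byte and advances the shared offset
    function readByte(r) {
      const b = r.view.getUint8(r.offset)
      r.offset++
      return b
    }
    console.assert(readByte(reader) === 5 && reader.offset === 1)
    reader.offset = 3 // a later branch can reposition the same reader, as the RLE branch does
    console.assert(readByte(reader) === 8)
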
@@ -76,13 +75,11 @@ export function readDataPageV2(compressedBytes, ph, schemaPath, columnMetadata,
 }

 /**
  * Read the repetition levels from this page, if any.
- *
- * @typedef {import("./types.d.ts").DataReader} DataReader
- * @param {DataReader} reader data view for the page
- * @param {DataPageHeaderV2} daph2 data page header
+ * @param {DataReader} reader
+ * @param {DataPageHeaderV2} daph2 data page header v2
  * @param {SchemaTree[]} schemaPath
- * @returns {any[]} repetition levels and number of bytes read
+ * @returns {any[]} repetition levels
  */
 export function readRepetitionLevelsV2(reader, daph2, schemaPath) {
   const maxRepetitionLevel = getMaxRepetitionLevel(schemaPath)
@@ -98,12 +95,10 @@ export function readRepetitionLevelsV2(reader, daph2, schemaPath) {
 }

 /**
  * Read the definition levels from this page, if any.
- *
- * @param {DataReader} reader data view for the page
+ * @param {DataReader} reader
  * @param {DataPageHeaderV2} daph2 data page header v2
  * @param {number} maxDefinitionLevel
- * @returns {number[] | undefined} definition levels and number of bytes read
+ * @returns {number[] | undefined} definition levels
  */
 function readDefinitionLevelsV2(reader, daph2, maxDefinitionLevel) {
   if (maxDefinitionLevel) {
@@ -116,8 +111,6 @@ function readDefinitionLevelsV2(reader, daph2, maxDefinitionLevel) {
 }

 /**
  * Unpack the delta binary packed encoding.
  *
  * @param {Uint8Array} page page data
  * @param {number} nValues number of values to read
  * @param {Int32Array | BigInt64Array} values array to write to
@@ -150,12 +143,12 @@ function deltaBinaryUnpack(page, nValues, values) {
       const mask = (1n << bitWidth) - 1n
       let bitpackPos = 0n
       while (count && miniblockCount) {
-        let bits = (BigInt(view.getUint8(reader.offset)) >> bitpackPos) & mask // TODO: don't re-read value every time
+        let bits = BigInt(view.getUint8(reader.offset)) >> bitpackPos & mask // TODO: don't re-read value every time
         bitpackPos += bitWidth
         while (bitpackPos >= 8) {
           bitpackPos -= 8n
           reader.offset++
-          bits |= (BigInt(view.getUint8(reader.offset)) << bitWidth - bitpackPos) & mask
+          bits |= BigInt(view.getUint8(reader.offset)) << bitWidth - bitpackPos & mask
         }
         const delta = minDelta + bits
         value += delta
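
Note (illustrative, values made up): the subtler removal here is in the expression bitWidth - bitpackPos inside the shift, which stays correct because additive operators bind tighter than shifts, and shifts bind tighter than &:

    const bitWidth = 3n, bitpackPos = 1n
    const mask = (1n << bitWidth) - 1n
    const byte = 0b10110101n // arbitrary example byte
    // the unparenthesized form groups as ((byte << (bitWidth - bitpackPos)) & mask)
    console.assert((byte << bitWidth - bitpackPos & mask) === ((byte << (bitWidth - bitpackPos)) & mask))
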
@@ -57,7 +57,7 @@ export function readRleBitPackedHybrid(reader, width, length, values) {
  * @param {number} seen - number of values seen so far
  */
 function readRle(reader, count, bitWidth, values, seen) {
-  const width = (bitWidth + 7) >> 3
+  const width = bitWidth + 7 >> 3
   let value = 0
   if (width === 1) {
     value = reader.view.getUint8(reader.offset)
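
Side note (illustrative): bitWidth + 7 >> 3 still computes the byte width because + binds tighter than >>; for non-negative widths it equals Math.ceil(bitWidth / 8):

    for (const bitWidth of [1, 7, 8, 9, 31, 32]) {
      console.assert((bitWidth + 7 >> 3) === Math.ceil(bitWidth / 8))
    }
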
@@ -89,7 +89,7 @@ function readRle(reader, count, bitWidth, values, seen) {
  */
 function readBitPacked(reader, header, bitWidth, values, seen) {
   // extract number of values to read from header
-  let count = (header >> 1) << 3
+  let count = header >> 1 << 3
   // mask for bitWidth number of bits
   const mask = (1 << bitWidth) - 1
@@ -118,7 +118,7 @@ function readBitPacked(reader, header, bitWidth, values, seen) {
     } else {
       if (seen < values.length) {
         // emit value by shifting off to the right and masking
-        values[seen++] = (data >> right) & mask
+        values[seen++] = data >> right & mask
       }
       count--
       right += bitWidth
@@ -47,7 +47,7 @@ function readPlainBoolean(reader, count) {
     const byteOffset = reader.offset + Math.floor(i / 8)
     const bitOffset = i % 8
     const byte = reader.view.getUint8(byteOffset)
-    values[i] = (byte & (1 << bitOffset)) !== 0
+    values[i] = (byte & 1 << bitOffset) !== 0
   }
   reader.offset += Math.ceil(count / 8)
   return values
@@ -95,7 +95,7 @@ function readPlainInt96(reader, count) {
   for (let i = 0; i < count; i++) {
     const low = reader.view.getBigInt64(reader.offset + i * 12, true)
     const high = reader.view.getInt32(reader.offset + i * 12 + 8, true)
-    values[i] = (BigInt(high) << 64n) | low
+    values[i] = BigInt(high) << 64n | low
   }
   reader.offset += count * 12
   return values
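
Illustrative check (values not from the repo): the INT96 recombination is unchanged because the BigInt shift binds tighter than |:

    const high = 1
    const low = 123n
    console.assert((BigInt(high) << 64n | low) === (1n << 64n) + 123n)
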
@@ -101,8 +101,8 @@ export function snappyUncompress(input, output) {
     switch (c & 0x3) {
       case 1:
         // Copy with 1-byte offset
-        len = ((c >>> 2) & 0x7) + 4
-        offset = input[pos] + ((c >>> 5) << 8)
+        len = (c >>> 2 & 0x7) + 4
+        offset = input[pos] + (c >>> 5 << 8)
         pos++
         break
       case 2:
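
Worked example (hypothetical tag byte, not from the test suite) of the copy-with-1-byte-offset decode after the paren removal:

    const c = 0b11100101            // low 2 bits = 1, i.e. copy with 1-byte offset
    console.assert((c & 0x3) === 1)
    const len = (c >>> 2 & 0x7) + 4 // bits 2-4 hold length - 4, so len = 5
    const offsetHigh = c >>> 5 << 8 // bits 5-7 are the high byte of the offset
    console.assert(len === 5 && offsetHigh === 7 << 8)
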
@@ -165,7 +165,7 @@ function readVarBigInt(reader) {
 function readZigZag(reader) {
   const zigzag = readVarInt(reader)
   // convert zigzag to int
-  return (zigzag >>> 1) ^ -(zigzag & 1)
+  return zigzag >>> 1 ^ -(zigzag & 1)
 }

 /**
@@ -178,7 +178,7 @@ function readZigZag(reader) {
 export function readZigZagBigInt(reader) {
   const zigzag = readVarBigInt(reader)
   // convert zigzag to int
-  return (zigzag >> BigInt(1)) ^ -(zigzag & BigInt(1))
+  return zigzag >> BigInt(1) ^ -(zigzag & BigInt(1))
 }

 /**
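
Sanity check (illustrative) that the unparenthesized zigzag decode still round-trips, since >>> binds tighter than ^:

    for (const n of [0, 1, -1, 2, -3, 1000, -1000]) {
      const zigzag = (n << 1) ^ (n >> 31)                  // reference 32-bit zigzag encoding
      console.assert((zigzag >>> 1 ^ -(zigzag & 1)) === n) // decode, as in readZigZag
    }
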
@@ -247,7 +247,7 @@ export function toVarInt(n) {
       varInt[idx++] = n
       break
     } else {
-      varInt[idx++] = (n & 0x7f) | 0x80
+      varInt[idx++] = n & 0x7f | 0x80
       n >>>= 7
     }
   }
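
One more spot check (illustrative): the continuation byte written by toVarInt is unchanged because & binds tighter than |:

    const n = 300 // varint-encodes as [0xac, 0x02]
    console.assert((n & 0x7f | 0x80) === 0xac)
    console.assert(n >>> 7 === 0x02)
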
@@ -66,7 +66,7 @@ describe('readPlain', () => {
     view.setInt32(8, high, true)
     const reader = { view, offset: 0 }
     const result = readPlain(reader, 'INT96', 1, undefined)
-    const expectedValue = (BigInt(high) << 64n) | low
+    const expectedValue = BigInt(high) << 64n | low
     expect(result).toEqual([expectedValue])
     expect(reader.offset).toBe(12)
   })