diff --git a/demo/demo.js b/demo/demo.js
index c5ed09a..3913d8a 100644
--- a/demo/demo.js
+++ b/demo/demo.js
@@ -1,6 +1,7 @@
import {
parquetMetadata, parquetMetadataAsync, parquetRead, parquetSchema, toJson,
} from '../src/hyparquet.js'
+import { asyncBufferFromUrl } from '../src/utils.js'
import { compressors } from './hyparquet-compressors.min.js'
import { fileLayout, fileMetadata } from './layout.js'
@@ -60,42 +61,12 @@ dropzone.addEventListener('drop', e => {
async function processUrl(url) {
content.innerHTML = ''
try {
- // Check if file is accessible and get its size
- const head = await fetch(url, { method: 'HEAD' })
- if (!head.ok) {
- content.innerHTML = `${url}`
- content.innerHTML += `Error fetching file\n${head.status} ${head.statusText}`
- return
- }
- const size = head.headers.get('content-length')
- if (!size) {
- content.innerHTML = `${url}`
- content.innerHTML += 'Error fetching file\nNo content-length header'
- return
- }
- // Construct an AsyncBuffer that fetches file chunks
- const asyncBuffer = {
- byteLength: Number(size),
- /**
- * @param {number} start
- * @param {number} end
- * @returns {Promise<ArrayBuffer>}
- */
- slice: async (start, end) => {
- const rangeEnd = end === undefined ? '' : end - 1
- console.log(`Fetch ${url} bytes=${start}-${rangeEnd}`)
- const res = await fetch(url, {
- headers: { Range: `bytes=${start}-${rangeEnd}` },
- })
- return res.arrayBuffer()
- },
- }
+ const asyncBuffer = await asyncBufferFromUrl(url)
const metadata = await parquetMetadataAsync(asyncBuffer)
await render(asyncBuffer, metadata, `${url}`)
} catch (e) {
- console.error('Error fetching file', e)
- content.innerHTML = `${url}`
- content.innerHTML += `Error fetching file\n${e}`
+ console.error('Error fetching url', e)
+ content.innerHTML += `Error fetching url ${url}\n${e}`
}
}
diff --git a/src/utils.js b/src/utils.js
index 75cfed4..55f057f 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -35,3 +35,33 @@ export function concat(aaa, bbb) {
aaa.push(...bbb.slice(i, i + chunk))
}
}
+
+/**
+ * Construct an AsyncBuffer for a URL.
+ *
+ * @typedef {import('./types.js').AsyncBuffer} AsyncBuffer
+ * @param {string} url
+ * @returns {Promise<AsyncBuffer>}
+ */
+export async function asyncBufferFromUrl(url) {
+ // byte length from HEAD request
+ const byteLength = await fetch(url, { method: 'HEAD' })
+ .then(res => {
+ if (!res.ok) throw new Error(`fetch head failed ${res.status}`)
+ const length = res.headers.get('Content-Length')
+ if (!length) throw new Error('missing content length')
+ return parseInt(length)
+ })
+ return {
+ byteLength,
+ async slice(start, end) {
+ // fetch byte range from url
+ const headers = new Headers()
+ const endStr = end === undefined ? '' : end - 1
+ headers.set('Range', `bytes=${start}-${endStr}`)
+ const res = await fetch(url, { headers })
+ if (!res.ok || !res.body) throw new Error(`fetch failed ${res.status}`)
+ return res.arrayBuffer()
+ },
+ }
+}
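
For reference, a minimal usage sketch of the new helper outside the demo (the parquet URL below is a placeholder; parquetRead and its onComplete callback are the existing hyparquet exports already used in demo.js):

import { parquetRead } from './src/hyparquet.js'
import { asyncBufferFromUrl } from './src/utils.js'

// Build an AsyncBuffer backed by HTTP range requests, then read the file.
const file = await asyncBufferFromUrl('https://example.org/data.parquet') // placeholder URL
await parquetRead({
  file,
  onComplete: rows => console.log(rows), // called with the parsed row data
})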