feat: http byte range support
Alan Shaw committed Sep 10, 2024
1 parent 7346b2c commit bc42678
Showing 3 changed files with 96 additions and 7 deletions.
38 changes: 32 additions & 6 deletions src/handlers/unixfs-file.js
@@ -2,6 +2,7 @@
 import { toReadableStream } from '../util/streams.js'
 import { detectContentType } from '../util/mime.js'
 import { HttpError } from '../util/errors.js'
+import { decodeRangeHeader, resolveRange } from '../util/range.js'
 
 /**
  * @typedef {import('../bindings.js').UnixfsEntryContext} UnixfsFileHandlerContext
@@ -34,8 +35,29 @@ export async function handleUnixfsFile (request, env, ctx) {
     throw new HttpError('method not allowed', { status: 405 })
   }
 
+  /** @type {import('dagula').AbsoluteRange|undefined} */
+  let range
+  if (request.headers.has('range')) {
+    /** @type {import('dagula').Range[]} */
+    let ranges = []
+    try {
+      ranges = decodeRangeHeader(request.headers.get('range') ?? '')
+    } catch (err) {
+      throw new HttpError('invalid range', { cause: err, status: 400 })
+    }
+
+    if (ranges.length > 1) {
+      throw new HttpError('multipart byte range unsupported', { status: 400 })
+    }
+
+    range = resolveRange(ranges[0], Number(entry.size))
+  }
+
   console.log('unixfs root', entry.cid.toString())
-  const contentIterator = entry.content()[Symbol.asyncIterator]()
+  const status = range ? 206 : 200
+  const contentLength = range ? range[1] - range[0] + 1 : Number(entry.size)
+  const exportOpts = range ? { offset: range[0], length: range[1] - range[0] + 1 } : {}
+  const contentIterator = entry.content(exportOpts)[Symbol.asyncIterator]()
   const { done, value: firstChunk } = await contentIterator.next()
   if (done || !firstChunk.length) {
     return new Response(null, { status: 204, headers })
@@ -47,6 +69,11 @@ export async function handleUnixfsFile (request, env, ctx)
     headers['Content-Type'] = contentType
   }
 
+  if (range && Number(entry.size) !== contentLength) {
+    const contentRange = `bytes ${range[0]}-${range[1]}/${entry.size}`
+    headers['Content-Range'] = contentRange
+  }
+
   // stream the remainder
   const stream = toReadableStream((async function * () {
     let bytesWritten = firstChunk.length
@@ -58,16 +85,15 @@
         yield chunk
       }
       // FixedLengthStream does not like when you send less than what you said
-      const entrySize = Number(entry.size)
-      if (bytesWritten < entry.size) {
-        console.warn(`padding with ${entrySize - bytesWritten} zeroed bytes`)
-        yield new Uint8Array(entrySize - bytesWritten)
+      if (bytesWritten < contentLength) {
+        console.warn(`padding with ${contentLength - bytesWritten} zeroed bytes`)
+        yield new Uint8Array(contentLength - bytesWritten)
       }
     } catch (/** @type {any} */ err) {
       console.error(err.stack)
      throw err
    }
  })())
 
-  return new Response(stream, { headers })
+  return new Response(stream, { status, headers })
 }
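
Note: decodeRangeHeader and resolveRange are imported from src/util/range.js, which is not touched by this commit. Below is a minimal sketch of what those helpers might look like, assuming a decoded range is a [first, last?] tuple where a negative first value means a suffix length and a missing last means "through end of file" (consistent with how the handler and tests use them). This is an assumption, not the actual file in the repository.

// Hypothetical sketch of src/util/range.js — assumed shape, not part of this commit.
export function decodeRangeHeader (value) {
  if (!value.startsWith('bytes=')) throw new Error('unsupported range unit')
  return value.slice('bytes='.length).split(',').map(part => {
    const [first, last] = part.trim().split('-')
    if (first === '' && last === '') throw new Error('invalid range')
    if (first === '') return [-Number(last)]                        // suffix range, e.g. "-3"
    if (last === '' || last === undefined) return [Number(first)]   // offset range, e.g. "1-"
    return [Number(first), Number(last)]                            // absolute range, e.g. "1-3"
  })
}

// Resolve a decoded range to an absolute [first, last] for a file of `size` bytes.
export function resolveRange ([first, last], size) {
  if (first < 0) return [Math.max(size + first, 0), size - 1]       // suffix length from end
  return [first, Math.min(last ?? size - 1, size - 1)]
}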
2 changes: 1 addition & 1 deletion test/handlers/unixfs-dir.spec.js
@@ -11,7 +11,7 @@ import { UnixFS } from 'ipfs-unixfs'
 import { handleUnixfs } from '../../src/handlers/unixfs.js'
 import { mockWaitUntil, mockBlockstore } from '../helpers.js'
 
-describe('UnixFS handler', () => {
+describe('UnixFS directory handler', () => {
   it('directory correctly links to files whose name includes a #', async () => {
     const waitUntil = mockWaitUntil()
     const path = ''
63 changes: 63 additions & 0 deletions test/handlers/unixfs-file.spec.js
@@ -0,0 +1,63 @@
+/* eslint-env browser */
+import { describe, it } from 'node:test'
+import assert from 'node:assert'
+import { Dagula } from 'dagula'
+import { fromString } from 'uint8arrays'
+import { encode } from 'multiformats/block'
+import * as raw from 'multiformats/codecs/raw'
+import * as pb from '@ipld/dag-pb'
+import { sha256 as hasher } from 'multiformats/hashes/sha2'
+import { UnixFS } from 'ipfs-unixfs'
+import { handleUnixfs } from '../../src/handlers/unixfs.js'
+import { mockWaitUntil, mockBlockstore } from '../helpers.js'
+
+describe('UnixFS file handler', async () => {
+  const waitUntil = mockWaitUntil()
+  const filename = 'Puzzle People #1.png'
+  const path = `/${filename}`
+  const searchParams = new URLSearchParams()
+  const fileData = fromString('test')
+  const fileBlock = await encode({ value: fileData, codec: raw, hasher })
+  const pbData = pb.createNode(new UnixFS({ type: 'directory' }).marshal(), [{
+    Name: filename,
+    Hash: fileBlock.cid
+  }])
+  const dirBlock = await encode({ value: pbData, codec: pb, hasher })
+  const blockstore = mockBlockstore([dirBlock, fileBlock])
+  const dagula = new Dagula(blockstore)
+  const ctx = { waitUntil, unixfs: dagula, dataCid: dirBlock.cid, path, searchParams }
+  const env = { DEBUG: 'true' }
+
+  it('absolute byte range request', async () => {
+    const [first, last] = [1, 3]
+    const req = new Request('http://localhost/ipfs/bafy', { headers: { range: `bytes=${first}-${last}` } })
+    const res = await handleUnixfs(req, env, ctx)
+
+    assert.equal(res.status, 206)
+    assert.equal(res.headers.get('Content-Range'), `bytes ${first}-${last}/${fileData.length}`)
+    const data = await res.text()
+    assert.equal(data, 'est')
+  })
+
+  it('offset byte range request', async () => {
+    const [first] = [1]
+    const req = new Request('http://localhost/ipfs/bafy', { headers: { range: `bytes=${first}-` } })
+    const res = await handleUnixfs(req, env, ctx)
+
+    assert.equal(res.status, 206)
+    assert.equal(res.headers.get('Content-Range'), `bytes ${first}-${fileData.length - 1}/${fileData.length}`)
+    const data = await res.text()
+    assert.equal(data, 'est')
+  })
+
+  it('suffix byte range request', async () => {
+    const suffix = -3
+    const req = new Request('http://localhost/ipfs/bafy', { headers: { range: `bytes=${suffix}` } })
+    const res = await handleUnixfs(req, env, ctx)
+
+    assert.equal(res.status, 206)
+    assert.equal(res.headers.get('Content-Range'), `bytes ${fileData.length + suffix}-${fileData.length - 1}/${fileData.length}`)
+    const data = await res.text()
+    assert.equal(data, 'est')
+  })
+})
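
For reference, a client-side usage sketch exercising the same range forms covered by these tests; the gateway URL and CID below are hypothetical placeholders. Multipart ranges such as "bytes=0-1,3-4" are rejected by the handler with a 400.

// Hypothetical usage example — URL and CID are placeholders, not real endpoints.
const url = 'https://gateway.example.com/ipfs/bafyexamplecid/file.png'

const res = await fetch(url, { headers: { Range: 'bytes=0-1023' } })   // absolute range
// const res = await fetch(url, { headers: { Range: 'bytes=1024-' } }) // offset to end of file
// const res = await fetch(url, { headers: { Range: 'bytes=-1024' } }) // last 1024 bytes

console.log(res.status)                        // 206
console.log(res.headers.get('Content-Range'))  // e.g. "bytes 0-1023/<total size>"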
