Skip to content

Commit

Permalink
feat: set max number of car cids to resolve (#24)
Browse files Browse the repository at this point in the history
Sets the max number of CAR CIDs to resolve, based on the numbers
suggested in issue #22

Later on, once the bigger-chunks change has largely propagated, we can
consider increasing this. Also, going to create an issue to add logging
to `freeway` so that we can log the number of CAR CIDs when we throw this
error back, giving us better visibility into these numbers
  • Loading branch information
vasco-santos authored Mar 2, 2023
1 parent ad152f5 commit 94e65ea
Show file tree
Hide file tree
Showing 5 changed files with 33 additions and 1 deletion.
2 changes: 1 addition & 1 deletion package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions src/bindings.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ export interface Environment {
CARPARK: R2Bucket
DUDEWHERE: R2Bucket
SATNAV: R2Bucket
MAX_SHARDS: string
}

export interface CarCidsContext extends Context {
Expand Down
14 changes: 14 additions & 0 deletions src/middleware.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,11 @@ export function withCarCids (handler) {
if (!ctx.dataCid) throw new Error('missing data CID')
if (!ctx.searchParams) throw new Error('missing URL search params')

// Cloudflare currently sets a limit of 1000 sub-requests within the worker context
// If we have a given root CID split across hundreds of CARs, freeway will hit
// the sub-requests limit and not serve content anyway
const maxShards = env.MAX_SHARDS ? parseInt(env.MAX_SHARDS) : 250

const carCids = ctx.searchParams.getAll('origin').flatMap(str => {
return str.split(',')
.reduce((/** @type {import('multiformats').CID[]} */cids, str) => {
Expand All @@ -62,10 +67,19 @@ export function withCarCids (handler) {
const results = await env.DUDEWHERE.list({ prefix: `${ctx.dataCid}/`, cursor })
if (!results || !results.objects.length) break
carCids.push(...results.objects.map(o => parseCid(o.key.split('/')[1])))

if (carCids.length > maxShards) {
throw new HttpError('request exceeds maximum DAG shards', { status: 501 })
}

if (!results.truncated) break
cursor = results.cursor
}
console.log(`dude where's my CAR? ${ctx.dataCid} => ${carCids}`)
} else {
if (carCids.length > maxShards) {
throw new HttpError('request exceeds maximum DAG shards', { status: 501 })
}
}

if (!carCids.length) {
Expand Down
10 changes: 10 additions & 0 deletions test/index.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -68,4 +68,14 @@ describe('freeway', () => {
const output = new Uint8Array(await res.arrayBuffer())
assert(equals(input[0].content, output))
})

it('should fail when divided into more than 120 CAR files', async () => {
const input = [{ path: 'sargo.tar.xz', content: randomBytes(1218523560) }]
const { dataCid } = await builder.add(input)

const res = await miniflare.dispatchFetch(`http://localhost:8787/ipfs/${dataCid}/${input[0].path}`)

assert(!res.ok)
assert.equal(res.status, 501)
})
})
7 changes: 7 additions & 0 deletions wrangler.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ r2_buckets = [
[env.production.build]
command = "npm run build"

[env.production.vars]
MAX_SHARDS = "250"

# Staging!
[env.staging]
account_id = "fffa4b4363a7e5250af8357087263b3a"
Expand All @@ -40,6 +43,9 @@ r2_buckets = [
[env.staging.build]
command = "npm run build"

[env.staging.vars]
MAX_SHARDS = "250"

# Test!
[env.test]
workers_dev = true
Expand All @@ -51,6 +57,7 @@ r2_buckets = [

[env.test.vars]
DEBUG = "true"
MAX_SHARDS = "120"

[env.alanshaw]
workers_dev = true
Expand Down

0 comments on commit 94e65ea

Please sign in to comment.