Skip to content

Commit

Permalink
✨ make csvType=filtered work
Browse files Browse the repository at this point in the history
  • Loading branch information
danyx23 committed May 26, 2024
1 parent f600a8b commit 6062c79
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 9 deletions.
14 changes: 11 additions & 3 deletions functions/_common/grapherRenderer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -170,19 +170,27 @@ async function initGrapher(
return grapher
}

export async function fetchCsvForGrapher(slug: string, env: Env) {
export async function fetchCsvForGrapher(
slug: string,
env: Env,
searchParams?: URLSearchParams
) {
const grapherLogger = new TimeLogger("grapher")
const grapher = await initGrapher(
{
slug,
options: TWITTER_OPTIONS,
searchParams: new URLSearchParams(""),
searchParams: searchParams ?? new URLSearchParams(""),
env,
},
grapherLogger
)
await grapher.downloadLegacyDataFromOwidVariableIds()
return new Response(grapher.inputTable.toPrettyCsv(), {
const table =
searchParams.get("csvType") === "filtered"
? grapher.transformedTable
: grapher.inputTable
return new Response(table.toPrettyCsv(), {
headers: {
"Content-Type": "text/csv",
},
Expand Down
28 changes: 22 additions & 6 deletions functions/grapher/[slug].ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,11 @@ export const onRequestGet: PagesFunction = async (context) => {
// Makes it so that if there's an error, we will just deliver the original page before the HTML rewrite.
// Only caveat is that redirects will not be taken into account for some reason; but on the other hand the worker is so simple that it's unlikely to fail.
context.passThroughOnException()
console.log(
"prepping Handling",
context.request.url,
context.request.headers.get("User-Agent")
)

// Redirects handling is performed by the worker, and is done by fetching the (baked) _grapherRedirects.json file.
// That file is a mapping from old slug to new slug.
Expand All @@ -21,16 +26,18 @@ export const onRequestGet: PagesFunction = async (context) => {
return redirects[slug]
}

const { request, env, params } = context
const url = new URL(request.url)
const isCsvRequest = url.pathname.endsWith(".csv")
const createRedirectResponse = (redirSlug: string, currentUrl: URL) =>
new Response(null, {
status: 302,
headers: { Location: `/grapher/${redirSlug}${currentUrl.search}` },
headers: {
Location: `/grapher/${redirSlug}${isCsvRequest ? ".csv" : ""}${currentUrl.search}`,
},
})

const { request, env, params } = context

const originalSlug = params.slug as string
const url = new URL(request.url)

/**
* REDIRECTS HANDLING:
Expand Down Expand Up @@ -59,7 +66,16 @@ export const onRequestGet: PagesFunction = async (context) => {
// { redirect: "manual" }
// )

const grapherPageResp = await env.ASSETS.fetch(url, { redirect: "manual" })
const grapherUrl = new URL(request.url)
// If this is a CSV request, build a URL without the .csv extension but keep the query params,
// so we can check whether the corresponding grapher page exists (and follow its redirects if it doesn't).
if (isCsvRequest) {
grapherUrl.pathname = url.pathname.replace(/\.csv$/, "")
}

const grapherPageResp = await env.ASSETS.fetch(grapherUrl, {
redirect: "manual",
})

if (grapherPageResp.status === 404) {
// If the request is a 404, we check if there's a redirect for it.
Expand Down Expand Up @@ -135,7 +151,7 @@ export const onRequestGet: PagesFunction = async (context) => {
.get(
"/grapher/:slug.csv",
async ({ params: { slug } }, { searchParams }, env) =>
fetchCsvForGrapher(slug, env)
fetchCsvForGrapher(slug, env, searchParams) // pass undefined if we want the full csv
)
.get(
"/grapher/:slug",
Expand Down

0 comments on commit 6062c79

Please sign in to comment.