💄 reorder parameters of knexRaw and knexRawFirst to have knex in first pos
danyx23 committed Mar 6, 2024
1 parent cdfe2f4 commit 080ec57
Showing 15 changed files with 84 additions and 71 deletions.
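The change itself is mechanical: at every call site the Knex instance (or transaction) moves from after the SQL string to the first argument. Below is a minimal sketch of what one call site looks like after the reordering, assuming the signature implied by these call sites; knexRawFirst is the real helper name, but the declared signature, row type, and wrapper function are illustrative placeholders rather than the repository's actual code.

import { Knex } from "knex"

// Signature implied by the new call sites (a sketch, not the actual helper
// from the db module): connection/transaction first, then SQL, then params.
declare function knexRawFirst<TRow = unknown>(
    knex: Knex<any, any[]>,
    sql: string,
    params?: unknown[]
): Promise<TRow | undefined>

// Hypothetical row type for the example query.
interface DatasetNameRow {
    name: string
}

async function fetchDatasetName(
    knex: Knex<any, any[]>,
    datasetId: number
): Promise<string | undefined> {
    // Before this commit the same call read:
    //   knexRawFirst<DatasetNameRow>(`SELECT name FROM datasets WHERE id=?`, knex, [datasetId])
    const row = await knexRawFirst<DatasetNameRow>(
        knex,
        `SELECT name FROM datasets WHERE id=?`,
        [datasetId]
    )
    return row?.name
}

With the new order the query text sits directly next to its bound parameters, and the trx/knex handle leads every call the same way inside the transaction blocks in the diff below.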
4 changes: 2 additions & 2 deletions adminSiteServer/adminRouter.tsx
@@ -142,8 +142,8 @@ adminRouter.get("/datasets/:datasetId.csv", async (req, res) => {
await db.knexInstance().transaction(async (t) => {
const datasetName = (
await db.knexRawFirst<Pick<DbPlainDataset, "name">>(
`SELECT name FROM datasets WHERE id=?`,
t,
`SELECT name FROM datasets WHERE id=?`,
[datasetId]
)
)?.name
@@ -170,7 +170,7 @@ adminRouter.get("/datasets/:datasetId/downloadZip", async (req, res) => {

const file = await db.knexRawFirst<
Pick<DbPlainDatasetFile, "filename" | "file">
>(`SELECT filename, file FROM dataset_files WHERE datasetId=?`, knex, [
>(knex, `SELECT filename, file FROM dataset_files WHERE datasetId=?`, [
datasetId,
])
res.send(file?.file)
52 changes: 26 additions & 26 deletions adminSiteServer/apiRouter.ts
@@ -483,8 +483,8 @@ apiRouter.get(
if (!slug) return {}

const pageviewsByUrl = await db.knexRawFirst(
"select * from ?? where url = ?",
db.knexInstance(),
"select * from ?? where url = ?",
[
AnalyticsPageviewsTableName,
`https://ourworldindata.org/grapher/${slug}`,
@@ -1595,6 +1595,7 @@ apiRouter.get("/datasets.json", async (req) => {
return db.knexInstance().transaction(
async (trx) => {
const datasets = await db.knexRaw<Record<string, any>>(
trx,
`
WITH variable_counts AS (
SELECT
@@ -1624,19 +1625,18 @@ apiRouter.get("/datasets.json", async (req) => {
JOIN users mu ON mu.id=ad.metadataEditedByUserId
JOIN datasets d ON d.id=ad.id
ORDER BY ad.dataEditedAt DESC
`,
trx
`
)

const tags = await db.knexRaw<
Pick<DbPlainTag, "id" | "name"> &
Pick<DbPlainDatasetTag, "datasetId">
>(
trx,
`
SELECT dt.datasetId, t.id, t.name FROM dataset_tags dt
JOIN tags t ON dt.tagId = t.id
`,
trx
`
)
const tagsByDatasetId = lodash.groupBy(tags, (t) => t.datasetId)
for (const dataset of datasets) {
@@ -1659,6 +1659,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
return db.knexInstance().transaction(
async (trx) => {
const dataset = await db.knexRawFirst<Record<string, any>>(
trx,
`
SELECT d.id,
d.namespace,
@@ -1682,16 +1683,15 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
JOIN users mu ON mu.id=d.metadataEditedByUserId
WHERE d.id = ?
`,
trx,
[datasetId]
)

if (!dataset)
throw new JsonError(`No dataset by id '${datasetId}'`, 404)

const zipFile = await db.knexRawFirst(
`SELECT filename FROM dataset_files WHERE datasetId=?`,
trx,
`SELECT filename FROM dataset_files WHERE datasetId=?`,
[datasetId]
)
if (zipFile) dataset.zipFile = zipFile
@@ -1700,12 +1700,12 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
DbRawVariable,
"id" | "name" | "description" | "display" | "catalogPath"
>[] = await db.knexRaw(
trx,
`
SELECT v.id, v.name, v.description, v.display, v.catalogPath
FROM variables AS v
WHERE v.datasetId = ?
`,
trx,
[datasetId]
)

@@ -1717,6 +1717,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {

// add all origins
const origins: DbRawOrigin[] = await db.knexRaw(
trx,
`
select distinct
o.*
@@ -1725,7 +1726,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
join variables as v on ov.variableId = v.id
where v.datasetId = ?
`,
trx,
[datasetId]
)

@@ -1734,13 +1734,13 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
dataset.origins = parsedOrigins

const sources = await db.knexRaw(
trx,
`
SELECT s.id, s.name, s.description
FROM sources AS s
WHERE s.datasetId = ?
ORDER BY s.id ASC
`,
trx,
[datasetId]
)

@@ -1754,6 +1754,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
})

const charts = await db.knexRaw(
trx,
`
SELECT ${OldChart.listFields}
FROM charts
@@ -1764,7 +1765,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
WHERE v.datasetId = ?
GROUP BY charts.id
`,
trx,
[datasetId]
)

@@ -1773,25 +1773,25 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
await Chart.assignTagsForCharts(charts as any)

const tags = await db.knexRaw(
trx,
`
SELECT t.id, t.name
FROM tags t
JOIN dataset_tags dt ON dt.tagId = t.id
WHERE dt.datasetId = ?
`,
trx,
[datasetId]
)
dataset.tags = tags

const availableTags = await db.knexRaw(
trx,
`
SELECT t.id, t.name, p.name AS parentName
FROM tags AS t
JOIN tags AS p ON t.parentId=p.id
WHERE p.isBulkImport IS FALSE
`,
trx
`
)
dataset.availableTags = availableTags

@@ -1812,6 +1812,7 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
await knex.transaction(async (trx) => {
const newDataset = (req.body as { dataset: any }).dataset
await db.knexRaw(
trx,
`
UPDATE datasets
SET
@@ -1820,7 +1821,6 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
metadataEditedByUserId=?
WHERE id=?
`,
trx,
[
newDataset.nonRedistributable,
new Date(),
@@ -1830,13 +1830,13 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
)

const tagRows = newDataset.tags.map((tag: any) => [tag.id, datasetId])
await db.knexRaw(`DELETE FROM dataset_tags WHERE datasetId=?`, trx, [
await db.knexRaw(trx, `DELETE FROM dataset_tags WHERE datasetId=?`, [
datasetId,
])
if (tagRows.length)
await db.knexRaw(
`INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
trx,
`INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
[tagRows]
)

@@ -1864,8 +1864,8 @@ apiRouter.post(
if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404)
await knex.transaction(async (trx) => {
await db.knexRaw(
`UPDATE datasets SET isArchived = 1 WHERE id=?`,
trx,
`UPDATE datasets SET isArchived = 1 WHERE id=?`,
[datasetId]
)
})
@@ -1895,22 +1895,22 @@ apiRouter.delete(

await knex.transaction(async (trx) => {
await db.knexRaw(
`DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`,
trx,
`DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`,
[datasetId]
)
await db.knexRaw(
`DELETE FROM dataset_files WHERE datasetId=?`,
trx,
`DELETE FROM dataset_files WHERE datasetId=?`,
[datasetId]
)
await db.knexRaw(`DELETE FROM variables WHERE datasetId=?`, trx, [
await db.knexRaw(trx, `DELETE FROM variables WHERE datasetId=?`, [
datasetId,
])
await db.knexRaw(`DELETE FROM sources WHERE datasetId=?`, trx, [
await db.knexRaw(trx, `DELETE FROM sources WHERE datasetId=?`, [
datasetId,
])
await db.knexRaw(`DELETE FROM datasets WHERE id=?`, trx, [
await db.knexRaw(trx, `DELETE FROM datasets WHERE id=?`, [
datasetId,
])
})
@@ -1941,6 +1941,7 @@ apiRouter.post(
if (req.body.republish) {
await knex.transaction(async (trx) => {
await db.knexRaw(
trx,
`
UPDATE charts
SET config = JSON_SET(config, "$.version", config->"$.version" + 1)
Expand All @@ -1951,7 +1952,6 @@ apiRouter.post(
WHERE variables.datasetId = ?
)
`,
trx,
[datasetId]
)
})
@@ -2346,19 +2346,19 @@ apiRouter.get("/sources/:sourceId.json", async (req: Request) => {
return db.knexInstance().transaction(
async (trx) => {
const source = await db.knexRawFirst<Record<string, any>>(
trx,
`
SELECT s.id, s.name, s.description, s.createdAt, s.updatedAt, d.namespace
FROM sources AS s
JOIN active_datasets AS d ON d.id=s.datasetId
WHERE s.id=?`,
trx,
[sourceId]
)
if (!source)
throw new JsonError(`No source by id '${sourceId}'`, 404)
source.variables = await db.knexRaw(
`SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`,
trx,
`SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`,
[sourceId]
)

4 changes: 2 additions & 2 deletions baker/GrapherBaker.tsx
@@ -476,13 +476,13 @@ export const bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers =
async (bakedSiteDir: string, knex: Knex<any, any[]>) => {
const chartsToBake: { id: number; config: string; slug: string }[] =
await knexRaw(
knex,
`
SELECT
id, config, config->>'$.slug' as slug
FROM charts WHERE JSON_EXTRACT(config, "$.isPublished")=true
ORDER BY JSON_EXTRACT(config, "$.slug") ASC
`,
knex
`
)

const newSlugs = chartsToBake.map((row) => row.slug)
8 changes: 4 additions & 4 deletions baker/algolia/indexExplorersToAlgolia.ts
@@ -120,11 +120,11 @@ const getExplorerRecords = async (
// Fetch info about all charts used in explorers, as linked by the explorer_charts table
const graphersUsedInExplorers = await db
.knexRaw<{ chartId: number }>(
knex,
`
SELECT DISTINCT chartId
FROM explorer_charts
`,
knex
`
)
.then((results: { chartId: number }[]) =>
results.map(({ chartId }) => chartId)
@@ -134,15 +134,15 @@

const explorerRecords = await db
.knexRaw<Omit<ExplorerEntry, "views_7d">>(
knex,
`
SELECT slug,
COALESCE(config->>"$.explorerSubtitle", "null") AS subtitle,
COALESCE(config->>"$.explorerTitle", "null") AS title,
COALESCE(config->>"$.blocks", "null") AS blocks
FROM explorers
WHERE isPublished = true
`,
knex
`
)
.then((results) =>
results.flatMap(({ slug, title, subtitle, blocks }) => {
2 changes: 1 addition & 1 deletion baker/syncRedirectsToGrapher.ts
@@ -61,8 +61,8 @@ export const syncRedirectsToGrapher = async (): Promise<void> => {
`Adding redirect: ${source} -> ${resolvedTarget} (${code})`
)
await db.knexRaw(
`INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`,
knex,
`INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`,
[source, resolvedTarget, code]
)
}