diff --git a/adminSiteServer/adminRouter.tsx b/adminSiteServer/adminRouter.tsx
index 331e79f51f1..f1cd4495187 100644
--- a/adminSiteServer/adminRouter.tsx
+++ b/adminSiteServer/adminRouter.tsx
@@ -142,8 +142,8 @@ adminRouter.get("/datasets/:datasetId.csv", async (req, res) => {
     await db.knexInstance().transaction(async (t) => {
         const datasetName = (
             await db.knexRawFirst>(
-                `SELECT name FROM datasets WHERE id=?`,
                 t,
+                `SELECT name FROM datasets WHERE id=?`,
                 [datasetId]
             )
         )?.name
@@ -170,7 +170,7 @@ adminRouter.get("/datasets/:datasetId/downloadZip", async (req, res) => {
     const file = await db.knexRawFirst<
         Pick
-    >(`SELECT filename, file FROM dataset_files WHERE datasetId=?`, knex, [
+    >(knex, `SELECT filename, file FROM dataset_files WHERE datasetId=?`, [
         datasetId,
     ])
 
     res.send(file?.file)
diff --git a/adminSiteServer/apiRouter.ts b/adminSiteServer/apiRouter.ts
index 7436526db96..3faf7424d7d 100644
--- a/adminSiteServer/apiRouter.ts
+++ b/adminSiteServer/apiRouter.ts
@@ -483,8 +483,8 @@ apiRouter.get(
         if (!slug) return {}
         const pageviewsByUrl = await db.knexRawFirst(
-            "select * from ?? where url = ?",
             db.knexInstance(),
+            "select * from ?? where url = ?",
             [
                 AnalyticsPageviewsTableName,
                 `https://ourworldindata.org/grapher/${slug}`,
@@ -1595,6 +1595,7 @@ apiRouter.get("/datasets.json", async (req) => {
     return db.knexInstance().transaction(
         async (trx) => {
             const datasets = await db.knexRaw>(
+                trx,
                 `
                 WITH variable_counts AS (
                     SELECT
@@ -1624,19 +1625,18 @@
                 JOIN users mu ON mu.id=ad.metadataEditedByUserId
                 JOIN datasets d ON d.id=ad.id
                 ORDER BY ad.dataEditedAt DESC
-            `,
-                trx
+            `
             )
 
             const tags = await db.knexRaw<
                 Pick & Pick
             >(
+                trx,
                 `
                 SELECT dt.datasetId, t.id, t.name FROM dataset_tags dt
                 JOIN tags t ON dt.tagId = t.id
-            `,
-                trx
+            `
             )
             const tagsByDatasetId = lodash.groupBy(tags, (t) => t.datasetId)
             for (const dataset of datasets) {
@@ -1659,6 +1659,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
     return db.knexInstance().transaction(
         async (trx) => {
             const dataset = await db.knexRawFirst>(
+                trx,
                 `
                 SELECT d.id,
                     d.namespace,
@@ -1682,7 +1683,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 JOIN users mu ON mu.id=d.metadataEditedByUserId
                 WHERE d.id = ?
             `,
-                trx,
                 [datasetId]
             )
             if (!dataset)
                 throw new JsonError(`No dataset by id '${datasetId}'`, 404)
             const zipFile = await db.knexRawFirst(
-                `SELECT filename FROM dataset_files WHERE datasetId=?`,
                 trx,
+                `SELECT filename FROM dataset_files WHERE datasetId=?`,
                 [datasetId]
             )
             if (zipFile) dataset.zipFile = zipFile
@@ -1700,12 +1700,12 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 DbRawVariable,
                 "id" | "name" | "description" | "display" | "catalogPath"
             >[] = await db.knexRaw(
+                trx,
                 `
                 SELECT v.id, v.name, v.description, v.display, v.catalogPath
                 FROM variables AS v
                 WHERE v.datasetId = ?
             `,
-                trx,
                 [datasetId]
             )
@@ -1717,6 +1717,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
             // add all origins
             const origins: DbRawOrigin[] = await db.knexRaw(
+                trx,
                 `
                 select distinct o.*
@@ -1725,7 +1726,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 join origins as o on ov.originId = o.id
                 join variables as v on ov.variableId = v.id
                 where v.datasetId = ?
             `,
-                trx,
                 [datasetId]
             )
@@ -1734,13 +1734,13 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
             dataset.origins = parsedOrigins
 
             const sources = await db.knexRaw(
+                trx,
                 `
                 SELECT s.id, s.name, s.description FROM sources AS s
                 WHERE s.datasetId = ?
                 ORDER BY s.id ASC
             `,
-                trx,
                 [datasetId]
             )
@@ -1754,6 +1754,7 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
             })
 
             const charts = await db.knexRaw(
+                trx,
                 `
                 SELECT ${OldChart.listFields}
                 FROM charts
@@ -1764,7 +1765,6 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
                 WHERE v.datasetId = ?
                 GROUP BY charts.id
             `,
-                trx,
                 [datasetId]
             )
@@ -1773,25 +1773,25 @@ apiRouter.get("/datasets/:datasetId.json", async (req: Request) => {
             await Chart.assignTagsForCharts(charts as any)
 
             const tags = await db.knexRaw(
+                trx,
                 `
                 SELECT t.id, t.name
                 FROM tags t
                 JOIN dataset_tags dt ON dt.tagId = t.id
                 WHERE dt.datasetId = ?
             `,
-                trx,
                 [datasetId]
             )
             dataset.tags = tags
 
             const availableTags = await db.knexRaw(
+                trx,
                 `
                 SELECT t.id, t.name, p.name AS parentName
                 FROM tags AS t
                 JOIN tags AS p ON t.parentId=p.id
                 WHERE p.isBulkImport IS FALSE
-            `,
-                trx
+            `
             )
             dataset.availableTags = availableTags
@@ -1812,6 +1812,7 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
     await knex.transaction(async (trx) => {
         const newDataset = (req.body as { dataset: any }).dataset
         await db.knexRaw(
+            trx,
             `
             UPDATE datasets
             SET
@@ -1820,7 +1821,6 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
                 metadataEditedByUserId=?
             WHERE id=?
             `,
-            trx,
             [
                 newDataset.nonRedistributable,
                 new Date(),
@@ -1830,13 +1830,13 @@ apiRouter.put("/datasets/:datasetId", async (req: Request, res: Response) => {
         )
 
         const tagRows = newDataset.tags.map((tag: any) => [tag.id, datasetId])
-        await db.knexRaw(`DELETE FROM dataset_tags WHERE datasetId=?`, trx, [
+        await db.knexRaw(trx, `DELETE FROM dataset_tags WHERE datasetId=?`, [
             datasetId,
         ])
         if (tagRows.length)
            await db.knexRaw(
-                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
                trx,
+                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
                [tagRows]
            )
@@ -1864,8 +1864,8 @@ apiRouter.post(
         if (!dataset) throw new JsonError(`No dataset by id ${datasetId}`, 404)
         await knex.transaction(async (trx) => {
             await db.knexRaw(
-                `UPDATE datasets SET isArchived = 1 WHERE id=?`,
                 trx,
+                `UPDATE datasets SET isArchived = 1 WHERE id=?`,
                 [datasetId]
             )
         })
@@ -1895,22 +1895,22 @@ apiRouter.delete(
         await knex.transaction(async (trx) => {
             await db.knexRaw(
-                `DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`,
                 trx,
+                `DELETE d FROM country_latest_data AS d JOIN variables AS v ON d.variable_id=v.id WHERE v.datasetId=?`,
                 [datasetId]
             )
             await db.knexRaw(
-                `DELETE FROM dataset_files WHERE datasetId=?`,
                 trx,
+                `DELETE FROM dataset_files WHERE datasetId=?`,
                 [datasetId]
             )
-            await db.knexRaw(`DELETE FROM variables WHERE datasetId=?`, trx, [
+            await db.knexRaw(trx, `DELETE FROM variables WHERE datasetId=?`, [
                 datasetId,
             ])
-            await db.knexRaw(`DELETE FROM sources WHERE datasetId=?`, trx, [
+            await db.knexRaw(trx, `DELETE FROM sources WHERE datasetId=?`, [
                 datasetId,
             ])
-            await db.knexRaw(`DELETE FROM datasets WHERE id=?`, trx, [
+            await db.knexRaw(trx, `DELETE FROM datasets WHERE id=?`, [
                 datasetId,
             ])
         })
@@ -1941,6 +1941,7 @@ apiRouter.post(
         if (req.body.republish) {
             await knex.transaction(async (trx) => {
                 await db.knexRaw(
+                    trx,
                     `
                     UPDATE charts
                     SET config = JSON_SET(config, "$.version", config->"$.version" + 1)
@@ -1951,7 +1952,6 @@ apiRouter.post(
                     WHERE variables.datasetId = ?
                     )
                 `,
-                    trx,
                     [datasetId]
                 )
             })
@@ -2346,19 +2346,19 @@ apiRouter.get("/sources/:sourceId.json", async (req: Request) => {
     return db.knexInstance().transaction(
         async (trx) => {
             const source = await db.knexRawFirst>(
+                trx,
                 `
                 SELECT s.id, s.name, s.description, s.createdAt, s.updatedAt, d.namespace
                 FROM sources AS s
                 JOIN active_datasets AS d ON d.id=s.datasetId
                 WHERE s.id=?`,
-                trx,
                 [sourceId]
             )
             if (!source) throw new JsonError(`No source by id '${sourceId}'`, 404)
             source.variables = await db.knexRaw(
-                `SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`,
                 trx,
+                `SELECT id, name, updatedAt FROM variables WHERE variables.sourceId=?`,
                 [sourceId]
             )
diff --git a/baker/GrapherBaker.tsx b/baker/GrapherBaker.tsx
index 3618bc17763..e94e4f5a2e8 100644
--- a/baker/GrapherBaker.tsx
+++ b/baker/GrapherBaker.tsx
@@ -481,13 +481,13 @@ export const bakeAllChangedGrapherPagesVariablesPngSvgAndDeleteRemovedGraphers =
     async (bakedSiteDir: string, knex: Knex) => {
         const chartsToBake: { id: number; config: string; slug: string }[] =
             await knexRaw(
+                knex,
                 `
                 SELECT id, config, config->>'$.slug' as slug
                 FROM charts
                 WHERE JSON_EXTRACT(config, "$.isPublished")=true
                 ORDER BY JSON_EXTRACT(config, "$.slug") ASC
-                `,
-                knex
+                `
             )
         const newSlugs = chartsToBake.map((row) => row.slug)
diff --git a/baker/algolia/indexExplorersToAlgolia.ts b/baker/algolia/indexExplorersToAlgolia.ts
index 3f3db5c4346..5a127d29613 100644
--- a/baker/algolia/indexExplorersToAlgolia.ts
+++ b/baker/algolia/indexExplorersToAlgolia.ts
@@ -120,11 +120,11 @@ const getExplorerRecords = async (
     // Fetch info about all charts used in explorers, as linked by the explorer_charts table
     const graphersUsedInExplorers = await db
         .knexRaw<{ chartId: number }>(
+            knex,
             `
             SELECT DISTINCT chartId
             FROM explorer_charts
-            `,
-            knex
+            `
         )
         .then((results: { chartId: number }[]) =>
             results.map(({ chartId }) => chartId)
@@ -134,6 +134,7 @@ const getExplorerRecords = async (
 
     const explorerRecords = await db
         .knexRaw>(
+            knex,
             `
             SELECT slug,
                 COALESCE(config->>"$.explorerSubtitle", "null") AS subtitle,
@@ -141,8 +142,7 @@ const getExplorerRecords = async (
                 COALESCE(config->>"$.blocks", "null") AS blocks
             FROM explorers
             WHERE isPublished = true
-            `,
-            knex
+            `
         )
         .then((results) =>
             results.flatMap(({ slug, title, subtitle, blocks }) => {
diff --git a/baker/siteRenderers.tsx b/baker/siteRenderers.tsx
index 151a40a12d4..04fd3eaa2c5 100644
--- a/baker/siteRenderers.tsx
+++ b/baker/siteRenderers.tsx
@@ -261,8 +261,8 @@ export const renderPost = async (
 export const renderFrontPage = async (knex: Knex) => {
     // Annoying, MySQL+TypeORM doesn't support JSONB, so I'm using raw SQL to confirm if there's a published homepage
     const gdocHomepageResult = await db.knexRawFirst<{ id: string }>(
-        `SELECT id FROM posts_gdocs WHERE content->>"$.type" = "${OwidGdocType.Homepage}" AND published = TRUE`,
-        db.knexInstance()
+        db.knexInstance(),
+        `SELECT id FROM posts_gdocs WHERE content->>"$.type" = "${OwidGdocType.Homepage}" AND published = TRUE`
     )
 
     if (gdocHomepageResult) {
diff --git a/baker/syncRedirectsToGrapher.ts b/baker/syncRedirectsToGrapher.ts
index 2a8d188dbb7..ae57eb7e5b7 100644
--- a/baker/syncRedirectsToGrapher.ts
+++ b/baker/syncRedirectsToGrapher.ts
@@ -61,8 +61,8 @@ export const syncRedirectsToGrapher = async (): Promise => {
                 `Adding redirect: ${source} -> ${resolvedTarget} (${code})`
             )
             await db.knexRaw(
-                `INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`,
                 knex,
+                `INSERT INTO redirects (source, target, code) VALUES (?, ?, ?)`,
                 [source, resolvedTarget, code]
             )
         }
diff --git a/db/db.ts b/db/db.ts
index 996188f55f7..0efcbaa71e3 100644
--- a/db/db.ts
+++ b/db/db.ts
@@ -115,17 +115,17 @@ export const knexTable = (table: string): Knex.QueryBuilder =>
     knexInstance().table(table)
 
 export const knexRaw = async (
-    str: string,
     knex: Knex,
+    str: string,
     params?: any[]
 ): Promise => (await knex.raw(str, params ?? []))[0]
 
 export const knexRawFirst = async (
-    str: string,
     knex: Knex,
+    str: string,
     params?: any[]
 ): Promise => {
-    const results = await knexRaw(str, knex, params)
+    const results = await knexRaw(knex, str, params)
     if (results.length === 0) return undefined
     return results[0]
 }
@@ -140,10 +140,10 @@ export const getSlugsWithPublishedGdocsSuccessors = async (
     knex: Knex
 ): Promise> => {
     return knexRaw(
+        knex,
         `-- sql
         select slug from posts_with_gdoc_publish_status
-        where isGdocPublished = TRUE`,
-        knex
+        where isGdocPublished = TRUE`
     ).then((rows) => new Set(rows.map((row: any) => row.slug)))
 }
 
@@ -151,6 +151,7 @@ export const getExplorerTags = async (
     knex: Knex
 ): Promise<{ slug: string; tags: DbChartTagJoin[] }[]> => {
     return knexRaw<{ slug: string; tags: string }>(
+        knex,
         `-- sql
         SELECT
             ext.explorerSlug as slug,
@@ -163,8 +164,7 @@ export const getExplorerTags = async (
         LEFT JOIN tags t ON ext.tagId = t.id
         GROUP BY
-            ext.explorerSlug`,
-        knex
+            ext.explorerSlug`
     ).then((rows) =>
         rows.map((row) => ({
             slug: row.slug,
@@ -186,6 +186,7 @@ export const getPublishedExplorersBySlug = async (
     const tags = await getExplorerTags(knex)
     const tagsBySlug = keyBy(tags, "slug")
     return knexRaw(
+        knex,
         `-- sql
         SELECT
             slug,
@@ -194,8 +195,7 @@ export const getPublishedExplorersBySlug = async (
         FROM
             explorers
         WHERE
-            isPublished = TRUE`,
-        knex
+            isPublished = TRUE`
     ).then((rows) => {
         const processed = rows.map((row: any) => {
             return {
@@ -213,6 +213,7 @@ export const getLatestDataInsights = (
     limit = 5
 ): Promise => {
     return knexRaw(
+        knexInstance(),
         `
         SELECT
             content->>'$.title' AS title,
@@ -225,7 +226,6 @@ export const getLatestDataInsights = (
         ORDER BY publishedAt DESC
         LIMIT ?
         `,
-        knexInstance(),
         [limit]
     ).then((results) =>
         results.map((record: any) => ({
@@ -237,35 +237,35 @@ export const getPublishedDataInsightCount = (): Promise => {
     return knexRawFirst<{ count: number }>(
+        knexInstance(),
         `
         SELECT COUNT(*) AS count
         FROM posts_gdocs
         WHERE content->>'$.type' = '${OwidGdocType.DataInsight}'
             AND published = TRUE
-            AND publishedAt < NOW()`,
-        knexInstance()
+            AND publishedAt < NOW()`
     ).then((res) => res?.count ?? 0)
 }
 
 export const getTotalNumberOfCharts = (): Promise => {
     return knexRawFirst<{ count: number }>(
+        knexInstance(),
         `
         SELECT COUNT(*) AS count
         FROM charts
-        WHERE config->"$.isPublished" = TRUE`,
-        knexInstance()
+        WHERE config->"$.isPublished" = TRUE`
     ).then((res) => res?.count ?? 0)
 }
 
 export const getTotalNumberOfInUseGrapherTags = (): Promise => {
     return knexRawFirst<{ count: number }>(
+        knexInstance(),
         `
         SELECT COUNT(DISTINCT(tagId)) AS count
         FROM chart_tags
         WHERE chartId IN (
             SELECT id FROM charts
-            WHERE publishedAt IS NOT NULL)`,
-        knexInstance()
+            WHERE publishedAt IS NOT NULL)`
     ).then((res) => res?.count ?? 0)
 }
diff --git a/db/model/Chart.ts b/db/model/Chart.ts
index 0b53d8e2321..6f4116677b9 100644
--- a/db/model/Chart.ts
+++ b/db/model/Chart.ts
@@ -392,6 +392,7 @@ export const getChartEmbedUrlsInPublishedWordpressPosts = async (
         DbPlainPostLink,
         "target" | "queryString"
     >[] = await db.knexRaw(
+        knex,
         `
         SELECT
             pl.target,
@@ -429,8 +430,7 @@
         -- AND pgl.componentType = "chart"
         -- AND pg.content ->> '$.type' <> 'fragment'
         -- AND pg.published = 1
-        `,
-        knex
+        `
     )
 
     return chartSlugQueryString.map((row) => {
diff --git a/db/model/Dataset.ts b/db/model/Dataset.ts
index c059234ab47..22a2a3ca794 100644
--- a/db/model/Dataset.ts
+++ b/db/model/Dataset.ts
@@ -69,10 +69,10 @@ export async function writeDatasetCSV(
     // get variables of a dataset
     const variableIds = (
         await db.knexRaw<{ variableId: number }>(
+            knex,
             `SELECT id as variableId
             FROM variables v
             WHERE datasetId=?`,
-            knex,
             [datasetId]
         )
     ).map((row) => row.variableId)
@@ -99,13 +99,14 @@ export async function setTagsForDataset(
 ): Promise {
     await knex.transaction(async (trx: Knex) => {
         const tagRows = tagIds.map((tagId) => [tagId, datasetId])
-        await db.knexRaw(`DELETE FROM dataset_tags WHERE datasetId=?`, trx, [
+        await db.knexRaw(trx, `DELETE FROM dataset_tags WHERE datasetId=?`, [
             datasetId,
         ])
         if (tagRows.length)
             await db.knexRaw(
-                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
                 trx,
+                `INSERT INTO dataset_tags (tagId, datasetId) VALUES ?`,
+
                 [tagRows]
             )
     })
@@ -122,8 +123,8 @@ export async function datasetToDatapackage(
         .knexTable(VariablesTableName)
         .where({ datasetId })) as DbRawVariable[]
     const tags = await db.knexRaw>(
-        `SELECT t.id, t.name FROM dataset_tags dt JOIN tags t ON t.id=dt.tagId WHERE dt.datasetId=?`,
         knex,
+        `SELECT t.id, t.name FROM dataset_tags dt JOIN tags t ON t.id=dt.tagId WHERE dt.datasetId=?`,
         [datasetId]
     )
diff --git a/db/model/Gdoc/GdocHomepage.ts b/db/model/Gdoc/GdocHomepage.ts
index 8425ccf6055..165ef32bafa 100644
--- a/db/model/Gdoc/GdocHomepage.ts
+++ b/db/model/Gdoc/GdocHomepage.ts
@@ -34,14 +34,14 @@ export class GdocHomepage
     _validateSubclass = async (): Promise => {
         const errors: OwidGdocErrorMessage[] = []
         const otherPublishedHomepages = await db.knexRaw<{ id: string }>(
+            db.knexInstance(),
             `
-            SELECT 
+            SELECT
                 id
             FROM posts_gdocs
             WHERE content->>"$.type" = "${OwidGdocType.Homepage}"
             AND published = TRUE
             AND id != ?`,
-            db.knexInstance(),
             [this.id]
         )
         if (otherPublishedHomepages.length > 0) {
diff --git a/db/model/Pageview.ts b/db/model/Pageview.ts
index 5407e41bf3f..b31d39bde50 100644
--- a/db/model/Pageview.ts
+++ b/db/model/Pageview.ts
@@ -32,8 +32,8 @@ export async function getAnalyticsPageviewsByUrlObj(
     [url: string]: DbPlainAnalyticsPageview
 }> {
     const pageviews = await db.knexRaw(
-        "SELECT * FROM ??",
         knex,
+        "SELECT * FROM ??",
         [AnalyticsPageviewsTableName]
     )
diff --git a/db/model/Post.ts b/db/model/Post.ts
index b63f4823aba..4e10ddfe52f 100644
--- a/db/model/Post.ts
+++ b/db/model/Post.ts
@@ -162,6 +162,7 @@ export const getPostsFromSnapshots = async (
     filterFunc?: FilterFnPostRestApi
 ): Promise => {
     const rawPosts: Pick[] = await db.knexRaw(
+        knex,
         `
         SELECT wpApiSnapshot FROM ${postsTable}
         WHERE wpApiSnapshot IS NOT NULL
         AND type IN (?)
         ORDER BY wpApiSnapshot->>'$.date' DESC;
         `,
-        knex,
         [postTypes]
     )
@@ -303,6 +303,7 @@ export const getWordpressPostReferencesByChartId = async (
     knex: Knex
 ): Promise => {
     const relatedWordpressPosts: PostReference[] = await db.knexRaw(
+        knex,
         `
         SELECT DISTINCT
             p.title,
@@ -339,7 +340,6 @@
         ORDER BY
             p.title ASC
         `,
-        knex,
         [chartId]
     )
@@ -351,6 +351,7 @@ export const getGdocsPostReferencesByChartId = async (
     knex: Knex
 ): Promise => {
     const relatedGdocsPosts: PostReference[] = await db.knexRaw(
+        knex,
         `
         SELECT DISTINCT
             pg.content ->> '$.title' AS title,
@@ -379,7 +380,6 @@
         ORDER BY
             pg.content ->> '$.title' ASC
         `,
-        knex,
         [chartId]
     )
diff --git a/db/model/Redirect.ts b/db/model/Redirect.ts
index 9a29e3839b7..6c1ad4f609d 100644
--- a/db/model/Redirect.ts
+++ b/db/model/Redirect.ts
@@ -6,10 +6,10 @@ export const getRedirectsFromDb = async (
     knex: Knex
 ): Promise => {
     const redirectsFromDb: DbPlainRedirect[] = await db.knexRaw(
+        knex,
         `
         SELECT source, target, code FROM redirects
-        `,
-        knex
+        `
     )
 
     return redirectsFromDb
diff --git a/db/model/Variable.ts b/db/model/Variable.ts
index 4dd96016906..851ccc83fb5 100644
--- a/db/model/Variable.ts
+++ b/db/model/Variable.ts
@@ -143,8 +143,8 @@ export async function getMergedGrapherConfigForVariable(
         DbRawVariable,
         "grapherConfigAdmin" | "grapherConfigETL"
     >[] = await knexRaw(
-        `SELECT grapherConfigAdmin, grapherConfigETL FROM variables WHERE id = ?`,
         knex,
+        `SELECT grapherConfigAdmin, grapherConfigETL FROM variables WHERE id = ?`,
         [variableId]
     )
     if (!rows.length) return
@@ -255,11 +255,11 @@ export const getDataValue = async (
     const unit = (
         await knexRawFirst>(
+            knex,
             `-- sql
             SELECT unit
             FROM variables
             WHERE id = ?
             `,
-            knex,
             [variableId]
         )
     )?.unit
@@ -293,12 +293,12 @@ export const getOwidChartDimensionConfigForVariable = async (
     knex: Knex
 ): Promise => {
     const row = await db.knexRawFirst<{ dimensions: string }>(
+        knex,
         `
         SELECT config->"$.dimensions" AS dimensions
         FROM charts
         WHERE id = ?
         `,
-        knex,
         [chartId]
     )
     if (!row?.dimensions) return
@@ -314,8 +314,8 @@ export const getOwidVariableDisplayConfig = async (
     knex: Knex
 ): Promise => {
     const row = await knexRawFirst>(
-        `SELECT display FROM variables WHERE id = ?`,
         knex,
+        `SELECT display FROM variables WHERE id = ?`,
         [variableId]
     )
     if (!row?.display) return
@@ -516,7 +516,7 @@ export const readSQLasDF = async (
     params: any[],
     knex: Knex
 ): Promise => {
-    return createDataFrame(await db.knexRaw(sql, knex, params))
+    return createDataFrame(await db.knexRaw(knex, sql, params))
 }
 
 export async function getVariableOfDatapageIfApplicable(
@@ -729,7 +729,7 @@ const queryRegexSafe = async (
     knex: Knex
 ): Promise => {
     // catch regular expression failures in MySQL and return empty result
-    return await knexRaw(query, knex).catch((err) => {
+    return await knexRaw(knex, query).catch((err) => {
         if (err.message.includes("regular expression")) {
             return []
         }
diff --git a/db/refreshPageviewsFromDatasette.ts b/db/refreshPageviewsFromDatasette.ts
index 89d9e185e88..ff9a60fca57 100644
--- a/db/refreshPageviewsFromDatasette.ts
+++ b/db/refreshPageviewsFromDatasette.ts
@@ -60,7 +60,7 @@ async function downloadAndInsertCSV(knex: Knex): Promise {
     console.log("Parsed CSV data:", onlyValidRows.length, "rows")
     console.log("Columns:", analyticsPageviewsColumnNames.join(", "))
 
-    await db.knexRaw("TRUNCATE TABLE analytics_pageviews", knex)
+    await db.knexRaw(knex, "TRUNCATE TABLE analytics_pageviews")
     await knex.transaction(async (trx) => {
         await trx.batchInsert("analytics_pageviews", onlyValidRows)
     })
diff --git a/db/tests/basic.test.ts b/db/tests/basic.test.ts
index 63eeebcb99f..4b8d5bd9cc2 100644
--- a/db/tests/basic.test.ts
+++ b/db/tests/basic.test.ts
@@ -132,8 +132,8 @@ test("knex interface", async () => {
         // Use raw queries, using ?? to specify the table name using the shared const value
        // The pick type is used to type the result row
         const usersFromRawQuery: Pick[] = await knexRaw(
-            "select email from users",
             trx,
+            "select email from users",
             []
         )
         expect(usersFromRawQuery.length).toBe(2)
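
Note: every hunk above makes the same mechanical change: knexRaw and knexRawFirst (db/db.ts) now take the Knex instance or transaction as the first argument, followed by the SQL string and the optional bind parameters. The sketch below is illustrative only, not part of the patch; the import path and the helper function are assumptions, while the table and column names are taken from queries that appear in the diff.

// Illustrative sketch only — assumes db/db.ts exports knexRaw/knexRawFirst
// with the new (knex, sql, params?) argument order shown in this diff.
import { Knex } from "knex"
import * as db from "./db/db"

// before: db.knexRawFirst<{ count: number }>(sql, knex, params)
// after:  db.knexRawFirst<{ count: number }>(knex, sql, params)
const countChartsForDataset = async (
    knex: Knex,
    datasetId: number
): Promise<number> => {
    // Count charts that use any variable from the given dataset.
    const row = await db.knexRawFirst<{ count: number }>(
        knex,
        `SELECT COUNT(DISTINCT cd.chartId) AS count
         FROM chart_dimensions cd
         JOIN variables v ON v.id = cd.variableId
         WHERE v.datasetId = ?`,
        [datasetId]
    )
    return row?.count ?? 0
}

The same call works inside a transaction, since a Knex transaction object can be passed wherever a Knex instance is expected, which is exactly how the trx-based call sites in apiRouter.ts use it.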