- There are 4 built-in dialects for PostgreSQL, MySQL, Microsoft SQL
- Server (MSSQL), and SQLite. Additionally, the community has implemented
- several dialects to choose from. Find out more at{' '}
- "Dialects".
+ There are {builtInDialects.length} built-in dialects for PostgreSQL,
+ MySQL, Microsoft SQL Server (MSSQL), SQLite, and PGlite. Additionally,
+ the community has implemented several dialects to choose from. Find out
+ more at "Dialects".
Driver installation
diff --git a/site/docs/getting-started/Instantiation.tsx b/site/docs/getting-started/Instantiation.tsx
index 87ee52e71..0a4a3bc9e 100644
--- a/site/docs/getting-started/Instantiation.tsx
+++ b/site/docs/getting-started/Instantiation.tsx
@@ -116,6 +116,17 @@ const dialect = new ${dialectClassName}({
})`
}
+ if (dialect === 'pglite') {
+ const driverImportName = 'PGlite'
+
+ return `import { ${driverImportName} } from '${driverNPMPackageName}'
+import { Kysely, ${dialectClassName} } from 'kysely'
+
+const dialect = new ${dialectClassName}({
+ pglite: new ${driverImportName}(),
+})`
+ }
+
throw new Error(`Unsupported dialect: ${dialect}`)
}
diff --git a/site/docs/getting-started/Querying.tsx b/site/docs/getting-started/Querying.tsx
index 380d340e4..1e200f8e2 100644
--- a/site/docs/getting-started/Querying.tsx
+++ b/site/docs/getting-started/Querying.tsx
@@ -36,6 +36,7 @@ export async function deletePerson(id: number) {
return person
}`,
+ // TODO: Update to use output clause once #687 is completed
mssql: `// As of v0.27.0, Kysely doesn't support the \`OUTPUT\` clause. This will change
// in the future. For now, the following implementations achieve the same results
// as other dialects' examples, but with extra steps.
@@ -63,7 +64,7 @@ export async function deletePerson(id: number) {
return person
}`,
sqlite: postgresqlCodeSnippet,
- // TODO: Update to use output clause once #687 is completed
+ pglite: postgresqlCodeSnippet,
}
export function Querying(props: PropsWithDialect) {
diff --git a/site/docs/getting-started/Summary.tsx b/site/docs/getting-started/Summary.tsx
index 6fed7f893..d75a50f76 100644
--- a/site/docs/getting-started/Summary.tsx
+++ b/site/docs/getting-started/Summary.tsx
@@ -2,14 +2,9 @@ import Admonition from '@theme/Admonition'
import CodeBlock from '@theme/CodeBlock'
import Link from '@docusaurus/Link'
import { IUseADifferentDatabase } from './IUseADifferentDatabase'
-import {
- PRETTY_DIALECT_NAMES,
- type Dialect,
- type PropsWithDialect,
-} from './shared'
+import type { Dialect, PropsWithDialect } from './shared'
-const dialectSpecificCodeSnippets: Record<Dialect, string> = {
- postgresql: ` await db.schema.createTable('person')
+const postgresqlCodeSnippet = ` await db.schema.createTable('person')
.addColumn('id', 'serial', (cb) => cb.primaryKey())
.addColumn('first_name', 'varchar', (cb) => cb.notNull())
.addColumn('last_name', 'varchar')
@@ -17,7 +12,11 @@ const dialectSpecificCodeSnippets: Record = {
.addColumn('created_at', 'timestamp', (cb) =>
cb.notNull().defaultTo(sql\`now()\`)
)
- .execute()`,
+ .addColumn('metadata', 'jsonb', (cb) => cb.notNull())
+ .execute()`
+
+const dialectSpecificCodeSnippets: Record<Dialect, string> = {
+ postgresql: postgresqlCodeSnippet,
mysql: ` await db.schema.createTable('person')
.addColumn('id', 'integer', (cb) => cb.primaryKey().autoIncrement())
.addColumn('first_name', 'varchar(255)', (cb) => cb.notNull())
@@ -26,6 +25,7 @@ const dialectSpecificCodeSnippets: Record = {
.addColumn('created_at', 'timestamp', (cb) =>
cb.notNull().defaultTo(sql\`now()\`)
)
+ .addColumn('metadata', 'json', (cb) => cb.notNull())
.execute()`,
// TODO: Update line 42's IDENTITY once identity(1,1) is added to core.
mssql: ` await db.schema.createTable('person')
@@ -36,6 +36,7 @@ const dialectSpecificCodeSnippets: Record = {
.addColumn('created_at', 'datetime', (cb) =>
cb.notNull().defaultTo(sql\`GETDATE()\`)
)
+ .addColumn('metadata', sql\`nvarchar(max)\`, (cb) => cb.notNull())
.execute()`,
sqlite: ` await db.schema.createTable('person')
.addColumn('id', 'integer', (cb) => cb.primaryKey().autoIncrement().notNull())
@@ -45,14 +46,19 @@ const dialectSpecificCodeSnippets: Record = {
.addColumn('created_at', 'timestamp', (cb) =>
cb.notNull().defaultTo(sql\`current_timestamp\`)
)
+ .addColumn('metadata', 'text', (cb) => cb.notNull())
.execute()`,
+ pglite: postgresqlCodeSnippet,
}
+const truncateTableSnippet = `await sql\`truncate table \${sql.table('person')}\`.execute(db)`
+
const dialectSpecificTruncateSnippets: Record<Dialect, string> = {
- postgresql: `await sql\`truncate table \${sql.table('person')}\`.execute(db)`,
- mysql: `await sql\`truncate table \${sql.table('person')}\`.execute(db)`,
- mssql: `await sql\`truncate table \${sql.table('person')}\`.execute(db)`,
+ postgresql: truncateTableSnippet,
+ mysql: truncateTableSnippet,
+ mssql: truncateTableSnippet,
sqlite: `await sql\`delete from \${sql.table('person')}\`.execute(db)`,
+ pglite: truncateTableSnippet,
}
export function Summary(props: PropsWithDialect) {
@@ -107,6 +113,12 @@ ${dialectSpecificCodeSnippet}
first_name: 'Jennifer',
last_name: 'Aniston',
gender: 'woman',
+ metadata: sql.jval({
+ login_at: new Date().toISOString(),
+ ip: null,
+ agent: null,
+ plan: 'free',
+ }),
})
})
diff --git a/site/docs/getting-started/_types.mdx b/site/docs/getting-started/_types.mdx
index 42cf066be..9e79da78a 100644
--- a/site/docs/getting-started/_types.mdx
+++ b/site/docs/getting-started/_types.mdx
@@ -10,7 +10,7 @@ import {
ColumnType,
Generated,
Insertable,
- JSONColumnType,
+ Json,
Selectable,
Updateable,
} from 'kysely'
@@ -45,12 +45,10 @@ export interface PersonTable {
// can never be updated:
created_at: ColumnType<Date, string | undefined, never>
- // You can specify JSON columns using the `JSONColumnType` wrapper.
- // It is a shorthand for `ColumnType<T, string, string>`, where T
- // is the type of the JSON object/array retrieved from the database,
- // and the insert and update types are always `string` since you're
- // always stringifying insert/update values.
- metadata: JSONColumnType<{
+ // You can specify JSON columns using the `Json` wrapper.
+ // When inserting/updating values of such columns, you're required to wrap the
+ // values with `eb.jval` or `sql.jval`.
+ metadata: Json<{
login_at: string
ip: string | null
agent: string | null
diff --git a/site/docs/getting-started/shared.tsx b/site/docs/getting-started/shared.tsx
index f07efb512..82ea39960 100644
--- a/site/docs/getting-started/shared.tsx
+++ b/site/docs/getting-started/shared.tsx
@@ -1,7 +1,7 @@
import type { ReactNode } from 'react'
import packageJson from '../../package.json'
-export type Dialect = 'postgresql' | 'mysql' | 'sqlite' | 'mssql'
+export type Dialect = 'postgresql' | 'mysql' | 'sqlite' | 'mssql' | 'pglite'
export type PropsWithDialect
= P & {
dialect: Dialect | undefined
@@ -31,6 +31,7 @@ export const DIALECT_CLASS_NAMES = {
mysql: 'MysqlDialect',
mssql: 'MssqlDialect',
sqlite: 'SqliteDialect',
+ pglite: 'PGliteDialect',
} as const satisfies Record<Dialect, string>
export const getDriverNPMPackageNames = (
@@ -41,6 +42,7 @@ export const getDriverNPMPackageNames = (
mysql: 'mysql2',
mssql: 'tedious',
sqlite: 'better-sqlite3',
+ pglite: '@electric-sql/pglite',
}) as const satisfies Record<Dialect, string>
export const POOL_NPM_PACKAGE_NAMES = {
@@ -52,6 +54,7 @@ export const PRETTY_DIALECT_NAMES = {
mysql: 'MySQL',
mssql: 'Microsoft SQL Server (MSSQL)',
sqlite: 'SQLite',
+ pglite: 'PGlite',
} as const satisfies Record<Dialect, string>
export const PRETTY_PACKAGE_MANAGER_NAMES = {
diff --git a/site/src/components/SectionFeatures/index.tsx b/site/src/components/SectionFeatures/index.tsx
index 292224604..8207ebe5b 100644
--- a/site/src/components/SectionFeatures/index.tsx
+++ b/site/src/components/SectionFeatures/index.tsx
@@ -58,8 +58,8 @@ const FeatureList: FeatureItem[] = [
<>
Kysely's community-driven dialect system makes it easy to implement
support for any SQL database without waiting for the core team. It ships
- with official dialects for PostgreSQL, MySQL, MS SQL Server, and SQLite
- right out of the box.
+ with official dialects for PostgreSQL, MySQL, MS SQL Server, SQLite, and
+ PGlite right out of the box.
>
),
},
diff --git a/src/dialect/database-introspector.ts b/src/dialect/database-introspector.ts
index 75093b811..8974828d7 100644
--- a/src/dialect/database-introspector.ts
+++ b/src/dialect/database-introspector.ts
@@ -43,6 +43,7 @@ export interface DatabaseMetadata {
export interface TableMetadata {
readonly name: string
readonly isView: boolean
+ readonly isForeign: boolean
readonly columns: ColumnMetadata[]
readonly schema?: string
}
diff --git a/src/dialect/dialect-adapter-base.ts b/src/dialect/dialect-adapter-base.ts
index fab6512aa..3abe60252 100644
--- a/src/dialect/dialect-adapter-base.ts
+++ b/src/dialect/dialect-adapter-base.ts
@@ -12,6 +12,10 @@ export abstract class DialectAdapterBase implements DialectAdapter {
return true
}
+ get supportsMultipleConnections(): boolean {
+ return true
+ }
+
get supportsTransactionalDdl(): boolean {
return false
}
diff --git a/src/dialect/dialect-adapter.ts b/src/dialect/dialect-adapter.ts
index f8bf61fe1..b868592cb 100644
--- a/src/dialect/dialect-adapter.ts
+++ b/src/dialect/dialect-adapter.ts
@@ -12,28 +12,45 @@ export interface DialectAdapter {
/**
* Whether or not this dialect supports `if not exists` in creation of tables/schemas/views/etc.
*
+ * Default is `false`.
+ *
* If this is false, Kysely's internal migrations tables and schemas are created
* without `if not exists` in migrations. This is not a problem if the dialect
* supports transactional DDL.
*/
- readonly supportsCreateIfNotExists: boolean
+ readonly supportsCreateIfNotExists?: boolean
+
+ /**
+ * Whether or not this dialect supports multiple connections at the same time.
+ *
+ * Default is `true`.
+ *
+ * If this is false, Kysely will use a single connection for all database operations.
+ */
+ readonly supportsMultipleConnections?: boolean
/**
* Whether or not this dialect supports transactional DDL.
*
+ * Default is `false`.
+ *
* If this is true, migrations are executed inside a transaction.
*/
- readonly supportsTransactionalDdl: boolean
+ readonly supportsTransactionalDdl?: boolean
/**
* Whether or not this dialect supports the `returning` in inserts
* updates and deletes.
+ *
+ * Default is `false`.
*/
- readonly supportsReturning: boolean
+ readonly supportsReturning?: boolean
/**
* Whether or not this dialect supports the `output` clause in inserts
* updates and deletes.
+ *
+ * Default is `false`.
*/
readonly supportsOutput?: boolean
diff --git a/src/dialect/mssql/mssql-introspector.ts b/src/dialect/mssql/mssql-introspector.ts
index 9b2a9baf0..ef8cad93f 100644
--- a/src/dialect/mssql/mssql-introspector.ts
+++ b/src/dialect/mssql/mssql-introspector.ts
@@ -142,6 +142,7 @@ export class MssqlIntrospector implements DatabaseIntrospector {
tableDictionary[key] ||
freeze({
columns: [],
+ isForeign: false,
isView: rawColumn.table_type === 'V ',
name: rawColumn.table_name,
schema: rawColumn.table_schema_name ?? undefined,
diff --git a/src/dialect/mysql/mysql-introspector.ts b/src/dialect/mysql/mysql-introspector.ts
index d407361d0..6cc59c47a 100644
--- a/src/dialect/mysql/mysql-introspector.ts
+++ b/src/dialect/mysql/mysql-introspector.ts
@@ -47,6 +47,7 @@ export class MysqlIntrospector implements DatabaseIntrospector {
'columns.TABLE_NAME',
'columns.TABLE_SCHEMA',
'tables.TABLE_TYPE',
+ 'tables.ENGINE',
'columns.IS_NULLABLE',
'columns.DATA_TYPE',
'columns.EXTRA',
@@ -83,6 +84,7 @@ export class MysqlIntrospector implements DatabaseIntrospector {
table = freeze({
name: it.TABLE_NAME,
isView: it.TABLE_TYPE === 'VIEW',
+ isForeign: it.ENGINE === 'FEDERATED',
schema: it.TABLE_SCHEMA,
columns: [],
})
@@ -116,6 +118,7 @@ interface RawColumnMetadata {
TABLE_NAME: string
TABLE_SCHEMA: string
TABLE_TYPE: string
+ ENGINE: string
IS_NULLABLE: 'YES' | 'NO'
DATA_TYPE: string
EXTRA: string
diff --git a/src/dialect/pglite/pglite-adapter.ts b/src/dialect/pglite/pglite-adapter.ts
new file mode 100644
index 000000000..c6c0ad2a1
--- /dev/null
+++ b/src/dialect/pglite/pglite-adapter.ts
@@ -0,0 +1,19 @@
+import { PostgresAdapter } from '../postgres/postgres-adapter.js'
+
+export class PGliteAdapter extends PostgresAdapter {
+ override get supportsMultipleConnections(): boolean {
+ return false
+ }
+
+ override async acquireMigrationLock(): Promise<void> {
+ // PGlite only has one connection that's reserved by the migration system
+ // for the whole time between acquireMigrationLock and releaseMigrationLock.
+ // We don't need to do anything here.
+ }
+
+ override async releaseMigrationLock(): Promise<void> {
+ // PGlite only has one connection that's reserved by the migration system
+ // for the whole time between acquireMigrationLock and releaseMigrationLock.
+ // We don't need to do anything here.
+ }
+}
diff --git a/src/dialect/pglite/pglite-dialect-config.ts b/src/dialect/pglite/pglite-dialect-config.ts
new file mode 100644
index 000000000..1141b3848
--- /dev/null
+++ b/src/dialect/pglite/pglite-dialect-config.ts
@@ -0,0 +1,67 @@
+import type { DatabaseConnection } from '../../driver/database-connection.js'
+
+/**
+ * Config for the PGlite dialect.
+ */
+export interface PGliteDialectConfig {
+ /**
+ * Called once when the first query is executed.
+ *
+ * This is a Kysely specific feature and does not come from the `@electric-sql/pglite`
+ * module.
+ */
+ onCreateConnection?: (connection: DatabaseConnection) => Promise<void>
+
+ /**
+ * A PGlite instance or a function that returns one.
+ *
+ * If a function is provided, it's called once when the first query is executed.
+ *
+ * https://pglite.dev/docs/api#main-constructor
+ */
+ pglite: PGlite | (() => PGlite | Promise<PGlite>)
+}
+
+/**
+ * This interface is the subset of the PGlite instance that kysely needs.
+ *
+ * We don't use the type from `@electric-sql/pglite` here to not have a dependency
+ * to it.
+ *
+ * https://pglite.dev/docs/api
+ */
+export interface PGlite {
+ close(): Promise<void>
+ closed: boolean
+ query<T>(
+ query: string,
+ params?: any[],
+ options?: QueryOptions,
+ ): Promise<Results<T>>
+ ready: boolean
+ transaction<T>(callback: (tx: PGliteTransaction) => Promise<T>): Promise<T>
+ waitReady: Promise<void>
+}
+
+export interface QueryOptions {
+ blob?: Blob | File
+ onNotice?: (notice: any) => void
+ paramTypes?: number[]
+ parsers?: Record<number, (value: string) => any>
+ rowMode?: 'array' | 'object'
+ serializers?: Record<number, (value: any) => string>
+}
+
+export interface Results<T> {
+ affectedRows?: number
+ blob?: Blob
+ fields: {
+ dataTypeID: number
+ name: string
+ }[]
+ rows: T[]
+}
+
+export interface PGliteTransaction extends Pick<PGlite, 'query'> {
+ rollback(): Promise<void>
+}
diff --git a/src/dialect/pglite/pglite-dialect.ts b/src/dialect/pglite/pglite-dialect.ts
new file mode 100644
index 000000000..fd868a324
--- /dev/null
+++ b/src/dialect/pglite/pglite-dialect.ts
@@ -0,0 +1,59 @@
+import type { Driver } from '../../driver/driver.js'
+import type { Kysely } from '../../kysely.js'
+import type { QueryCompiler } from '../../query-compiler/query-compiler.js'
+import type { DatabaseIntrospector } from '../database-introspector.js'
+import type { DialectAdapter } from '../dialect-adapter.js'
+import type { Dialect } from '../dialect.js'
+import { PostgresIntrospector } from '../postgres/postgres-introspector.js'
+import { PostgresQueryCompiler } from '../postgres/postgres-query-compiler.js'
+import { PGliteAdapter } from './pglite-adapter.js'
+import type { PGliteDialectConfig } from './pglite-dialect-config.js'
+import { PGliteDriver } from './pglite-driver.js'
+
+/**
+ * PGlite dialect.
+ *
+ * The constructor takes an instance of {@link PGliteDialectConfig}.
+ *
+ * ```ts
+ * import { PGlite } from '@electric-sql/pglite'
+ *
+ * new PGliteDialect({
+ * pglite: new PGlite()
+ * })
+ * ```
+ *
+ * If you want the client to only be created once it's first used, `pglite`
+ * can be a function:
+ *
+ * ```ts
+ * import { PGlite } from '@electric-sql/pglite'
+ *
+ * new PGliteDialect({
+ * pglite: () => new PGlite()
+ * })
+ * ```
+ */
+export class PGliteDialect implements Dialect {
+ readonly #config: PGliteDialectConfig
+
+ constructor(config: PGliteDialectConfig) {
+ this.#config = config
+ }
+
+ createAdapter(): DialectAdapter {
+ return new PGliteAdapter()
+ }
+
+ createDriver(): Driver {
+ return new PGliteDriver(this.#config)
+ }
+
+ createIntrospector(db: Kysely<any>): DatabaseIntrospector {
+ return new PostgresIntrospector(db)
+ }
+
+ createQueryCompiler(): QueryCompiler {
+ return new PostgresQueryCompiler()
+ }
+}
diff --git a/src/dialect/pglite/pglite-driver.ts b/src/dialect/pglite/pglite-driver.ts
new file mode 100644
index 000000000..c87d3702f
--- /dev/null
+++ b/src/dialect/pglite/pglite-driver.ts
@@ -0,0 +1,192 @@
+import type {
+ DatabaseConnection,
+ QueryResult,
+} from '../../driver/database-connection.js'
+import type { Driver } from '../../driver/driver.js'
+import { parseSavepointCommand } from '../../parser/savepoint-parser.js'
+import type { CompiledQuery } from '../../query-compiler/compiled-query.js'
+import type { QueryCompiler } from '../../query-compiler/query-compiler.js'
+import { Deferred } from '../../util/deferred.js'
+import { freeze, isFunction } from '../../util/object-utils.js'
+import { createQueryId } from '../../util/query-id.js'
+import { extendStackTrace } from '../../util/stack-trace-utils.js'
+import type {
+ PGlite,
+ PGliteDialectConfig,
+ PGliteTransaction,
+} from './pglite-dialect-config.js'
+
+const PRIVATE_BEGIN_TRANSACTION_METHOD = Symbol()
+const PRIVATE_COMMIT_TRANSACTION_METHOD = Symbol()
+const PRIVATE_ROLLBACK_TRANSACTION_METHOD = Symbol()
+
+export class PGliteDriver implements Driver {
+ readonly #config: PGliteDialectConfig
+ #connection?: PGliteConnection
+ #pglite?: PGlite
+
+ constructor(config: PGliteDialectConfig) {
+ this.#config = freeze({ ...config })
+ }
+
+ async acquireConnection(): Promise<DatabaseConnection> {
+ return this.#connection!
+ }
+
+ async beginTransaction(connection: PGliteConnection): Promise<void> {
+ await connection[PRIVATE_BEGIN_TRANSACTION_METHOD]()
+ }
+
+ async commitTransaction(connection: PGliteConnection): Promise<void> {
+ await connection[PRIVATE_COMMIT_TRANSACTION_METHOD]()
+ }
+
+ async destroy(): Promise<void> {
+ if (!this.#pglite?.closed) {
+ await this.#pglite?.close()
+ }
+ }
+
+ async init(): Promise<void> {
+ this.#pglite = isFunction(this.#config.pglite)
+ ? await this.#config.pglite()
+ : this.#config.pglite
+
+ if (this.#pglite.closed) {
+ throw new Error('PGlite instance is already closed.')
+ }
+
+ if (!this.#pglite.ready) {
+ await this.#pglite.waitReady
+ }
+
+ this.#connection = new PGliteConnection(this.#pglite!)
+
+ if (this.#config.onCreateConnection) {
+ await this.#config.onCreateConnection(this.#connection)
+ }
+ }
+
+ async releaseConnection(): Promise<void> {
+ // noop
+ }
+
+ async releaseSavepoint(
+ connection: DatabaseConnection,
+ savepointName: string,
+ compileQuery: QueryCompiler['compileQuery'],
+ ): Promise<void> {
+ await connection.executeQuery(
+ compileQuery(
+ parseSavepointCommand('release', savepointName),
+ createQueryId(),
+ ),
+ )
+ }
+
+ async rollbackToSavepoint(
+ connection: DatabaseConnection,
+ savepointName: string,
+ compileQuery: QueryCompiler['compileQuery'],
+ ): Promise<void> {
+ await connection.executeQuery(
+ compileQuery(
+ parseSavepointCommand('rollback to', savepointName),
+ createQueryId(),
+ ),
+ )
+ }
+
+ async rollbackTransaction(connection: PGliteConnection): Promise<void> {
+ await connection[PRIVATE_ROLLBACK_TRANSACTION_METHOD]()
+ }
+
+ async savepoint(
+ connection: DatabaseConnection,
+ savepointName: string,
+ compileQuery: QueryCompiler['compileQuery'],
+ ): Promise<void> {
+ await connection.executeQuery(
+ compileQuery(
+ parseSavepointCommand('savepoint', savepointName),
+ createQueryId(),
+ ),
+ )
+ }
+}
+
+class PGliteConnection implements DatabaseConnection {
+ readonly #pglite: PGlite
+ #commitTransaction?: () => void
+ #rollbackTransaction?: () => void
+ #transaction?: PGliteTransaction
+ #transactionClosedPromise?: Promise<void>
+
+ constructor(pglite: PGlite) {
+ this.#pglite = pglite
+ }
+
+ async executeQuery<R>(compiledQuery: CompiledQuery): Promise<QueryResult<R>> {
+ try {
+ const { affectedRows, rows } = await (
+ this.#transaction || this.#pglite
+ ).query(compiledQuery.sql, compiledQuery.parameters as never, {
+ rowMode: 'object',
+ })
+
+ return {
+ numAffectedRows:
+ affectedRows != null ? BigInt(affectedRows) : undefined,
+ rows: rows || [],
+ }
+ } catch (error) {
+ throw extendStackTrace(error, new Error())
+ }
+ }
+
+ async *streamQuery<R>(): AsyncIterableIterator<QueryResult<R>> {
+ throw new Error('Streaming is not supported by PGlite.')
+ }
+
+ async [PRIVATE_BEGIN_TRANSACTION_METHOD](): Promise<void> {
+ const {
+ promise: waitForCommit,
+ reject: rollback,
+ resolve: commit,
+ } = new Deferred<void>()
+ const { promise: waitForBegin, resolve: hasBegun } = new Deferred<void>()
+
+ this.#commitTransaction = commit
+ this.#rollbackTransaction = rollback
+
+ // we want to use PGlite's exclusive transaction mode, to lock the instance,
+ // in case this dialect is not the only one using it.
+ this.#transactionClosedPromise = this.#pglite.transaction(async (tx) => {
+ this.#transaction = tx
+
+ hasBegun()
+
+ await waitForCommit
+ })
+
+ await waitForBegin
+ }
+
+ async [PRIVATE_COMMIT_TRANSACTION_METHOD](): Promise<void> {
+ this.#commitTransaction?.()
+ await this.#transactionClosedPromise
+ this.#commitTransaction = undefined
+ this.#rollbackTransaction = undefined
+ this.#transaction = undefined
+ this.#transactionClosedPromise = undefined
+ }
+
+ async [PRIVATE_ROLLBACK_TRANSACTION_METHOD](): Promise<void> {
+ this.#rollbackTransaction?.()
+ await this.#transactionClosedPromise?.catch(() => {})
+ this.#commitTransaction = undefined
+ this.#rollbackTransaction = undefined
+ this.#transaction = undefined
+ this.#transactionClosedPromise = undefined
+ }
+}
diff --git a/src/dialect/postgres/postgres-introspector.ts b/src/dialect/postgres/postgres-introspector.ts
index 1c5855232..f9d840245 100644
--- a/src/dialect/postgres/postgres-introspector.ts
+++ b/src/dialect/postgres/postgres-introspector.ts
@@ -70,6 +70,7 @@ export class PostgresIntrospector implements DatabaseIntrospector {
'r' /*regular table*/,
'v' /*view*/,
'p' /*partitioned table*/,
+ 'f' /*foreign table*/,
])
.where('ns.nspname', '!~', '^pg_')
.where('ns.nspname', '!=', 'information_schema')
@@ -112,6 +113,7 @@ export class PostgresIntrospector implements DatabaseIntrospector {
table = freeze({
name: it.table,
isView: it.table_type === 'v',
+ isForeign: it.table_type === 'f',
schema: it.schema,
columns: [],
})
diff --git a/src/dialect/sqlite/sqlite-adapter.ts b/src/dialect/sqlite/sqlite-adapter.ts
index 28822d664..c6206b0f7 100644
--- a/src/dialect/sqlite/sqlite-adapter.ts
+++ b/src/dialect/sqlite/sqlite-adapter.ts
@@ -3,6 +3,10 @@ import { DialectAdapterBase } from '../dialect-adapter-base.js'
import { MigrationLockOptions } from '../dialect-adapter.js'
export class SqliteAdapter extends DialectAdapterBase {
+ override get supportsMultipleConnections(): boolean {
+ return false
+ }
+
override get supportsTransactionalDdl(): boolean {
return false
}
diff --git a/src/dialect/sqlite/sqlite-driver.ts b/src/dialect/sqlite/sqlite-driver.ts
index 53a64369e..a831e1b67 100644
--- a/src/dialect/sqlite/sqlite-driver.ts
+++ b/src/dialect/sqlite/sqlite-driver.ts
@@ -13,7 +13,6 @@ import { SqliteDatabase, SqliteDialectConfig } from './sqlite-dialect-config.js'
export class SqliteDriver implements Driver {
readonly #config: SqliteDialectConfig
- readonly #connectionMutex = new ConnectionMutex()
#db?: SqliteDatabase
#connection?: DatabaseConnection
@@ -35,9 +34,6 @@ export class SqliteDriver implements Driver {
}
async acquireConnection(): Promise {
- // SQLite only has one single connection. We use a mutex here to wait
- // until the single connection has been released.
- await this.#connectionMutex.lock()
return this.#connection!
}
@@ -93,7 +89,7 @@ export class SqliteDriver implements Driver {
}
async releaseConnection(): Promise {
- this.#connectionMutex.unlock()
+ // noop
}
async destroy(): Promise {
@@ -149,27 +145,3 @@ class SqliteConnection implements DatabaseConnection {
}
}
}
-
-class ConnectionMutex {
- #promise?: Promise
- #resolve?: () => void
-
- async lock(): Promise {
- while (this.#promise) {
- await this.#promise
- }
-
- this.#promise = new Promise((resolve) => {
- this.#resolve = resolve
- })
- }
-
- unlock(): void {
- const resolve = this.#resolve
-
- this.#promise = undefined
- this.#resolve = undefined
-
- resolve?.()
- }
-}
diff --git a/src/dialect/sqlite/sqlite-introspector.ts b/src/dialect/sqlite/sqlite-introspector.ts
index 487b2fa62..a4d20e256 100644
--- a/src/dialect/sqlite/sqlite-introspector.ts
+++ b/src/dialect/sqlite/sqlite-introspector.ts
@@ -135,6 +135,7 @@ export class SqliteIntrospector implements DatabaseIntrospector {
return {
name: name,
isView: type === 'view',
+ isForeign: false,
columns: columns.map((col) => ({
name: col.name,
dataType: col.type,
diff --git a/src/driver/connection-mutex.ts b/src/driver/connection-mutex.ts
new file mode 100644
index 000000000..52c28ae2d
--- /dev/null
+++ b/src/driver/connection-mutex.ts
@@ -0,0 +1,30 @@
+/**
+ * This mutex is used to ensure that only one operation at a time can
+ * acquire a connection from the driver. This is necessary when the
+ * driver only has a single connection, like SQLite and PGlite.
+ *
+ * @internal
+ */
+export class ConnectionMutex {
+ #promise?: Promise
+ #resolve?: () => void
+
+ async lock(): Promise {
+ while (this.#promise) {
+ await this.#promise
+ }
+
+ this.#promise = new Promise((resolve) => {
+ this.#resolve = resolve
+ })
+ }
+
+ unlock(): void {
+ const resolve = this.#resolve
+
+ this.#promise = undefined
+ this.#resolve = undefined
+
+ resolve?.()
+ }
+}
diff --git a/src/driver/runtime-driver.ts b/src/driver/runtime-driver.ts
index eec01f345..c92b0d0b1 100644
--- a/src/driver/runtime-driver.ts
+++ b/src/driver/runtime-driver.ts
@@ -1,7 +1,9 @@
+import { DialectAdapter } from '../dialect/dialect-adapter.js'
import { CompiledQuery } from '../query-compiler/compiled-query.js'
import { QueryCompiler } from '../query-compiler/query-compiler.js'
import { Log } from '../util/log.js'
import { performanceNow } from '../util/performance-now.js'
+import { ConnectionMutex } from './connection-mutex.js'
import { DatabaseConnection, QueryResult } from './database-connection.js'
import { Driver, TransactionSettings } from './driver.js'
@@ -18,11 +20,16 @@ export class RuntimeDriver implements Driver {
#initDone: boolean
#destroyPromise?: Promise<void>
#connections = new WeakSet()
+ #connectionMutex?: ConnectionMutex
- constructor(driver: Driver, log: Log) {
- this.#initDone = false
+ constructor(driver: Driver, adapter: DialectAdapter, log: Log) {
this.#driver = driver
+ this.#initDone = false
this.#log = log
+
+ if (adapter.supportsMultipleConnections === false) {
+ this.#connectionMutex = new ConnectionMutex()
+ }
}
async init(): Promise {
@@ -54,6 +61,10 @@ export class RuntimeDriver implements Driver {
await this.init()
}
+ if (this.#connectionMutex) {
+ await this.#connectionMutex.lock()
+ }
+
const connection = await this.#driver.acquireConnection()
if (!this.#connections.has(connection)) {
@@ -69,6 +80,8 @@ export class RuntimeDriver implements Driver {
async releaseConnection(connection: DatabaseConnection): Promise {
await this.#driver.releaseConnection(connection)
+
+ this.#connectionMutex?.unlock()
}
beginTransaction(
diff --git a/src/expression/expression-builder.ts b/src/expression/expression-builder.ts
index b9c9ad584..94c1f4d85 100644
--- a/src/expression/expression-builder.ts
+++ b/src/expression/expression-builder.ts
@@ -69,7 +69,7 @@ import {
ValTuple5,
} from '../parser/tuple-parser.js'
import { TupleNode } from '../operation-node/tuple-node.js'
-import { Selectable } from '../util/column-type.js'
+import { Selectable, Serialized } from '../util/column-type.js'
import { JSONPathNode } from '../operation-node/json-path-node.js'
import { KyselyTypeError } from '../util/type-error.js'
import {
@@ -78,6 +78,7 @@ import {
} from '../parser/data-type-parser.js'
import { CastNode } from '../operation-node/cast-node.js'
import { SelectFrom } from '../parser/select-from-parser.js'
+import { ValueNode } from '../operation-node/value-node.js'
export interface ExpressionBuilder<DB, TB extends keyof DB> {
/**
@@ -590,6 +591,42 @@ export interface ExpressionBuilder {
value: VE,
): ExpressionWrapper>
+ /**
+ * Returns a value expression that will be serialized before being passed to the database.
+ *
+ * This can be used to pass in an object/array value when inserting/updating a
+ * value to a column defined with `Json`.
+ *
+ * Default serializer function is `JSON.stringify`.
+ *
+ * ### Example
+ *
+ * ```ts
+ * import { Json } from 'kysely'
+ *
+ * interface Database {
+ * person_metadata: {
+ * experience: Json<{ title: string; company: string }[]>
+ * preferences: Json<{ locale: string; timezone: string }>
+ * profile: Json<{ email_verified: boolean }>
+ * }
+ * }
+ *
+ * const result = await db
+ * .insertInto('person_metadata')
+ * .values(({ jval }) => ({
+ * personId: 123,
+ * experience: jval([{ title: 'Software Engineer', company: 'Google' }]), // ✔️
+ * // preferences: jval({ locale: 'en' }), // ❌ missing `timezone`
+ * // profile: JSON.stringify({ email_verified: true }), // ❌ doesn't match `Serialized<{ email_verified }>`
+ * }))
+ * .execute()
+ * ```
+ */
+ jval<O>(
+ obj: O,
+ ): ExpressionWrapper<DB, TB, Serialized<O>>
+
/**
* Creates a tuple expression.
*
@@ -1233,6 +1270,12 @@ export function createExpressionBuilder(
return new ExpressionWrapper(parseValueExpression(value))
},
+ jval<O>(
+ value: O,
+ ): ExpressionWrapper<DB, TB, Serialized<O>> {
+ return new ExpressionWrapper(ValueNode.createSerialized(value))
+ },
+
refTuple(
...values: ReadonlyArray>
): ExpressionWrapper {
diff --git a/src/index.ts b/src/index.ts
index 476a675fc..7b96036f0 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -76,9 +76,9 @@ export * from './dialect/database-introspector.js'
export * from './dialect/sqlite/sqlite-dialect.js'
export * from './dialect/sqlite/sqlite-dialect-config.js'
export * from './dialect/sqlite/sqlite-driver.js'
-export * from './dialect/postgres/postgres-query-compiler.js'
-export * from './dialect/postgres/postgres-introspector.js'
-export * from './dialect/postgres/postgres-adapter.js'
+export * from './dialect/sqlite/sqlite-query-compiler.js'
+export * from './dialect/sqlite/sqlite-introspector.js'
+export * from './dialect/sqlite/sqlite-adapter.js'
export * from './dialect/mysql/mysql-dialect.js'
export * from './dialect/mysql/mysql-dialect-config.js'
@@ -90,9 +90,9 @@ export * from './dialect/mysql/mysql-adapter.js'
export * from './dialect/postgres/postgres-driver.js'
export * from './dialect/postgres/postgres-dialect-config.js'
export * from './dialect/postgres/postgres-dialect.js'
-export * from './dialect/sqlite/sqlite-query-compiler.js'
-export * from './dialect/sqlite/sqlite-introspector.js'
-export * from './dialect/sqlite/sqlite-adapter.js'
+export * from './dialect/postgres/postgres-query-compiler.js'
+export * from './dialect/postgres/postgres-introspector.js'
+export * from './dialect/postgres/postgres-adapter.js'
export * from './dialect/mssql/mssql-adapter.js'
export * from './dialect/mssql/mssql-dialect-config.js'
@@ -101,6 +101,11 @@ export * from './dialect/mssql/mssql-driver.js'
export * from './dialect/mssql/mssql-introspector.js'
export * from './dialect/mssql/mssql-query-compiler.js'
+export * from './dialect/pglite/pglite-adapter.js'
+export * from './dialect/pglite/pglite-driver.js'
+export * from './dialect/pglite/pglite-dialect.js'
+export * from './dialect/pglite/pglite-dialect-config.js'
+
export * from './query-compiler/default-query-compiler.js'
export * from './query-compiler/query-compiler.js'
diff --git a/src/kysely.ts b/src/kysely.ts
index 7703126b9..2674d9005 100644
--- a/src/kysely.ts
+++ b/src/kysely.ts
@@ -113,7 +113,7 @@ export class Kysely
const adapter = dialect.createAdapter()
const log = new Log(args.log ?? [])
- const runtimeDriver = new RuntimeDriver(driver, log)
+ const runtimeDriver = new RuntimeDriver(driver, adapter, log)
const connectionProvider = new DefaultConnectionProvider(runtimeDriver)
const executor = new DefaultQueryExecutor(
diff --git a/src/migration/migrator.ts b/src/migration/migrator.ts
index 68b20aacc..8be2c5807 100644
--- a/src/migration/migrator.ts
+++ b/src/migration/migrator.ts
@@ -146,8 +146,8 @@ export class Migrator {
* }
* ```
*/
- async migrateToLatest(): Promise {
- return this.#migrate(() => ({ direction: 'Up', step: Infinity }))
+ async migrateToLatest(options?: MigrateOptions): Promise {
+ return this.#migrate(() => ({ direction: 'Up', step: Infinity }), options)
}
/**
@@ -203,6 +203,7 @@ export class Migrator {
*/
async migrateTo(
targetMigrationName: string | NoMigrations,
+ options?: MigrateOptions,
): Promise {
return this.#migrate(
({
@@ -226,6 +227,7 @@ export class Migrator {
const executedIndex = executedMigrations.indexOf(
targetMigrationName as string,
)
+
const pendingIndex = pendingMigrations.findIndex(
(m) => m.name === (targetMigrationName as string),
)
@@ -235,14 +237,17 @@ export class Migrator {
direction: 'Down',
step: executedMigrations.length - executedIndex - 1,
}
- } else if (pendingIndex !== -1) {
+ }
+
+ if (pendingIndex !== -1) {
return { direction: 'Up', step: pendingIndex + 1 }
- } else {
- throw new Error(
- `migration "${targetMigrationName}" isn't executed or pending`,
- )
}
+
+ throw new Error(
+ `migration "${targetMigrationName}" isn't executed or pending`,
+ )
},
+ options,
)
}
@@ -274,8 +279,8 @@ export class Migrator {
* await migrator.migrateUp()
* ```
*/
- async migrateUp(): Promise {
- return this.#migrate(() => ({ direction: 'Up', step: 1 }))
+ async migrateUp(options?: MigrateOptions): Promise {
+ return this.#migrate(() => ({ direction: 'Up', step: 1 }), options)
}
/**
@@ -306,8 +311,8 @@ export class Migrator {
* await migrator.migrateDown()
* ```
*/
- async migrateDown(): Promise {
- return this.#migrate(() => ({ direction: 'Down', step: 1 }))
+ async migrateDown(options?: MigrateOptions): Promise {
+ return this.#migrate(() => ({ direction: 'Down', step: 1 }), options)
}
async #migrate(
@@ -315,10 +320,11 @@ export class Migrator {
direction: MigrationDirection
step: number
},
+ options: MigrateOptions | undefined,
): Promise {
try {
await this.#ensureMigrationTablesExists()
- return await this.#runMigrations(getMigrationDirectionAndStep)
+ return await this.#runMigrations(getMigrationDirectionAndStep, options)
} catch (error) {
if (error instanceof MigrationResultSetError) {
return error.resultSet
@@ -489,6 +495,7 @@ export class Migrator {
direction: MigrationDirection
step: number
},
+ options: MigrateOptions | undefined,
): Promise {
const adapter = this.#props.db.getExecutor().adapter
@@ -526,11 +533,14 @@ export class Migrator {
}
}
- if (adapter.supportsTransactionalDdl && !this.#props.disableTransactions) {
- return this.#props.db.transaction().execute(run)
- } else {
+ const disableTransaction =
+ options?.disableTransactions ?? this.#props.disableTransactions
+
+ if (!adapter.supportsTransactionalDdl || disableTransaction) {
return this.#props.db.connection().execute(run)
}
+
+ return this.#props.db.transaction().execute(run)
}
async #getState(db: Kysely): Promise {
@@ -752,7 +762,18 @@ export class Migrator {
}
}
-export interface MigratorProps {
+export interface MigrateOptions {
+ /**
+ * When `true`, don't run migrations in transactions even if the dialect supports transactional DDL.
+ *
+ * Default is `false`.
+ *
+ * This is useful when some migrations include queries that would fail otherwise.
+ */
+ readonly disableTransactions?: boolean
+}
+
+export interface MigratorProps extends MigrateOptions {
readonly db: Kysely
readonly provider: MigrationProvider
@@ -825,15 +846,6 @@ export interface MigratorProps {
* Default is `name0.localeCompare(name1)`.
*/
readonly nameComparator?: (name0: string, name1: string) => number
-
- /**
- * When `true`, don't run migrations in transactions even if the dialect supports transactional DDL.
- *
- * Default is `false`.
- *
- * This is useful when some migrations include queries that would fail otherwise.
- */
- readonly disableTransactions?: boolean
}
/**
diff --git a/src/operation-node/unique-constraint-node.ts b/src/operation-node/unique-constraint-node.ts
index c25e5d816..d35e74d5b 100644
--- a/src/operation-node/unique-constraint-node.ts
+++ b/src/operation-node/unique-constraint-node.ts
@@ -1,11 +1,12 @@
-import { freeze } from '../util/object-utils.js'
+import { logOnce } from '../util/log-once.js'
+import { freeze, isString } from '../util/object-utils.js'
import { ColumnNode } from './column-node.js'
import { IdentifierNode } from './identifier-node.js'
import { OperationNode } from './operation-node.js'
export interface UniqueConstraintNode extends OperationNode {
readonly kind: 'UniqueConstraintNode'
- readonly columns: ReadonlyArray
+ readonly columns: ReadonlyArray
readonly name?: IdentifierNode
readonly nullsNotDistinct?: boolean
readonly deferrable?: boolean
@@ -18,21 +19,56 @@ export type UniqueConstraintNodeProps = Omit<
>
/**
+ * TODO: remove this interface once support for `string[]` is removed.
+ *
* @internal
*/
-export const UniqueConstraintNode = freeze({
+interface UniqueConstraintNodeFactory {
+ is(node: OperationNode): node is UniqueConstraintNode
+ create(
+ columns: OperationNode[],
+ constraintName?: string,
+ nullsNotDistinct?: boolean,
+ ): UniqueConstraintNode
+ /**
+ * @deprecated pass `ColumnNode[]` instead of strings.
+ */
+ create(
+ columns: string[],
+ constraintName?: string,
+ nullsNotDistinct?: boolean,
+ ): UniqueConstraintNode
+ cloneWith(
+ node: UniqueConstraintNode,
+ props: UniqueConstraintNodeProps,
+ ): UniqueConstraintNode
+}
+
+/**
+ * @internal
+ */
+export const UniqueConstraintNode: UniqueConstraintNodeFactory = freeze({
is(node: OperationNode): node is UniqueConstraintNode {
return node.kind === 'UniqueConstraintNode'
},
create(
- columns: string[],
+ columns: string[] | OperationNode[],
constraintName?: string,
nullsNotDistinct?: boolean,
): UniqueConstraintNode {
+ // TODO: remove this block when support for `string[]` is removed.
+ if (isString(columns.at(0))) {
+ logOnce(
+ '`UniqueConstraintNode.create(columns: string[], ...)` is deprecated - pass `ColumnNode[]` instead.',
+ )
+
+ columns = (columns as string[]).map(ColumnNode.create)
+ }
+
return freeze({
kind: 'UniqueConstraintNode',
- columns: freeze(columns.map(ColumnNode.create)),
+ columns: freeze(columns) as OperationNode[],
name: constraintName ? IdentifierNode.create(constraintName) : undefined,
nullsNotDistinct,
})
@@ -42,9 +78,6 @@ export const UniqueConstraintNode = freeze({
node: UniqueConstraintNode,
props: UniqueConstraintNodeProps,
): UniqueConstraintNode {
- return freeze({
- ...node,
- ...props,
- })
+ return freeze({ ...node, ...props })
},
})
diff --git a/src/operation-node/value-node.ts b/src/operation-node/value-node.ts
index 2c811d0dd..c020e3907 100644
--- a/src/operation-node/value-node.ts
+++ b/src/operation-node/value-node.ts
@@ -5,6 +5,7 @@ export interface ValueNode extends OperationNode {
readonly kind: 'ValueNode'
readonly value: unknown
readonly immediate?: boolean
+ readonly serialized?: boolean
}
/**
@@ -29,4 +30,12 @@ export const ValueNode = freeze({
immediate: true,
})
},
+
+ createSerialized(value: unknown): ValueNode {
+ return freeze({
+ kind: 'ValueNode',
+ value,
+ serialized: true,
+ })
+ },
})
diff --git a/src/query-compiler/default-query-compiler.ts b/src/query-compiler/default-query-compiler.ts
index 9e310d279..37e981a5a 100644
--- a/src/query-compiler/default-query-compiler.ts
+++ b/src/query-compiler/default-query-compiler.ts
@@ -519,6 +519,8 @@ export class DefaultQueryCompiler
protected override visitValue(node: ValueNode): void {
if (node.immediate) {
this.appendImmediateValue(node.value)
+ } else if (node.serialized) {
+ this.appendSerializedValue(node.value)
} else {
this.appendValue(node.value)
}
@@ -1757,6 +1759,14 @@ export class DefaultQueryCompiler
this.append(this.getCurrentParameterPlaceholder())
}
+ protected appendSerializedValue(parameter: unknown): void {
+ if (parameter === null) {
+ this.appendValue(null)
+ } else {
+ this.appendValue(JSON.stringify(parameter))
+ }
+ }
+
protected getLeftIdentifierWrapper(): string {
return '"'
}
diff --git a/src/raw-builder/sql.ts b/src/raw-builder/sql.ts
index f8a08fd6d..4e4ab3abe 100644
--- a/src/raw-builder/sql.ts
+++ b/src/raw-builder/sql.ts
@@ -6,6 +6,7 @@ import { ValueNode } from '../operation-node/value-node.js'
import { parseStringReference } from '../parser/reference-parser.js'
import { parseTable } from '../parser/table-parser.js'
import { parseValueExpression } from '../parser/value-parser.js'
+import { Serialized } from '../util/column-type.js'
import { createQueryId } from '../util/query-id.js'
import { RawBuilder, createRawBuilder } from './raw-builder.js'
@@ -137,6 +138,22 @@ export interface Sql {
*/
val(value: V): RawBuilder
+ /**
+ * `sql.jval(value)` is a shortcut for:
+ *
+ * ```ts
+ * import { Serialized, sql } from 'kysely'
+ *
+ * const serializerFn = JSON.stringify
+ * const obj = { hello: 'world!' }
+ *
+ * sql>`${serializerFn(obj)}`
+ * ```
+ *
+ * Default serializer function is `JSON.stringify`.
+ */
+ jval(value: O): RawBuilder>
+
/**
* @deprecated Use {@link Sql.val} instead.
*/
@@ -417,6 +434,13 @@ export const sql: Sql = Object.assign(
})
},
+ jval(value: O): RawBuilder> {
+ return createRawBuilder({
+ queryId: createQueryId(),
+ rawNode: RawNode.createWithChild(ValueNode.createSerialized(value)),
+ })
+ },
+
value(value: V): RawBuilder {
return this.val(value)
},
diff --git a/src/schema/alter-table-builder.ts b/src/schema/alter-table-builder.ts
index d28c133db..479252965 100644
--- a/src/schema/alter-table-builder.ts
+++ b/src/schema/alter-table-builder.ts
@@ -7,7 +7,7 @@ import { OperationNodeSource } from '../operation-node/operation-node-source.js'
import { RenameColumnNode } from '../operation-node/rename-column-node.js'
import { CompiledQuery } from '../query-compiler/compiled-query.js'
import { Compilable } from '../util/compilable.js'
-import { freeze, noop } from '../util/object-utils.js'
+import { freeze, isString, noop } from '../util/object-utils.js'
import {
ColumnDefinitionBuilder,
ColumnDefinitionBuilderCallback,
@@ -55,6 +55,10 @@ import {
CheckConstraintBuilderCallback,
} from './check-constraint-builder.js'
import { RenameConstraintNode } from '../operation-node/rename-constraint-node.js'
+import {
+ ExpressionOrFactory,
+ parseExpression,
+} from '../parser/expression-parser.js'
/**
* This builder can be used to create a `alter table` query.
@@ -173,12 +177,19 @@ export class AlterTableBuilder implements ColumnAlteringInterface {
*/
addUniqueConstraint(
constraintName: string,
- columns: string[],
+ columns: (string | ExpressionOrFactory)[],
build: UniqueConstraintNodeBuilderCallback = noop,
): AlterTableExecutor {
const uniqueConstraintBuilder = build(
new UniqueConstraintNodeBuilder(
- UniqueConstraintNode.create(columns, constraintName),
+ UniqueConstraintNode.create(
+ columns.map((column) =>
+ isString(column)
+ ? ColumnNode.create(column)
+ : parseExpression(column),
+ ),
+ constraintName,
+ ),
),
)
diff --git a/src/schema/create-table-builder.ts b/src/schema/create-table-builder.ts
index 83c5a2e5b..62e5937b4 100644
--- a/src/schema/create-table-builder.ts
+++ b/src/schema/create-table-builder.ts
@@ -9,7 +9,7 @@ import { Compilable } from '../util/compilable.js'
import { QueryExecutor } from '../query-executor/query-executor.js'
import { ColumnDefinitionBuilder } from './column-definition-builder.js'
import { QueryId } from '../util/query-id.js'
-import { freeze, noop } from '../util/object-utils.js'
+import { freeze, isString, noop } from '../util/object-utils.js'
import { ForeignKeyConstraintNode } from '../operation-node/foreign-key-constraint-node.js'
import { ColumnNode } from '../operation-node/column-node.js'
import {
@@ -30,7 +30,10 @@ import {
UniqueConstraintNodeBuilder,
UniqueConstraintNodeBuilderCallback,
} from './unique-constraint-builder.js'
-import { parseExpression } from '../parser/expression-parser.js'
+import {
+ ExpressionOrFactory,
+ parseExpression,
+} from '../parser/expression-parser.js'
import {
PrimaryKeyConstraintBuilder,
PrimaryKeyConstraintBuilderCallback,
@@ -243,15 +246,39 @@ export class CreateTableBuilder
* )
* .execute()
* ```
+ *
+ * In dialects such as MySQL you create unique constraints on expressions as follows:
+ *
+ * ```ts
+ *
+ * import { sql } from 'kysely'
+ *
+ * await db.schema
+ * .createTable('person')
+ * .addColumn('first_name', 'varchar(64)')
+ * .addColumn('last_name', 'varchar(64)')
+ * .addUniqueConstraint(
+ * 'first_name_last_name_unique',
+ * [sql`(lower('first_name'))`, 'last_name']
+ * )
+ * .execute()
+ * ```
*/
addUniqueConstraint(
constraintName: string,
- columns: C[],
+ columns: (C | ExpressionOrFactory)[],
build: UniqueConstraintNodeBuilderCallback = noop,
): CreateTableBuilder {
const uniqueConstraintBuilder = build(
new UniqueConstraintNodeBuilder(
- UniqueConstraintNode.create(columns, constraintName),
+ UniqueConstraintNode.create(
+ columns.map((column) =>
+ isString(column)
+ ? ColumnNode.create(column)
+ : parseExpression(column),
+ ),
+ constraintName,
+ ),
),
)
diff --git a/src/util/column-type.ts b/src/util/column-type.ts
index fc9daeb7d..9d7b18973 100644
--- a/src/util/column-type.ts
+++ b/src/util/column-type.ts
@@ -63,9 +63,37 @@ export type Generated = ColumnType
*/
export type GeneratedAlways = ColumnType
+/**
+ * A shortcut for defining type-safe JSON columns. Inserts/updates require passing
+ * values that are wrapped with `eb.jval` or `sql.jval` instead of `JSON.stringify`.
+ */
+export type Json<
+ SelectType extends object | null,
+ InsertType extends Serialized | Extract =
+ | Serialized
+ | Extract,
+ UpdateType extends Serialized | Extract =
+ | Serialized
+ | Extract,
+> = ColumnType
+
+/**
+ * A symbol that is used to brand serialized objects/arrays.
+ * @internal
+ */
+declare const SerializedBrand: unique symbol
+
+/**
+ * A type that is used to brand serialized objects/arrays.
+ */
+export type Serialized = O & {
+ readonly [SerializedBrand]: '⚠️ When you insert into or update columns of type `Json` (or similar), you should wrap your JSON value with `eb.jval` or `sql.jval`, instead of `JSON.stringify`. ⚠️'
+}
+
/**
* A shortcut for defining JSON columns, which are by default inserted/updated
* as stringified JSON strings.
+ * @deprecated Use {@link Json} instead.
*/
export type JSONColumnType<
SelectType extends object | null,
@@ -201,5 +229,5 @@ export type Insertable = DrainOuterGeneric<
* ```
*/
export type Updateable = DrainOuterGeneric<{
- [K in UpdateKeys]?: UpdateType
+ [K in UpdateKeys]?: UpdateType | undefined
}>
diff --git a/test/node/src/aggregate-function.test.ts b/test/node/src/aggregate-function.test.ts
index e61935737..7b79a65da 100644
--- a/test/node/src/aggregate-function.test.ts
+++ b/test/node/src/aggregate-function.test.ts
@@ -19,7 +19,9 @@ import {
const funcNames = ['avg', 'count', 'max', 'min', 'sum'] as const
for (const dialect of DIALECTS) {
- describe(`${dialect}: aggregate functions`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: aggregate functions`, () => {
let ctx: TestContext
before(async function () {
@@ -660,7 +662,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it(`should execute a query with ${funcName}(column) filter(where ...) in select clause`, async () => {
const query = ctx.db
.selectFrom('person')
@@ -909,7 +911,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it(`should execute a query with ${funcName}(table.*) in select clause`, async () => {
const query = ctx.db
.selectFrom('person')
@@ -986,7 +988,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it(`should execute a query with ${funcName}(*) filter(where ...) in select clause`, async () => {
const query = ctx.db
.selectFrom('person')
@@ -1035,29 +1037,29 @@ for (const dialect of DIALECTS) {
it('should execute "dynamic" aggregate functions', async () => {
const query = ctx.db
.selectFrom('person')
- .$if(dialect === 'mssql', (qb) => qb.groupBy('person.first_name'))
+ .$if(sqlSpec === 'mssql', (qb) => qb.groupBy('person.first_name'))
.select([
ctx.db.fn
.agg('rank')
- .over((ob) => (dialect === 'mssql' ? ob.orderBy('first_name') : ob))
+ .over((ob) => (sqlSpec === 'mssql' ? ob.orderBy('first_name') : ob))
.as('rank'),
(eb) =>
eb.fn
.agg('rank')
.over((ob) =>
- dialect === 'mssql' ? ob.orderBy('first_name') : ob,
+ sqlSpec === 'mssql' ? ob.orderBy('first_name') : ob,
)
.as('another_rank'),
])
- .$if(dialect === 'postgres' || dialect === 'mssql', (qb) =>
+ .$if(sqlSpec === 'postgres' || sqlSpec === 'mssql', (qb) =>
qb.select((eb) =>
eb.fn
.agg('string_agg', ['first_name', sql.lit(',')])
- .$call((eb) => (dialect === 'mssql' ? eb : eb.distinct()))
+ .$call((eb) => (sqlSpec === 'mssql' ? eb : eb.distinct()))
.as('first_names'),
),
)
- .$if(dialect === 'mysql' || dialect === 'sqlite', (qb) =>
+ .$if(sqlSpec === 'mysql' || sqlSpec === 'sqlite', (qb) =>
qb.select((eb) =>
eb.fn
.agg('group_concat', ['first_name'])
@@ -1111,11 +1113,11 @@ for (const dialect of DIALECTS) {
describe('should execute order-sensitive aggregate functions', () => {
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
- const isMySql = dialect === 'mysql'
+ const isMySql = sqlSpec === 'mysql'
const funcName = isMySql ? 'group_concat' : 'string_agg'
const funcArgs: Array> = [
'first_name',
@@ -1163,13 +1165,13 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it(`should execute a query with within group (order by column) in select clause`, async () => {
const query = ctx.db.selectFrom('toy').select((eb) =>
eb.fn
.agg('percentile_cont', [sql.lit(0.5)])
.withinGroupOrderBy('toy.price')
- .$call((ab) => (dialect === 'mssql' ? ab.over() : ab))
+ .$call((ab) => (sqlSpec === 'mssql' ? ab.over() : ab))
.as('median_price'),
)
diff --git a/test/node/src/array.test.ts b/test/node/src/array.test.ts
index d319c795d..49bd2d9eb 100644
--- a/test/node/src/array.test.ts
+++ b/test/node/src/array.test.ts
@@ -18,88 +18,90 @@ interface PersonWithArrays extends Person {
nicknames: string[] | null
}
-if (DIALECTS.includes('postgres')) {
- const dialect = 'postgres'
-
- describe(`${dialect} array tests`, () => {
- let ctx: TestContext
- let db: Kysely & { person: PersonWithArrays }>
+for (const dialect of DIALECTS) {
+ const { sqlSpec, variant } = dialect
+
+ if (sqlSpec === 'postgres') {
+ describe(`${variant}: arrays`, () => {
+ let ctx: TestContext
+ let db: Kysely & { person: PersonWithArrays }>
+
+ before(async function () {
+ ctx = await initTest(this, dialect)
+
+ await ctx.db.schema
+ .alterTable('person')
+ .addColumn('lucky_numbers', sql`integer[]`, (col) =>
+ col.notNull().defaultTo(sql`ARRAY[]::integer[]`),
+ )
+ .addColumn('nicknames', sql`text[]`)
+ .execute()
+
+ db = ctx.db as any
+ })
- before(async function () {
- ctx = await initTest(this, dialect)
+ beforeEach(async () => {
+ await insertDefaultDataSet(ctx)
+
+ await db
+ .updateTable('person')
+ .set({
+ nicknames: ['Jenny', 'Jen'],
+ lucky_numbers: [7, 42],
+ })
+ .where('first_name', '=', 'Jennifer')
+ .execute()
+ })
- await ctx.db.schema
- .alterTable('person')
- .addColumn('lucky_numbers', sql`integer[]`, (col) =>
- col.notNull().defaultTo(sql`ARRAY[]::integer[]`),
- )
- .addColumn('nicknames', sql`text[]`)
- .execute()
+ afterEach(async () => {
+ await clearDatabase(ctx)
+ })
- db = ctx.db as any
- })
+ after(async () => {
+ await destroyTest(ctx)
+ })
- beforeEach(async () => {
- await insertDefaultDataSet(ctx)
+ it('array columns should get returned as arrays by default', async () => {
+ const jennifer = await db
+ .selectFrom('person')
+ .where('first_name', '=', 'Jennifer')
+ .select(['first_name', 'lucky_numbers', 'nicknames'])
+ .executeTakeFirstOrThrow()
- await db
- .updateTable('person')
- .set({
- nicknames: ['Jenny', 'Jen'],
+ expect(jennifer).to.eql({
+ first_name: 'Jennifer',
lucky_numbers: [7, 42],
+ nicknames: ['Jenny', 'Jen'],
})
- .where('first_name', '=', 'Jennifer')
- .execute()
- })
-
- afterEach(async () => {
- await clearDatabase(ctx)
- })
+ })
- after(async () => {
- await destroyTest(ctx)
- })
+ it('should filter using the `any` function', async () => {
+ const jennifer = await db
+ .selectFrom('person')
+ .where((eb) => eb(eb.val(7), '=', eb.fn.any('lucky_numbers')))
+ .select(['first_name', 'lucky_numbers', 'nicknames'])
+ .executeTakeFirstOrThrow()
- it('array columns should get returned as arrays by default', async () => {
- const jennifer = await db
- .selectFrom('person')
- .where('first_name', '=', 'Jennifer')
- .select(['first_name', 'lucky_numbers', 'nicknames'])
- .executeTakeFirstOrThrow()
-
- expect(jennifer).to.eql({
- first_name: 'Jennifer',
- lucky_numbers: [7, 42],
- nicknames: ['Jenny', 'Jen'],
+ expect(jennifer).to.eql({
+ first_name: 'Jennifer',
+ lucky_numbers: [7, 42],
+ nicknames: ['Jenny', 'Jen'],
+ })
})
- })
- it('should filter using the `any` function', async () => {
- const jennifer = await db
- .selectFrom('person')
- .where((eb) => eb(eb.val(7), '=', eb.fn.any('lucky_numbers')))
- .select(['first_name', 'lucky_numbers', 'nicknames'])
- .executeTakeFirstOrThrow()
-
- expect(jennifer).to.eql({
- first_name: 'Jennifer',
- lucky_numbers: [7, 42],
- nicknames: ['Jenny', 'Jen'],
- })
- })
+ it('should filter using the `any` function on a nullable column', async () => {
+ const jennifer = await db
+ .selectFrom('person')
+ .where((eb) => eb(eb.val('Jen'), '=', eb.fn.any('nicknames')))
+ .select(['first_name', 'lucky_numbers', 'nicknames'])
+ .executeTakeFirstOrThrow()
- it('should filter using the `any` function on a nullable column', async () => {
- const jennifer = await db
- .selectFrom('person')
- .where((eb) => eb(eb.val('Jen'), '=', eb.fn.any('nicknames')))
- .select(['first_name', 'lucky_numbers', 'nicknames'])
- .executeTakeFirstOrThrow()
-
- expect(jennifer).to.eql({
- first_name: 'Jennifer',
- lucky_numbers: [7, 42],
- nicknames: ['Jenny', 'Jen'],
+ expect(jennifer).to.eql({
+ first_name: 'Jennifer',
+ lucky_numbers: [7, 42],
+ nicknames: ['Jenny', 'Jen'],
+ })
})
})
- })
+ }
}
diff --git a/test/node/src/camel-case.test.ts b/test/node/src/camel-case.test.ts
index 27719fea2..801eca019 100644
--- a/test/node/src/camel-case.test.ts
+++ b/test/node/src/camel-case.test.ts
@@ -19,7 +19,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: camel case test`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: camel case`, () => {
let ctx: TestContext
let camelDb: Kysely
@@ -51,7 +53,7 @@ for (const dialect of DIALECTS) {
.addColumn('lastName', 'varchar(255)')
.addColumn(
'preferences',
- dialect === 'mssql' ? 'varchar(8000)' : 'json',
+ sqlSpec === 'mssql' ? 'varchar(8000)' : 'json',
)
.addColumn('addressRow1', 'varchar(255)')
.execute()
@@ -89,7 +91,7 @@ for (const dialect of DIALECTS) {
// Can't run this test on SQLite because we can't access the same database
// from the other Kysely instance.
- if (dialect !== 'sqlite') {
+ if (sqlSpec !== 'sqlite' && variant !== 'pglite') {
it('should have created the table and its columns in snake_case', async () => {
const result = await sql`select * from camel_person`.execute(
ctx.db,
@@ -276,7 +278,7 @@ for (const dialect of DIALECTS) {
it('should map nested objects by default', async () => {
let db = camelDb.withoutPlugins()
- if (dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'mssql' || sqlSpec === 'sqlite') {
db = db.withPlugin(new ParseJSONResultsPlugin())
}
@@ -295,7 +297,7 @@ for (const dialect of DIALECTS) {
it('should respect maintainNestedObjectKeys', async () => {
let db = camelDb.withoutPlugins()
- if (dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'mssql' || sqlSpec === 'sqlite') {
db = db.withPlugin(new ParseJSONResultsPlugin())
}
@@ -355,7 +357,7 @@ for (const dialect of DIALECTS) {
})
})
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('should convert merge queries', async () => {
const query = camelDb
.mergeInto('camelPerson')
diff --git a/test/node/src/case.test.ts b/test/node/src/case.test.ts
index 10de4de85..25e11b32b 100644
--- a/test/node/src/case.test.ts
+++ b/test/node/src/case.test.ts
@@ -10,7 +10,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: case`, () => {
+ const { variant } = dialect
+
+ describe(`${variant}: case`, () => {
let ctx: TestContext
before(async function () {
diff --git a/test/node/src/clear.test.ts b/test/node/src/clear.test.ts
index dd212189b..5ed725094 100644
--- a/test/node/src/clear.test.ts
+++ b/test/node/src/clear.test.ts
@@ -9,7 +9,9 @@ import {
} from './test-setup'
for (const dialect of DIALECTS) {
- describe(`${dialect} clear`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: clear`, () => {
let ctx: TestContext
before(async function () {
@@ -318,7 +320,7 @@ for (const dialect of DIALECTS) {
})
})
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('should clear limit', () => {
const query = ctx.db
.selectFrom('person')
diff --git a/test/node/src/coalesce.test.ts b/test/node/src/coalesce.test.ts
index 133d9e730..0d2011745 100644
--- a/test/node/src/coalesce.test.ts
+++ b/test/node/src/coalesce.test.ts
@@ -11,7 +11,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: coalesce`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: coalesce`, () => {
let ctx: TestContext
before(async function () {
@@ -30,7 +32,7 @@ for (const dialect of DIALECTS) {
await destroyTest(ctx)
})
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should coalesce a single item', async () => {
const { coalesce } = ctx.db.fn
@@ -87,9 +89,9 @@ for (const dialect of DIALECTS) {
coalesce('first_name', ctx.db.dynamic.ref('last_name')).as(
'ColumnReference1',
),
- ...(dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ ...(sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
? [coalesce('first_name', sql`${1}`).as('ColumnReference2')]
: []),
coalesce('first_name', max('last_name')).as('ColumnReference3'),
@@ -100,9 +102,9 @@ for (const dialect of DIALECTS) {
coalesce(ctx.db.dynamic.ref('first_name'), 'last_name').as(
'DynamicReference1',
),
- ...(dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ ...(sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
? [
coalesce(ctx.db.dynamic.ref('first_name'), sql`${2}`).as(
'DynamicReference2',
@@ -125,9 +127,9 @@ for (const dialect of DIALECTS) {
coalesce(max('first_name'), ctx.db.dynamic.ref('last_name')).as(
'AggregateFunction2',
),
- ...(dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ ...(sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
? [coalesce(max('first_name'), sql`${8}`).as('AggregateFunction3')]
: []),
])
diff --git a/test/node/src/controlled-transaction.test.ts b/test/node/src/controlled-transaction.test.ts
index 9196c3bbc..4fa8671d4 100644
--- a/test/node/src/controlled-transaction.test.ts
+++ b/test/node/src/controlled-transaction.test.ts
@@ -20,9 +20,12 @@ import {
insertDefaultDataSet,
limit,
} from './test-setup.js'
+import { PGlite } from '@electric-sql/pglite'
for (const dialect of DIALECTS) {
- describe(`${dialect}: controlled transaction`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: controlled transaction`, () => {
let ctx: TestContext
const executedQueries: CompiledQuery[] = []
const sandbox = sinon.createSandbox()
@@ -42,6 +45,10 @@ for (const dialect of DIALECTS) {
Parameters,
ReturnType
>
+ let pgliteTransactionSpy: sinon.SinonSpy<
+ Parameters,
+ ReturnType
+ >
before(async function () {
ctx = await initTest(this, dialect, {
@@ -72,6 +79,7 @@ for (const dialect of DIALECTS) {
Connection.prototype,
'saveTransaction',
)
+ pgliteTransactionSpy = sandbox.spy(PGlite.prototype, 'transaction')
})
afterEach(async () => {
@@ -90,24 +98,31 @@ for (const dialect of DIALECTS) {
await trx.commit().execute()
- if (dialect == 'postgres') {
+ if (sqlSpec === 'postgres') {
+ const query = {
+ sql: 'insert into "person" ("first_name", "last_name", "gender") values ($1, $2, $3)',
+ parameters: ['Foo', 'Barson', 'male'],
+ }
+
+ if (variant === 'pglite') {
+ expect(pgliteTransactionSpy.calledOnce).to.be.true
+ }
+
expect(
executedQueries.map((it) => ({
sql: it.sql,
parameters: it.parameters,
})),
- ).to.eql([
- {
- sql: 'begin',
- parameters: [],
- },
- {
- sql: 'insert into "person" ("first_name", "last_name", "gender") values ($1, $2, $3)',
- parameters: ['Foo', 'Barson', 'male'],
- },
- { sql: 'commit', parameters: [] },
- ])
- } else if (dialect === 'mysql') {
+ ).to.eql(
+ variant === 'pglite'
+ ? [query]
+ : [
+ { sql: 'begin', parameters: [] },
+ query,
+ { sql: 'commit', parameters: [] },
+ ],
+ )
+ } else if (sqlSpec === 'mysql') {
expect(
executedQueries.map((it) => ({
sql: it.sql,
@@ -124,7 +139,7 @@ for (const dialect of DIALECTS) {
},
{ sql: 'commit', parameters: [] },
])
- } else if (dialect === 'mssql') {
+ } else if (sqlSpec === 'mssql') {
expect(tediousBeginTransactionSpy.calledOnce).to.be.true
expect(tediousBeginTransactionSpy.getCall(0).args[1]).to.be.undefined
expect(tediousBeginTransactionSpy.getCall(0).args[2]).to.be.undefined
@@ -160,6 +175,14 @@ for (const dialect of DIALECTS) {
{ sql: 'commit', parameters: [] },
])
}
+
+ const person = await ctx.db
+ .selectFrom('person')
+ .where('first_name', '=', 'Foo')
+ .select('first_name')
+ .executeTakeFirst()
+
+ expect(person).not.to.be.undefined
})
it('should be able to start and rollback a transaction', async () => {
@@ -169,24 +192,34 @@ for (const dialect of DIALECTS) {
await trx.rollback().execute()
- if (dialect == 'postgres') {
+ if (sqlSpec === 'postgres') {
+ const query = {
+ sql: 'insert into "person" ("first_name", "last_name", "gender") values ($1, $2, $3)',
+ parameters: ['Foo', 'Barson', 'male'],
+ }
+
+ if (variant === 'pglite') {
+ expect(pgliteTransactionSpy.calledOnce).to.be.true
+ }
+
expect(
executedQueries.map((it) => ({
sql: it.sql,
parameters: it.parameters,
})),
- ).to.eql([
- {
- sql: 'begin',
- parameters: [],
- },
- {
- sql: 'insert into "person" ("first_name", "last_name", "gender") values ($1, $2, $3)',
- parameters: ['Foo', 'Barson', 'male'],
- },
- { sql: 'rollback', parameters: [] },
- ])
- } else if (dialect === 'mysql') {
+ ).to.eql(
+ variant === 'pglite'
+ ? [query]
+ : [
+ {
+ sql: 'begin',
+ parameters: [],
+ },
+ query,
+ { sql: 'rollback', parameters: [] },
+ ],
+ )
+ } else if (sqlSpec === 'mysql') {
expect(
executedQueries.map((it) => ({
sql: it.sql,
@@ -203,7 +236,7 @@ for (const dialect of DIALECTS) {
},
{ sql: 'rollback', parameters: [] },
])
- } else if (dialect === 'mssql') {
+ } else if (sqlSpec === 'mssql') {
expect(tediousBeginTransactionSpy.calledOnce).to.be.true
expect(tediousBeginTransactionSpy.getCall(0).args[1]).to.be.undefined
expect(tediousBeginTransactionSpy.getCall(0).args[2]).to.be.undefined
@@ -249,7 +282,10 @@ for (const dialect of DIALECTS) {
expect(person).to.be.undefined
})
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (
+ (sqlSpec === 'postgres' && variant !== 'pglite') ||
+ sqlSpec === 'mysql'
+ ) {
for (const accessMode of TRANSACTION_ACCESS_MODES) {
it(`should set the transaction access mode as "${accessMode}"`, async () => {
const trx = await ctx.db
@@ -279,19 +315,23 @@ for (const dialect of DIALECTS) {
{ sql: 'select * from `person`', parameters: [] },
{ sql: 'commit', parameters: [] },
],
- }[dialect],
+ }[sqlSpec],
)
})
}
}
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'mssql') {
+ if (
+ (sqlSpec === 'postgres' && variant !== 'pglite') ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
+ ) {
for (const isolationLevel of [
'read uncommitted',
'read committed',
'repeatable read',
'serializable',
- ...(dialect === 'mssql' ? (['snapshot'] as const) : []),
+ ...(sqlSpec === 'mssql' ? (['snapshot'] as const) : []),
] as const) {
it(`should set the transaction isolation level as "${isolationLevel}"`, async () => {
const trx = await ctx.db
@@ -303,7 +343,7 @@ for (const dialect of DIALECTS) {
await trx.commit().execute()
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
expect(tediousBeginTransactionSpy.calledOnce).to.be.true
expect(tediousBeginTransactionSpy.getCall(0).args[1]).to.not.be
.undefined
@@ -351,7 +391,7 @@ for (const dialect of DIALECTS) {
parameters: ['Foo', 'Barson', 'male'],
},
],
- }[dialect],
+ }[sqlSpec],
)
})
}
@@ -402,14 +442,8 @@ for (const dialect of DIALECTS) {
await trxAfterFoo.commit().execute()
- if (dialect == 'postgres') {
- expect(
- executedQueries.map((it) => ({
- sql: it.sql,
- parameters: it.parameters,
- })),
- ).to.eql([
- { sql: 'begin', parameters: [] },
+ if (sqlSpec === 'postgres') {
+ const ops = [
{
sql: 'insert into "person" ("first_name", "last_name", "gender") values ($1, $2, $3)',
parameters: ['Foo', 'Barson', 'male'],
@@ -420,9 +454,23 @@ for (const dialect of DIALECTS) {
parameters: ['Fizz', 'Buzzson', 'female'],
},
{ sql: 'rollback to "foo"', parameters: [] },
- { sql: 'commit', parameters: [] },
- ])
- } else if (dialect === 'mysql') {
+ ]
+
+ expect(
+ executedQueries.map((it) => ({
+ sql: it.sql,
+ parameters: it.parameters,
+ })),
+ ).to.eql(
+ variant === 'pglite'
+ ? ops
+ : [
+ { sql: 'begin', parameters: [] },
+ ...ops,
+ { sql: 'commit', parameters: [] },
+ ],
+ )
+ } else if (sqlSpec === 'mysql') {
expect(
executedQueries.map((it) => ({
sql: it.sql,
@@ -442,7 +490,7 @@ for (const dialect of DIALECTS) {
{ sql: 'rollback to `foo`', parameters: [] },
{ sql: 'commit', parameters: [] },
])
- } else if (dialect === 'mssql') {
+ } else if (sqlSpec === 'mssql') {
expect(tediousBeginTransactionSpy.calledOnce).to.be.true
expect(tediousBeginTransactionSpy.getCall(0).args[1]).to.be.undefined
expect(tediousBeginTransactionSpy.getCall(0).args[2]).to.be.undefined
@@ -502,7 +550,7 @@ for (const dialect of DIALECTS) {
expect(results[0].first_name).to.equal('Foo')
})
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('should be able to savepoint and release savepoint', async () => {
const trx = await ctx.db.startTransaction().execute()
@@ -516,14 +564,8 @@ for (const dialect of DIALECTS) {
await trxAfterFoo.commit().execute()
- if (dialect == 'postgres') {
- expect(
- executedQueries.map((it) => ({
- sql: it.sql,
- parameters: it.parameters,
- })),
- ).to.eql([
- { sql: 'begin', parameters: [] },
+ if (sqlSpec === 'postgres') {
+ const ops = [
{
sql: 'insert into "person" ("first_name", "last_name", "gender") values ($1, $2, $3)',
parameters: ['Foo', 'Barson', 'male'],
@@ -534,9 +576,23 @@ for (const dialect of DIALECTS) {
parameters: ['Fizz', 'Buzzson', 'female'],
},
{ sql: 'release "foo"', parameters: [] },
- { sql: 'commit', parameters: [] },
- ])
- } else if (dialect === 'mysql') {
+ ]
+
+ expect(
+ executedQueries.map((it) => ({
+ sql: it.sql,
+ parameters: it.parameters,
+ })),
+ ).to.eql(
+ variant === 'pglite'
+ ? ops
+ : [
+ { sql: 'begin', parameters: [] },
+ ...ops,
+ { sql: 'commit', parameters: [] },
+ ],
+ )
+ } else if (sqlSpec === 'mysql') {
expect(
executedQueries.map((it) => ({
sql: it.sql,
@@ -591,7 +647,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should throw an error when trying to release a savepoint as it is not supported', async () => {
const trx = await ctx.db.startTransaction().execute()
@@ -660,16 +716,12 @@ describe('custom dialect: controlled transaction', () => {
})
it('should throw an error when trying to savepoint on a dialect that does not support it', async () => {
- const trx = await db.startTransaction().execute()
-
await expect(trx.savepoint('foo').execute()).to.be.rejectedWith(
'The `savepoint` method is not supported by this driver',
)
})
it('should throw an error when trying to rollback to a savepoint on a dialect that does not support it', async () => {
- const trx = await db.startTransaction().execute()
-
await expect(
trx.rollbackToSavepoint('foo' as never).execute(),
).to.be.rejectedWith(
@@ -678,8 +730,6 @@ describe('custom dialect: controlled transaction', () => {
})
it('should throw an error when trying to release a savepoint on a dialect that does not support it', async () => {
- const trx = await db.startTransaction().execute()
-
await expect(
trx.releaseSavepoint('foo' as never).execute(),
).to.be.rejectedWith(
diff --git a/test/node/src/deduplicate-joins.test.ts b/test/node/src/deduplicate-joins.test.ts
index 40eec46fc..7a7b0ca00 100644
--- a/test/node/src/deduplicate-joins.test.ts
+++ b/test/node/src/deduplicate-joins.test.ts
@@ -11,7 +11,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: deduplicate joins`, () => {
+ const { variant } = dialect
+
+ describe(`${variant}: deduplicate joins`, () => {
let ctx: TestContext
before(async function () {
diff --git a/test/node/src/delete.test.ts b/test/node/src/delete.test.ts
index 81f4fd386..5dc182aa5 100644
--- a/test/node/src/delete.test.ts
+++ b/test/node/src/delete.test.ts
@@ -15,7 +15,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: delete`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: delete`, () => {
let ctx: TestContext
before(async function () {
@@ -139,7 +141,7 @@ for (const dialect of DIALECTS) {
expect(result.numDeletedRows).to.equal(0n)
})
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should order and limit the deleted rows', async () => {
const query = ctx.db.deleteFrom('person').orderBy('first_name').limit(2)
@@ -160,7 +162,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should return deleted rows when `returning` is used', async () => {
const query = ctx.db
.deleteFrom('person')
@@ -217,7 +219,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should delete from t1 using t2', async () => {
const query = ctx.db
.deleteFrom('person')
@@ -510,7 +512,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should delete from t1 using t1 inner join t2', async () => {
const query = ctx.db
.deleteFrom('person')
@@ -873,7 +875,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('modifyEnd should add arbitrary SQL to the end of the query', async () => {
const query = ctx.db
.deleteFrom('person')
@@ -899,7 +901,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres' && variant !== 'pglite') {
it('should delete all rows and stream returned results', async () => {
const stream = ctx.db
.deleteFrom('person')
@@ -923,7 +925,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should delete top', async () => {
const query = ctx.db
.deleteFrom('person')
@@ -968,7 +970,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should return deleted rows when `output` is used', async () => {
const query = ctx.db
.deleteFrom('person')
diff --git a/test/node/src/disconnects.test.ts b/test/node/src/disconnects.test.ts
index cf6e7b256..5b9d6f909 100644
--- a/test/node/src/disconnects.test.ts
+++ b/test/node/src/disconnects.test.ts
@@ -3,10 +3,10 @@ import { DIALECTS, DIALECT_CONFIGS, Database, expect } from './test-setup'
import * as tarn from 'tarn'
import * as tedious from 'tedious'
-const dialect = 'mssql'
+const VARIANT = 'mssql'
-if (DIALECTS.includes(dialect)) {
- describe(`${dialect}: disconnects`, () => {
+if (DIALECTS.some((d) => d.variant === VARIANT)) {
+ describe(`${VARIANT}: disconnects`, () => {
let connection: tedious.Connection
let connectionFactoryTimesCalled = 0
let db: Kysely
@@ -27,7 +27,7 @@ if (DIALECTS.includes(dialect)) {
connectionFactoryTimesCalled++
return (connection = new tedious.Connection(
- DIALECT_CONFIGS[dialect],
+ DIALECT_CONFIGS[VARIANT],
))
},
},
diff --git a/test/node/src/error-stack.test.ts b/test/node/src/error-stack.test.ts
index ac2bde556..52a82d72a 100644
--- a/test/node/src/error-stack.test.ts
+++ b/test/node/src/error-stack.test.ts
@@ -10,7 +10,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: error stack`, () => {
+ const { variant } = dialect
+
+ describe(`${variant}: error stack`, () => {
let ctx: TestContext
before(async function () {
diff --git a/test/node/src/execute.test.ts b/test/node/src/execute.test.ts
index 8fb97c1bf..c978c076a 100644
--- a/test/node/src/execute.test.ts
+++ b/test/node/src/execute.test.ts
@@ -16,7 +16,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: execute`, () => {
+ const { variant } = dialect
+
+ describe(`${variant}: execute`, () => {
let ctx: TestContext
before(async function () {
diff --git a/test/node/src/explain.test.ts b/test/node/src/explain.test.ts
index 441cfcd1a..ef87522ca 100644
--- a/test/node/src/explain.test.ts
+++ b/test/node/src/explain.test.ts
@@ -11,147 +11,152 @@ import {
DIALECTS,
} from './test-setup.js'
-for (const dialect of DIALECTS.filter((dialect) => dialect !== 'mssql')) {
- describe(`${dialect}: explain test`, () => {
- let ctx: TestContext
- const sandbox = createSandbox()
- let executeQuerySpy: SinonSpy
-
- before(async function () {
- ctx = await initTest(this, dialect)
- })
+for (const dialect of DIALECTS) {
+ const { sqlSpec, variant } = dialect
- beforeEach(async () => {
- await insertDefaultDataSet(ctx)
- executeQuerySpy = sandbox.spy(
- DefaultQueryExecutor.prototype,
- 'executeQuery',
- )
- })
+ if (sqlSpec !== 'mssql') {
+ describe(`${variant}: explain`, () => {
+ let ctx: TestContext
+ const sandbox = createSandbox()
+ let executeQuerySpy: SinonSpy
- afterEach(async () => {
- await clearDatabase(ctx)
- sandbox.restore()
- })
+ before(async function () {
+ ctx = await initTest(this, dialect)
+ })
- after(async () => {
- await destroyTest(ctx)
- })
+ beforeEach(async () => {
+ await insertDefaultDataSet(ctx)
+ executeQuerySpy = sandbox.spy(
+ DefaultQueryExecutor.prototype,
+ 'executeQuery',
+ )
+ })
- it('should add explain statement before selects', async () => {
- await ctx.db.selectFrom('person').selectAll().limit(5).explain()
-
- expect(executeQuerySpy.calledOnce).to.be.true
- expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
- {
- postgres: 'explain select * from "person" limit $1',
- mysql: 'explain select * from `person` limit ?',
- mssql: NOT_SUPPORTED,
- sqlite: 'explain select * from "person" limit ?',
- }[dialect],
- )
- })
+ afterEach(async () => {
+ await clearDatabase(ctx)
+ sandbox.restore()
+ })
- it('should add explain statement before inserts', async () => {
- await ctx.db.insertInto('person').values({ gender: 'female' }).explain()
-
- expect(executeQuerySpy.calledOnce).to.be.true
- expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
- {
- postgres: 'explain insert into "person" ("gender") values ($1)',
- mysql: 'explain insert into `person` (`gender`) values (?)',
- mssql: NOT_SUPPORTED,
- sqlite: 'explain insert into "person" ("gender") values (?)',
- }[dialect],
- )
- })
+ after(async () => {
+ await destroyTest(ctx)
+ })
- it('should add explain statement before updates', async () => {
- await ctx.db
- .updateTable('person')
- .set({ gender: 'female' })
- .where('id', '=', 123)
- .explain()
-
- expect(executeQuerySpy.calledOnce).to.be.true
- expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
- {
- postgres: 'explain update "person" set "gender" = $1 where "id" = $2',
- mysql: 'explain update `person` set `gender` = ? where `id` = ?',
- mssql: NOT_SUPPORTED,
- sqlite: 'explain update "person" set "gender" = ? where "id" = ?',
- }[dialect],
- )
- })
+ it('should add explain statement before selects', async () => {
+ await ctx.db.selectFrom('person').selectAll().limit(5).explain()
- it('should add explain statement before deletes', async () => {
- await ctx.db.deleteFrom('person').where('id', '=', 123).explain()
-
- expect(executeQuerySpy.calledOnce).to.be.true
- expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
- {
- postgres: 'explain delete from "person" where "id" = $1',
- mysql: 'explain delete from `person` where `id` = ?',
- mssql: NOT_SUPPORTED,
- sqlite: 'explain delete from "person" where "id" = ?',
- }[dialect],
- )
- })
+ expect(executeQuerySpy.calledOnce).to.be.true
+ expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
+ {
+ postgres: 'explain select * from "person" limit $1',
+ mysql: 'explain select * from `person` limit ?',
+ mssql: NOT_SUPPORTED,
+ sqlite: 'explain select * from "person" limit ?',
+ }[sqlSpec],
+ )
+ })
- if (dialect === 'mysql') {
- it('should add explain statement before replaces', async () => {
- await ctx.db
- .replaceInto('person')
- .values({ id: 123, gender: 'female' })
- .explain()
+ it('should add explain statement before inserts', async () => {
+ await ctx.db.insertInto('person').values({ gender: 'female' }).explain()
expect(executeQuerySpy.calledOnce).to.be.true
expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
- 'explain replace into `person` (`id`, `gender`) values (?, ?)',
+ {
+ postgres: 'explain insert into "person" ("gender") values ($1)',
+ mysql: 'explain insert into `person` (`gender`) values (?)',
+ mssql: NOT_SUPPORTED,
+ sqlite: 'explain insert into "person" ("gender") values (?)',
+ }[sqlSpec],
)
})
- }
- if (dialect === 'postgres') {
- it('should add explain statement before select, with analyze', async () => {
+ it('should add explain statement before updates', async () => {
await ctx.db
- .selectFrom('person')
+ .updateTable('person')
+ .set({ gender: 'female' })
.where('id', '=', 123)
- .selectAll()
- .explain('json', sql`analyze`)
+ .explain()
expect(executeQuerySpy.calledOnce).to.be.true
expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
{
postgres:
- 'explain (analyze, format json) select * from "person" where "id" = $1',
- mysql: NOT_SUPPORTED,
+ 'explain update "person" set "gender" = $1 where "id" = $2',
+ mysql: 'explain update `person` set `gender` = ? where `id` = ?',
mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
- }[dialect],
+ sqlite: 'explain update "person" set "gender" = ? where "id" = ?',
+ }[sqlSpec],
)
})
- }
- if (dialect === 'mysql') {
- it('should add explain statement before select, with analyze', async () => {
- await ctx.db
- .selectFrom('person')
- .where('id', '=', 123)
- .selectAll()
- .explain('tree', sql`analyze`)
+ it('should add explain statement before deletes', async () => {
+ await ctx.db.deleteFrom('person').where('id', '=', 123).explain()
expect(executeQuerySpy.calledOnce).to.be.true
expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
{
- postgres: NOT_SUPPORTED,
- mysql:
- 'explain analyze format=tree select * from `person` where `id` = ?',
+ postgres: 'explain delete from "person" where "id" = $1',
+ mysql: 'explain delete from `person` where `id` = ?',
mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
- }[dialect],
+ sqlite: 'explain delete from "person" where "id" = ?',
+ }[sqlSpec],
)
})
- }
- })
+
+ if (sqlSpec === 'mysql') {
+ it('should add explain statement before replaces', async () => {
+ await ctx.db
+ .replaceInto('person')
+ .values({ id: 123, gender: 'female' })
+ .explain()
+
+ expect(executeQuerySpy.calledOnce).to.be.true
+ expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
+ 'explain replace into `person` (`id`, `gender`) values (?, ?)',
+ )
+ })
+ }
+
+ if (sqlSpec === 'postgres') {
+ it('should add explain statement before select, with analyze', async () => {
+ await ctx.db
+ .selectFrom('person')
+ .where('id', '=', 123)
+ .selectAll()
+ .explain('json', sql`analyze`)
+
+ expect(executeQuerySpy.calledOnce).to.be.true
+ expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
+ {
+ postgres:
+ 'explain (analyze, format json) select * from "person" where "id" = $1',
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ }[sqlSpec],
+ )
+ })
+ }
+
+ if (sqlSpec === 'mysql') {
+ it('should add explain statement before select, with analyze', async () => {
+ await ctx.db
+ .selectFrom('person')
+ .where('id', '=', 123)
+ .selectAll()
+ .explain('tree', sql`analyze`)
+
+ expect(executeQuerySpy.calledOnce).to.be.true
+ expect(executeQuerySpy.getCall(0).args[0].sql).to.equal(
+ {
+ postgres: NOT_SUPPORTED,
+ mysql:
+ 'explain analyze format=tree select * from `person` where `id` = ?',
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ }[sqlSpec],
+ )
+ })
+ }
+ })
+ }
}
diff --git a/test/node/src/expression.test.ts b/test/node/src/expression.test.ts
index fc9bd13fb..e208e2feb 100644
--- a/test/node/src/expression.test.ts
+++ b/test/node/src/expression.test.ts
@@ -9,7 +9,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: expressions`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: expressions`, () => {
let ctx: TestContext
before(async function () {
@@ -69,14 +71,14 @@ for (const dialect of DIALECTS) {
last_name: eb.ref('first_name'),
}),
// Boolean literal
- ...(dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ ...(sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
? [eb.lit(true)]
: []),
// Between expressions
eb.between('id', 1000, 2000),
- ...(dialect === 'postgres'
+ ...(sqlSpec === 'postgres'
? [eb.betweenSymmetric('id', 3000, 4000)]
: []),
]),
diff --git a/test/node/src/group-by.test.ts b/test/node/src/group-by.test.ts
index af5e3c198..2e490f9ba 100644
--- a/test/node/src/group-by.test.ts
+++ b/test/node/src/group-by.test.ts
@@ -13,7 +13,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: group by`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: group by`, () => {
let ctx: TestContext
before(async function () {
@@ -73,7 +75,7 @@ for (const dialect of DIALECTS) {
])
})
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('group by selection', async () => {
const query = ctx.db
.selectFrom('person')
@@ -200,7 +202,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('group by a sub query', async () => {
const query = ctx.db
.selectFrom('person')
diff --git a/test/node/src/handle-empty-in-lists-plugin.test.ts b/test/node/src/handle-empty-in-lists-plugin.test.ts
index 38d5ee9ce..8f82b2986 100644
--- a/test/node/src/handle-empty-in-lists-plugin.test.ts
+++ b/test/node/src/handle-empty-in-lists-plugin.test.ts
@@ -11,7 +11,7 @@ import {
expect,
DIALECTS,
insertDefaultDataSet,
- BuiltInDialect,
+ DialectDescriptor,
NOT_SUPPORTED,
clearDatabase,
} from './test-setup.js'
@@ -20,19 +20,19 @@ const fixtures = [
{
strategy: replaceWithNoncontingentExpression,
replaceIn: (_lhs: string) => '1 = 0',
- inReturnValue: (dialect: BuiltInDialect) =>
+ inReturnValue: ({ sqlSpec }: DialectDescriptor) =>
({
- [dialect]: false,
+ [sqlSpec]: false,
mysql: '0',
sqlite: 0,
- })[dialect],
+ })[sqlSpec],
replaceNotIn: (_lhs: string) => '1 = 1',
- notInReturnValue: (dialect: BuiltInDialect) =>
+ notInReturnValue: ({ sqlSpec }: DialectDescriptor) =>
({
- [dialect]: true,
+ [sqlSpec]: true,
mysql: '1',
sqlite: 1,
- })[dialect],
+ })[sqlSpec],
},
{
strategy: pushValueIntoList('__kysely_no_values_were_provided__'),
@@ -40,17 +40,19 @@ const fixtures = [
inReturnValue: () => null,
replaceNotIn: (lhs: string) =>
`cast(${lhs} as char) not in ('__kysely_no_values_were_provided__')`,
- notInReturnValue: (dialect: BuiltInDialect) =>
+ notInReturnValue: ({ sqlSpec }: DialectDescriptor) =>
({
- [dialect]: true,
+ [sqlSpec]: true,
mysql: '1',
sqlite: 1,
- })[dialect],
+ })[sqlSpec],
},
] as const
for (const dialect of DIALECTS) {
- describe(`${dialect}: handle empty in lists plugin`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: handle empty in lists plugin`, () => {
for (const fixture of fixtures) {
describe(`strategy: ${fixture.strategy.name}`, () => {
let ctx: TestContext
@@ -218,9 +220,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'mysql' ||
- dialect === 'postgres' ||
- dialect === 'sqlite'
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'sqlite'
) {
it('should handle `select ... in (), ... not in ()`', async () => {
const query = ctx.db
diff --git a/test/node/src/having.test.ts b/test/node/src/having.test.ts
index 4c7a38c63..d7b1cf581 100644
--- a/test/node/src/having.test.ts
+++ b/test/node/src/having.test.ts
@@ -12,7 +12,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: having`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: having`, () => {
let ctx: TestContext
before(async function () {
@@ -112,7 +114,7 @@ for (const dialect of DIALECTS) {
const result = await query.execute()
expect(result).to.have.length(2)
- if (dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'mssql' || sqlSpec === 'sqlite' || variant === 'pglite') {
expect(result).to.containSubset([
{ first_name: 'Jennifer', num_pets: 2 },
{ first_name: 'Arnold', num_pets: 2 },
@@ -181,7 +183,7 @@ for (const dialect of DIALECTS) {
const result = await query.execute()
expect(result).to.have.length(2)
- if (dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'mssql' || sqlSpec === 'sqlite' || variant === 'pglite') {
expect(result).to.containSubset([{ num_pets: 2 }, { num_pets: 2 }])
} else {
expect(result).to.containSubset([{ num_pets: '2' }, { num_pets: '2' }])
diff --git a/test/node/src/immediate-value-plugin.test.ts b/test/node/src/immediate-value-plugin.test.ts
index 25845956a..f6e685fa2 100644
--- a/test/node/src/immediate-value-plugin.test.ts
+++ b/test/node/src/immediate-value-plugin.test.ts
@@ -30,14 +30,18 @@ describe('ImmediateValuePlugin', () => {
.where('gender', 'in', ['male', 'other'])
.selectAll()
- testSql(query, 'postgres', {
- postgres: {
- parameters: [],
- sql: `select * from "person" where "first_name" = 'Sylvester' and "gender" in ('male', 'other')`,
+ testSql(
+ query,
+ { sqlSpec: 'postgres', variant: 'postgres' },
+ {
+ postgres: {
+ parameters: [],
+ sql: `select * from "person" where "first_name" = 'Sylvester' and "gender" in ('male', 'other')`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
},
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
- })
+ )
})
})
diff --git a/test/node/src/insert.test.ts b/test/node/src/insert.test.ts
index 418694fda..b2b001e0f 100644
--- a/test/node/src/insert.test.ts
+++ b/test/node/src/insert.test.ts
@@ -16,7 +16,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: insert into`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: insert into`, () => {
let ctx: TestContext
before(async function () {
@@ -65,7 +67,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(InsertResult)
expect(result.numInsertedOrUpdatedRows).to.equal(1n)
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
expect(result.insertId).to.be.undefined
} else {
expect(result.insertId).to.be.a('bigint')
@@ -106,7 +108,7 @@ for (const dialect of DIALECTS) {
.selectFrom('pet')
.select(sql`max(name)`.as('max_name')),
last_name:
- dialect === 'sqlite'
+ sqlSpec === 'sqlite'
? sql`'Bar' || 'son'`
: sql`concat('Bar', 'son')`,
gender: 'other',
@@ -141,7 +143,7 @@ for (const dialect of DIALECTS) {
})
})
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('should insert one row with expressions', async () => {
const query = ctx.db.insertInto('person').values(({ selectFrom }) => ({
first_name: selectFrom('pet')
@@ -231,7 +233,7 @@ for (const dialect of DIALECTS) {
])
})
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('should insert the result of a values expression', async () => {
const query = ctx.db
.insertInto('person')
@@ -250,7 +252,7 @@ for (const dialect of DIALECTS) {
.select(['t.a', 't.b']),
)
.$call((qb) =>
- dialect === 'postgres'
+ sqlSpec === 'postgres'
? qb.returning(['first_name', 'gender'])
: qb.output(['inserted.first_name', 'inserted.gender']),
)
@@ -310,14 +312,14 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(InsertResult)
expect(result.numInsertedOrUpdatedRows).to.equal(1n)
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
expect(result.insertId).to.be.undefined
} else {
expect(result.insertId).to.be.a('bigint')
}
})
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
for (const { method, action } of [
{ method: 'orAbort', action: 'abort' },
{ method: 'orFail', action: 'fail' },
@@ -346,7 +348,7 @@ for (const dialect of DIALECTS) {
}
}
- if (dialect === 'mysql' || dialect == 'sqlite') {
+ if (sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('should insert one row and ignore conflicts using insert ignore', async () => {
const [{ id, ...existingPet }] = await ctx.db
.selectFrom('pet')
@@ -381,7 +383,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(InsertResult)
expect(result.numInsertedOrUpdatedRows).to.equal(0n)
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
// SQLite seems to return the last inserted id even if nothing got inserted.
expect(result.insertId! > 0n).to.be.equal(true)
} else {
@@ -390,7 +392,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should insert one row and ignore conflicts using `on conflict do nothing`', async () => {
const [{ id, ...existingPet }] = await ctx.db
.selectFrom('pet')
@@ -429,7 +431,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(InsertResult)
expect(result.numInsertedOrUpdatedRows).to.equal(0n)
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
// SQLite seems to return the last inserted id even if nothing got inserted.
expect(result.insertId! > 0n).to.be.equal(true)
} else {
@@ -438,7 +440,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should insert one row and ignore conflicts using `on conflict on constraint do nothing`', async () => {
const [{ id, ...existingPet }] = await ctx.db
.selectFrom('pet')
@@ -473,7 +475,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should update instead of insert on conflict when using onDuplicateKeyUpdate', async () => {
const [{ id, ...existingPet }] = await ctx.db
.selectFrom('pet')
@@ -520,7 +522,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should update instead of insert on conflict when using `on conflict do update`', async () => {
const [{ id, ...existingPet }] = await ctx.db
.selectFrom('pet')
@@ -563,7 +565,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(InsertResult)
expect(result.numInsertedOrUpdatedRows).to.equal(1n)
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
expect(result.insertId).to.be.undefined
} else {
expect(result.insertId).to.be.a('bigint')
@@ -582,7 +584,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should update instead of insert on conflict when using `on conflict on constraint do update`', async () => {
const [{ id, ...existingPet }] = await ctx.db
.selectFrom('pet')
@@ -708,7 +710,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(InsertResult)
expect(result.numInsertedOrUpdatedRows).to.equal(2n)
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
expect(result.insertId).to.be.undefined
} else {
expect(result.insertId).to.be.a('bigint')
@@ -877,7 +879,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should insert a row and return data using `returning`', async () => {
const result = await ctx.db
.insertInto('person')
@@ -887,7 +889,7 @@ for (const dialect of DIALECTS) {
.selectFrom('person')
.select(sql`max(first_name)`.as('max_first_name')),
last_name:
- dialect === 'postgres'
+ sqlSpec === 'postgres'
? sql`concat(cast(${'Bar'} as varchar), cast(${'son'} as varchar))`
: sql`cast(${'Bar'} as varchar) || cast(${'son'} as varchar)`,
})
@@ -933,7 +935,7 @@ for (const dialect of DIALECTS) {
.selectFrom('person')
.select(sql`max(first_name)`.as('max_first_name')),
last_name:
- dialect === 'postgres'
+ sqlSpec === 'postgres'
? sql`concat(cast(${'Bar'} as varchar), cast(${'son'} as varchar))`
: sql`cast(${'Bar'} as varchar) || cast(${'son'} as varchar)`,
})
@@ -953,7 +955,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('modifyEnd should add arbitrary SQL to the end of the query', async () => {
const query = ctx.db
.insertInto('person')
@@ -981,7 +983,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres' && variant !== 'pglite') {
it('should insert multiple rows and stream returned results', async () => {
const values = [
{
@@ -1013,7 +1015,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should insert top', async () => {
const query = ctx.db
.insertInto('person')
@@ -1059,7 +1061,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should insert a row and return data using `output`', async () => {
const result = await ctx.db
.insertInto('person')
diff --git a/test/node/src/introspect.test.ts b/test/node/src/introspect.test.ts
index d56bf6aa6..dd51be9d7 100644
--- a/test/node/src/introspect.test.ts
+++ b/test/node/src/introspect.test.ts
@@ -11,13 +11,15 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: introspect`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: introspect`, () => {
let ctx: TestContext
before(async function () {
ctx = await initTest(this, dialect)
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
await dropSchema()
await createSchema()
}
@@ -36,7 +38,7 @@ for (const dialect of DIALECTS) {
after(async () => {
await dropView()
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
await dropSchema()
}
@@ -47,7 +49,7 @@ for (const dialect of DIALECTS) {
it('should get schema names', async () => {
const schemas = await ctx.db.introspection.getSchemas()
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
expect(schemas).to.containSubset([
{ name: 'public' },
{ name: 'information_schema' },
@@ -55,7 +57,7 @@ for (const dialect of DIALECTS) {
{ name: 'some_schema' },
{ name: 'dtype_schema' },
])
- } else if (dialect === 'mysql') {
+ } else if (sqlSpec === 'mysql') {
expect(schemas).to.containSubset([
{ name: 'mysql' },
{ name: 'information_schema' },
@@ -63,7 +65,7 @@ for (const dialect of DIALECTS) {
{ name: 'sys' },
{ name: 'kysely_test' },
])
- } else if (dialect === 'mssql') {
+ } else if (sqlSpec === 'mssql') {
expect(schemas).to.containSubset([
{ name: 'dbo' },
{ name: 'sys' },
@@ -71,7 +73,7 @@ for (const dialect of DIALECTS) {
{ name: 'INFORMATION_SCHEMA' },
{ name: 'some_schema' },
])
- } else if (dialect === 'sqlite') {
+ } else if (sqlSpec === 'sqlite') {
expect(schemas).to.eql([])
}
})
@@ -81,10 +83,11 @@ for (const dialect of DIALECTS) {
it('should get table metadata', async () => {
const meta = await ctx.db.introspection.getTables()
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
expect(meta).to.eql([
{
name: 'person',
+ isForeign: false,
isView: false,
schema: 'public',
columns: [
@@ -157,6 +160,7 @@ for (const dialect of DIALECTS) {
{
name: 'pet',
isView: false,
+ isForeign: false,
schema: 'public',
columns: [
{
@@ -200,6 +204,7 @@ for (const dialect of DIALECTS) {
{
name: 'toy',
isView: false,
+ isForeign: false,
schema: 'public',
columns: [
{
@@ -242,6 +247,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'toy_names',
+ isForeign: false,
isView: true,
schema: 'public',
columns: [
@@ -258,6 +264,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'MixedCaseTable',
+ isForeign: false,
isView: false,
schema: 'some_schema',
columns: [
@@ -274,6 +281,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'pet',
+ isForeign: false,
isView: false,
schema: 'some_schema',
columns: [
@@ -299,6 +307,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'pet_partition',
+ isForeign: false,
isView: false,
schema: 'some_schema',
columns: [
@@ -314,10 +323,11 @@ for (const dialect of DIALECTS) {
],
},
])
- } else if (dialect === 'mysql') {
+ } else if (sqlSpec === 'mysql') {
expect(meta).to.eql([
{
name: 'person',
+ isForeign: false,
isView: false,
schema: 'kysely_test',
columns: [
@@ -382,6 +392,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'pet',
+ isForeign: false,
isView: false,
schema: 'kysely_test',
columns: [
@@ -421,6 +432,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'toy',
+ isForeign: false,
isView: false,
schema: 'kysely_test',
columns: [
@@ -460,6 +472,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'toy_names',
+ isForeign: false,
isView: true,
schema: 'kysely_test',
columns: [
@@ -474,9 +487,10 @@ for (const dialect of DIALECTS) {
],
},
])
- } else if (dialect === 'mssql') {
+ } else if (sqlSpec === 'mssql') {
expect(meta).to.eql([
{
+ isForeign: false,
isView: false,
name: 'person',
schema: 'dbo',
@@ -547,6 +561,7 @@ for (const dialect of DIALECTS) {
],
},
{
+ isForeign: false,
isView: false,
name: 'pet',
schema: 'dbo',
@@ -590,6 +605,7 @@ for (const dialect of DIALECTS) {
],
},
{
+ isForeign: false,
isView: false,
name: 'toy',
schema: 'dbo',
@@ -633,6 +649,7 @@ for (const dialect of DIALECTS) {
],
},
{
+ isForeign: false,
isView: true,
name: 'toy_names',
schema: 'dbo',
@@ -649,6 +666,7 @@ for (const dialect of DIALECTS) {
],
},
{
+ isForeign: false,
isView: false,
name: 'pet',
schema: 'some_schema',
@@ -674,10 +692,11 @@ for (const dialect of DIALECTS) {
],
},
])
- } else if (dialect === 'sqlite') {
+ } else if (sqlSpec === 'sqlite') {
expect(meta).to.eql([
{
name: 'person',
+ isForeign: false,
isView: false,
columns: [
{
@@ -741,6 +760,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'pet',
+ isForeign: false,
isView: false,
columns: [
{
@@ -779,6 +799,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'toy',
+ isForeign: false,
isView: false,
columns: [
{
@@ -817,6 +838,7 @@ for (const dialect of DIALECTS) {
},
{
name: 'toy_names',
+ isForeign: false,
isView: true,
columns: [
{
@@ -833,7 +855,7 @@ for (const dialect of DIALECTS) {
}
})
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
describe('implicit autoincrement', () => {
const testTableName = 'implicit_increment_test'
@@ -856,6 +878,7 @@ for (const dialect of DIALECTS) {
expect(testTable).to.eql({
name: testTableName,
+ isForeign: false,
isView: false,
columns: [
{
@@ -887,7 +910,7 @@ for (const dialect of DIALECTS) {
async function createSchema() {
await ctx.db.schema.createSchema('some_schema').execute()
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
await ctx.db.schema.createSchema('dtype_schema').execute()
await ctx.db.schema
.createType('dtype_schema.species')
@@ -939,7 +962,7 @@ for (const dialect of DIALECTS) {
.execute()
await ctx.db.schema.dropSchema('some_schema').ifExists().execute()
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
await ctx.db.schema
.dropType('dtype_schema.species')
.ifExists()
diff --git a/test/node/src/join.test.ts b/test/node/src/join.test.ts
index 56e689081..895e5f2c3 100644
--- a/test/node/src/join.test.ts
+++ b/test/node/src/join.test.ts
@@ -14,7 +14,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: join`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: join`, () => {
let ctx: TestContext
before(async function () {
@@ -685,7 +687,7 @@ for (const dialect of DIALECTS) {
})
})
- if (dialect === 'postgres' || dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql' || sqlSpec === 'sqlite') {
describe('full join', () => {
it(`should full join a table`, async () => {
const query = ctx.db
@@ -746,7 +748,7 @@ for (const dialect of DIALECTS) {
})
})
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
describe('lateral join', () => {
it('should join an expression laterally', async () => {
const query = ctx.db
@@ -859,7 +861,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
describe('apply', () => {
it('should cross apply an expression', async () => {
const q = ctx.db
diff --git a/test/node/src/json-traversal.test.ts b/test/node/src/json-traversal.test.ts
index 363aeb33e..5c9dc3794 100644
--- a/test/node/src/json-traversal.test.ts
+++ b/test/node/src/json-traversal.test.ts
@@ -1,12 +1,12 @@
import {
ColumnDefinitionBuilder,
- JSONColumnType,
+ Json,
ParseJSONResultsPlugin,
SqlBool,
sql,
} from '../../..'
import {
- BuiltInDialect,
+ DialectDescriptor,
DIALECTS,
NOT_SUPPORTED,
clearDatabase,
@@ -19,711 +19,735 @@ import {
type TestContext = Awaited>
-for (const dialect of DIALECTS.filter((dialect) => dialect !== 'mssql')) {
- describe(`${dialect}: json traversal`, () => {
- let ctx: TestContext
+for (const dialect of DIALECTS) {
+ const { sqlSpec, variant } = dialect
- before(async function () {
- ctx = await initJSONTest(this, dialect)
- })
-
- beforeEach(async () => {
- await insertDefaultJSONDataSet(ctx)
- })
+ if (sqlSpec !== 'mssql') {
+ describe(`${variant}: json traversal`, () => {
+ let ctx: TestContext
- afterEach(async () => {
- await clearJSONDatabase(ctx)
- })
+ before(async function () {
+ ctx = await initJSONTest(this, dialect)
+ })
- after(async () => {
- await destroyJSONTest(ctx)
- })
+ beforeEach(async () => {
+ await insertDefaultJSONDataSet(ctx)
+ })
- if (dialect === 'mysql' || dialect === 'sqlite') {
- describe('JSON reference using JSON Path syntax ($)', () => {
- const jsonOperator = dialect === 'mysql' ? '->$' : '->>$'
-
- it(`should execute a query with column${jsonOperator}.key in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb.ref('website', jsonOperator).key('url').as('website_url'),
- )
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [],
- sql: "select `website`->'$.url' as `website_url` from `person_metadata`",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "website"->>'$.url' as "website_url" from "person_metadata"`,
- },
- })
+ afterEach(async () => {
+ await clearJSONDatabase(ctx)
+ })
- const results = await query.execute()
+ after(async () => {
+ await destroyJSONTest(ctx)
+ })
- expect(results).to.containSubset([
- { website_url: 'https://www.jenniferaniston.com' },
- { website_url: 'https://www.arnoldschwarzenegger.com' },
- { website_url: 'https://www.sylvesterstallone.com' },
- ])
- })
+ if (sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
+ describe('JSON reference using JSON Path syntax ($)', () => {
+ const jsonOperator = sqlSpec === 'mysql' ? '->$' : '->>$'
- it(`should execute a query with column${jsonOperator}[0] in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb.ref('nicknames', jsonOperator).at(0).as('nickname'),
- )
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [],
- sql: "select `nicknames`->'$[0]' as `nickname` from `person_metadata`",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "nicknames"->>'$[0]' as "nickname" from "person_metadata"`,
- },
- })
+ it(`should execute a query with column${jsonOperator}.key in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb.ref('website', jsonOperator).key('url').as('website_url'),
+ )
- const results = await query.execute()
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: [],
+ sql: "select `website`->'$.url' as `website_url` from `person_metadata`",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "website"->>'$.url' as "website_url" from "person_metadata"`,
+ },
+ })
- expect(results).to.containSubset([
- { nickname: 'J.A.' },
- { nickname: 'A.S.' },
- { nickname: 'S.S.' },
- ])
- })
+ const results = await query.execute()
- it(`should execute a query with column${jsonOperator}.key.key in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb
- .ref('profile', jsonOperator)
- .key('auth')
- .key('roles')
- .as('roles'),
- )
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [],
- sql: "select `profile`->'$.auth.roles' as `roles` from `person_metadata`",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "profile"->>'$.auth.roles' as "roles" from "person_metadata"`,
- },
+ expect(results).to.containSubset([
+ { website_url: 'https://www.jenniferaniston.com' },
+ { website_url: 'https://www.arnoldschwarzenegger.com' },
+ { website_url: 'https://www.sylvesterstallone.com' },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column${jsonOperator}[0] in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb.ref('nicknames', jsonOperator).at(0).as('nickname'),
+ )
- expect(results).to.containSubset([
- { roles: ['contributor', 'moderator'] },
- { roles: ['contributor', 'moderator'] },
- { roles: ['contributor', 'moderator'] },
- ])
- })
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: [],
+ sql: "select `nicknames`->'$[0]' as `nickname` from `person_metadata`",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "nicknames"->>'$[0]' as "nickname" from "person_metadata"`,
+ },
+ })
- it(`should execute a query with column${jsonOperator}.key[0] in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb.ref('profile', jsonOperator).key('tags').at(0).as('main_tag'),
- )
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [],
- sql: "select `profile`->'$.tags[0]' as `main_tag` from `person_metadata`",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "profile"->>'$.tags[0]' as "main_tag" from "person_metadata"`,
- },
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { nickname: 'J.A.' },
+ { nickname: 'A.S.' },
+ { nickname: 'S.S.' },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column${jsonOperator}.key.key in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('roles')
+ .as('roles'),
+ )
- expect(results).to.containSubset([
- { main_tag: 'awesome' },
- { main_tag: 'awesome' },
- { main_tag: 'awesome' },
- ])
- })
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: [],
+ sql: "select `profile`->'$.auth.roles' as `roles` from `person_metadata`",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "profile"->>'$.auth.roles' as "roles" from "person_metadata"`,
+ },
+ })
- it(`should execute a query with column${jsonOperator}[0].key in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb
- .ref('experience', jsonOperator)
- .at(0)
- .key('establishment')
- .as('establishment'),
- )
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [],
- sql: "select `experience`->'$[0].establishment' as `establishment` from `person_metadata`",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "experience"->>'$[0].establishment' as "establishment" from "person_metadata"`,
- },
- })
+ const results = await query.execute()
- const results = await query.execute()
+ expect(results).to.containSubset([
+ { roles: ['contributor', 'moderator'] },
+ { roles: ['contributor', 'moderator'] },
+ { roles: ['contributor', 'moderator'] },
+ ])
+ })
- expect(results).to.containSubset([
- { establishment: 'The University of Life' },
- { establishment: 'The University of Life' },
- { establishment: 'The University of Life' },
- ])
- })
+ it(`should execute a query with column${jsonOperator}.key[0] in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb
+ .ref('profile', jsonOperator)
+ .key('tags')
+ .at(0)
+ .as('main_tag'),
+ )
- it(`should execute a query with column${jsonOperator}[0][0] in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb
- .ref('schedule', jsonOperator)
- .at(0)
- .at(0)
- .as('january_1st_schedule'),
- )
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [],
- sql: "select `schedule`->'$[0][0]' as `january_1st_schedule` from `person_metadata`",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "schedule"->>'$[0][0]' as "january_1st_schedule" from "person_metadata"`,
- },
- })
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: [],
+ sql: "select `profile`->'$.tags[0]' as `main_tag` from `person_metadata`",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "profile"->>'$.tags[0]' as "main_tag" from "person_metadata"`,
+ },
+ })
- const results = await query.execute()
+ const results = await query.execute()
- expect(results).to.containSubset([
- { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
- { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
- { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
- ])
- })
+ expect(results).to.containSubset([
+ { main_tag: 'awesome' },
+ { main_tag: 'awesome' },
+ { main_tag: 'awesome' },
+ ])
+ })
- if (dialect === 'mysql') {
- it('should execute a query with column->$[last] in select clause', async () => {
+ it(`should execute a query with column${jsonOperator}[0].key in select clause`, async () => {
const query = ctx.db
.selectFrom('person_metadata')
.select((eb) =>
- eb.ref('nicknames', '->$').at('last').as('nickname'),
+ eb
+ .ref('experience', jsonOperator)
+ .at(0)
+ .key('establishment')
+ .as('establishment'),
)
testSql(query, dialect, {
postgres: NOT_SUPPORTED,
mysql: {
parameters: [],
- sql: "select `nicknames`->'$[last]' as `nickname` from `person_metadata`",
+ sql: "select `experience`->'$[0].establishment' as `establishment` from `person_metadata`",
},
mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "experience"->>'$[0].establishment' as "establishment" from "person_metadata"`,
+ },
})
const results = await query.execute()
expect(results).to.containSubset([
- { nickname: 'Aniston the Magnificent' },
- { nickname: 'Schwarzenegger the Magnificent' },
- { nickname: 'Stallone the Magnificent' },
+ { establishment: 'The University of Life' },
+ { establishment: 'The University of Life' },
+ { establishment: 'The University of Life' },
])
})
- }
- if (dialect === 'sqlite') {
- it('should execute a query with column->>$[#-1] in select clause', async () => {
+ it(`should execute a query with column${jsonOperator}[0][0] in select clause`, async () => {
const query = ctx.db
.selectFrom('person_metadata')
.select((eb) =>
- eb.ref('nicknames', '->>$').at('#-1').as('nickname'),
+ eb
+ .ref('schedule', jsonOperator)
+ .at(0)
+ .at(0)
+ .as('january_1st_schedule'),
)
testSql(query, dialect, {
postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
+ mysql: {
+ parameters: [],
+ sql: "select `schedule`->'$[0][0]' as `january_1st_schedule` from `person_metadata`",
+ },
mssql: NOT_SUPPORTED,
sqlite: {
parameters: [],
- sql: `select "nicknames"->>'$[#-1]' as "nickname" from "person_metadata"`,
+ sql: `select "schedule"->>'$[0][0]' as "january_1st_schedule" from "person_metadata"`,
},
})
const results = await query.execute()
expect(results).to.containSubset([
- { nickname: 'Aniston the Magnificent' },
- { nickname: 'Schwarzenegger the Magnificent' },
- { nickname: 'Stallone the Magnificent' },
+ { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
+ { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
+ { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
])
})
- }
- const expectedBooleanValue = dialect === 'mysql' ? true : 1
-
- it(`should execute a query with column${jsonOperator} in select clause with non-string properties`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) => [
- eb
- .ref('profile', jsonOperator)
- .key('auth')
- .key('is_verified')
- .as('is_verified'),
- eb
- .ref('profile', jsonOperator)
- .key('auth')
- .key('login_count')
- .as('login_count'),
- eb.ref('profile', jsonOperator).key('avatar').as('avatar'),
- ])
+ if (sqlSpec === 'mysql') {
+ it('should execute a query with column->$[last] in select clause', async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb.ref('nicknames', '->$').at('last').as('nickname'),
+ )
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: [],
+ sql: "select `nicknames`->'$[last]' as `nickname` from `person_metadata`",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { nickname: 'Aniston the Magnificent' },
+ { nickname: 'Schwarzenegger the Magnificent' },
+ { nickname: 'Stallone the Magnificent' },
+ ])
+ })
+ }
+
+ if (sqlSpec === 'sqlite') {
+ it('should execute a query with column->>$[#-1] in select clause', async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb.ref('nicknames', '->>$').at('#-1').as('nickname'),
+ )
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "nicknames"->>'$[#-1]' as "nickname" from "person_metadata"`,
+ },
+ })
+
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { nickname: 'Aniston the Magnificent' },
+ { nickname: 'Schwarzenegger the Magnificent' },
+ { nickname: 'Stallone the Magnificent' },
+ ])
+ })
+ }
- const results = await query.execute()
+ const expectedBooleanValue = sqlSpec === 'mysql' ? true : 1
- expect(results).to.containSubset([
- {
- is_verified: expectedBooleanValue,
- login_count: 12,
- avatar: null,
- },
- ])
- })
+ it(`should execute a query with column${jsonOperator} in select clause with non-string properties`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) => [
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('is_verified')
+ .as('is_verified'),
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('login_count')
+ .as('login_count'),
+ eb.ref('profile', jsonOperator).key('avatar').as('avatar'),
+ ])
- it(`should execute a query with column${jsonOperator}.key.key in where clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .where((eb) =>
- eb(
- eb.ref('profile', jsonOperator).key('auth').key('login_count'),
- '=',
- 12,
- ),
- )
- .selectAll()
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [12],
- sql: "select * from `person_metadata` where `profile`->'$.auth.login_count' = ?",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [12],
- sql: `select * from "person_metadata" where "profile"->>'$.auth.login_count' = ?`,
- },
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ {
+ is_verified: expectedBooleanValue,
+ login_count: 12,
+ avatar: null,
+ },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column${jsonOperator}.key.key in where clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .where((eb) =>
+ eb(
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('login_count'),
+ '=',
+ 12,
+ ),
+ )
+ .selectAll()
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: [12],
+ sql: "select * from `person_metadata` where `profile`->'$.auth.login_count' = ?",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [12],
+ sql: `select * from "person_metadata" where "profile"->>'$.auth.login_count' = ?`,
+ },
+ })
- expect(results).to.have.length(1)
- expect(results[0].profile.auth.login_count).to.equal(12)
- })
+ const results = await query.execute()
- it(`should execute a query with column${jsonOperator}.key.key in order by clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .orderBy(
- (eb) =>
- eb.ref('profile', jsonOperator).key('auth').key('login_count'),
- 'desc',
- )
- .selectAll()
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: [],
- sql: "select * from `person_metadata` order by `profile`->'$.auth.login_count' desc",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select * from "person_metadata" order by "profile"->>'$.auth.login_count' desc`,
- },
+ expect(results).to.have.length(1)
+ expect(results[0].profile.auth.login_count).to.equal(12)
})
- const results = await query.execute()
+ it(`should execute a query with column${jsonOperator}.key.key in order by clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .orderBy(
+ (eb) =>
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('login_count'),
+ 'desc',
+ )
+ .selectAll()
- expect(results).to.have.length(3)
- expect(results[0].profile.auth.login_count).to.equal(14)
- expect(results[1].profile.auth.login_count).to.equal(13)
- expect(results[2].profile.auth.login_count).to.equal(12)
- })
- })
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: [],
+ sql: "select * from `person_metadata` order by `profile`->'$.auth.login_count' desc",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select * from "person_metadata" order by "profile"->>'$.auth.login_count' desc`,
+ },
+ })
- describe('Standalone JSON path syntax ($)', () => {
- it('should execute a query with json_set function', async () => {
- const lastItem = dialect === 'mysql' ? 'last' : '#-1'
-
- const query = ctx.db
- .updateTable('person_metadata')
- .set('experience', (eb) =>
- eb.fn('json_set', [
- 'experience',
- eb.jsonPath<'experience'>().at(lastItem).key('establishment'),
- eb.val('Papa Johns'),
- ]),
- )
- .where('person_id', '=', 911)
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: {
- parameters: ['Papa Johns', 911],
- sql: "update `person_metadata` set `experience` = json_set(`experience`, '$[last].establishment', ?) where `person_id` = ?",
- },
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: ['Papa Johns', 911],
- sql: `update "person_metadata" set "experience" = json_set("experience", '$[#-1].establishment', ?) where "person_id" = ?`,
- },
- })
+ const results = await query.execute()
- await query.execute()
+ expect(results).to.have.length(3)
+ expect(results[0].profile.auth.login_count).to.equal(14)
+ expect(results[1].profile.auth.login_count).to.equal(13)
+ expect(results[2].profile.auth.login_count).to.equal(12)
+ })
})
- })
- }
- if (dialect === 'postgres' || dialect === 'sqlite') {
- describe('JSON reference using PostgreSQL-style syntax (->->->>)', () => {
- const jsonOperator = dialect === 'postgres' ? '->' : '->>'
-
- it(`should execute a query with column${jsonOperator}key in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb.ref('website', jsonOperator).key('url').as('website_url'),
- )
-
- testSql(query, dialect, {
- postgres: {
- parameters: [],
- sql: `select "website"->'url' as "website_url" from "person_metadata"`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "website"->>'url' as "website_url" from "person_metadata"`,
- },
- })
+ describe('Standalone JSON path syntax ($)', () => {
+ it('should execute a query with json_set function', async () => {
+ const lastItem = sqlSpec === 'mysql' ? 'last' : '#-1'
- const results = await query.execute()
+ const query = ctx.db
+ .updateTable('person_metadata')
+ .set('experience', (eb) =>
+ eb.fn('json_set', [
+ 'experience',
+ eb.jsonPath<'experience'>().at(lastItem).key('establishment'),
+ eb.val('Papa Johns'),
+ ]),
+ )
+ .where('person_id', '=', 911)
- expect(results).to.containSubset([
- { website_url: 'https://www.jenniferaniston.com' },
- { website_url: 'https://www.arnoldschwarzenegger.com' },
- { website_url: 'https://www.sylvesterstallone.com' },
- ])
- })
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ parameters: ['Papa Johns', 911],
+ sql: "update `person_metadata` set `experience` = json_set(`experience`, '$[last].establishment', ?) where `person_id` = ?",
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: ['Papa Johns', 911],
+ sql: `update "person_metadata" set "experience" = json_set("experience", '$[#-1].establishment', ?) where "person_id" = ?`,
+ },
+ })
- it(`should execute a query with column${jsonOperator}0 in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb.ref('nicknames', jsonOperator).at(0).as('nickname'),
- )
-
- testSql(query, dialect, {
- postgres: {
- parameters: [],
- sql: `select "nicknames"->0 as "nickname" from "person_metadata"`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "nicknames"->>0 as "nickname" from "person_metadata"`,
- },
+ await query.execute()
})
+ })
+ }
- const results = await query.execute()
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
+ describe('JSON reference using PostgreSQL-style syntax (->->->>)', () => {
+ const jsonOperator = sqlSpec === 'postgres' ? '->' : '->>'
- expect(results).to.containSubset([
- { nickname: 'J.A.' },
- { nickname: 'A.S.' },
- { nickname: 'S.S.' },
- ])
- })
+ it(`should execute a query with column${jsonOperator}key in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb.ref('website', jsonOperator).key('url').as('website_url'),
+ )
- it(`should execute a query with column->key${jsonOperator}key in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb
- .ref('profile', jsonOperator)
- .key('auth')
- .key('roles')
- .as('roles'),
- )
-
- testSql(query, dialect, {
- postgres: {
- parameters: [],
- sql: `select "profile"->'auth'->'roles' as "roles" from "person_metadata"`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "profile"->'auth'->>'roles' as "roles" from "person_metadata"`,
- },
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [],
+ sql: `select "website"->'url' as "website_url" from "person_metadata"`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "website"->>'url' as "website_url" from "person_metadata"`,
+ },
+ })
+
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { website_url: 'https://www.jenniferaniston.com' },
+ { website_url: 'https://www.arnoldschwarzenegger.com' },
+ { website_url: 'https://www.sylvesterstallone.com' },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column${jsonOperator}0 in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb.ref('nicknames', jsonOperator).at(0).as('nickname'),
+ )
- expect(results).to.containSubset([
- { roles: ['contributor', 'moderator'] },
- { roles: ['contributor', 'moderator'] },
- { roles: ['contributor', 'moderator'] },
- ])
- })
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [],
+ sql: `select "nicknames"->0 as "nickname" from "person_metadata"`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "nicknames"->>0 as "nickname" from "person_metadata"`,
+ },
+ })
- it(`should execute a query with column->key${jsonOperator}0 in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb.ref('profile', jsonOperator).key('tags').at(0).as('main_tag'),
- )
-
- testSql(query, dialect, {
- postgres: {
- parameters: [],
- sql: `select "profile"->'tags'->0 as "main_tag" from "person_metadata"`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "profile"->'tags'->>0 as "main_tag" from "person_metadata"`,
- },
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { nickname: 'J.A.' },
+ { nickname: 'A.S.' },
+ { nickname: 'S.S.' },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column->key${jsonOperator}key in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('roles')
+ .as('roles'),
+ )
- expect(results).to.containSubset([
- { main_tag: 'awesome' },
- { main_tag: 'awesome' },
- { main_tag: 'awesome' },
- ])
- })
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [],
+ sql: `select "profile"->'auth'->'roles' as "roles" from "person_metadata"`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "profile"->'auth'->>'roles' as "roles" from "person_metadata"`,
+ },
+ })
- it(`should execute a query with column->0${jsonOperator}key in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb
- .ref('experience', jsonOperator)
- .at(0)
- .key('establishment')
- .as('establishment'),
- )
-
- testSql(query, dialect, {
- postgres: {
- parameters: [],
- sql: `select "experience"->0->'establishment' as "establishment" from "person_metadata"`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "experience"->0->>'establishment' as "establishment" from "person_metadata"`,
- },
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { roles: ['contributor', 'moderator'] },
+ { roles: ['contributor', 'moderator'] },
+ { roles: ['contributor', 'moderator'] },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column->key${jsonOperator}0 in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb
+ .ref('profile', jsonOperator)
+ .key('tags')
+ .at(0)
+ .as('main_tag'),
+ )
- expect(results).to.containSubset([
- { establishment: 'The University of Life' },
- { establishment: 'The University of Life' },
- { establishment: 'The University of Life' },
- ])
- })
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [],
+ sql: `select "profile"->'tags'->0 as "main_tag" from "person_metadata"`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "profile"->'tags'->>0 as "main_tag" from "person_metadata"`,
+ },
+ })
- it(`should execute a query with column->0${jsonOperator}0 in select clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) =>
- eb
- .ref('schedule', jsonOperator)
- .at(0)
- .at(0)
- .as('january_1st_schedule'),
- )
-
- testSql(query, dialect, {
- postgres: {
- parameters: [],
- sql: `select "schedule"->0->0 as "january_1st_schedule" from "person_metadata"`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select "schedule"->0->>0 as "january_1st_schedule" from "person_metadata"`,
- },
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { main_tag: 'awesome' },
+ { main_tag: 'awesome' },
+ { main_tag: 'awesome' },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column->0${jsonOperator}key in select clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) =>
+ eb
+ .ref('experience', jsonOperator)
+ .at(0)
+ .key('establishment')
+ .as('establishment'),
+ )
- expect(results).to.containSubset([
- { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
- { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
- { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
- ])
- })
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [],
+ sql: `select "experience"->0->'establishment' as "establishment" from "person_metadata"`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "experience"->0->>'establishment' as "establishment" from "person_metadata"`,
+ },
+ })
+
+ const results = await query.execute()
- if (dialect === 'postgres') {
- it('should execute a query with column->-1 in select clause', async () => {
+ expect(results).to.containSubset([
+ { establishment: 'The University of Life' },
+ { establishment: 'The University of Life' },
+ { establishment: 'The University of Life' },
+ ])
+ })
+
+ it(`should execute a query with column->0${jsonOperator}0 in select clause`, async () => {
const query = ctx.db
.selectFrom('person_metadata')
- .select((eb) => eb.ref('nicknames', '->').at(-1).as('nickname'))
+ .select((eb) =>
+ eb
+ .ref('schedule', jsonOperator)
+ .at(0)
+ .at(0)
+ .as('january_1st_schedule'),
+ )
testSql(query, dialect, {
postgres: {
parameters: [],
- sql: `select "nicknames"->-1 as "nickname" from "person_metadata"`,
+ sql: `select "schedule"->0->0 as "january_1st_schedule" from "person_metadata"`,
},
mysql: NOT_SUPPORTED,
mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select "schedule"->0->>0 as "january_1st_schedule" from "person_metadata"`,
+ },
})
const results = await query.execute()
expect(results).to.containSubset([
- { nickname: 'Aniston the Magnificent' },
- { nickname: 'Schwarzenegger the Magnificent' },
- { nickname: 'Stallone the Magnificent' },
+ { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
+ { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
+ { january_1st_schedule: [{ name: 'Gym', time: '12:15' }] },
])
})
- }
- const expectedBooleanValue = dialect === 'postgres' ? true : 1
-
- it(`should execute a query with column${jsonOperator} in select clause with non-string properties`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .select((eb) => [
- eb
- .ref('profile', jsonOperator)
- .key('auth')
- .key('is_verified')
- .as('is_verified'),
- eb
- .ref('profile', jsonOperator)
- .key('auth')
- .key('login_count')
- .as('login_count'),
- eb.ref('profile', jsonOperator).key('avatar').as('avatar'),
- ])
+ if (sqlSpec === 'postgres') {
+ it('should execute a query with column->-1 in select clause', async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) => eb.ref('nicknames', '->').at(-1).as('nickname'))
+
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [],
+ sql: `select "nicknames"->-1 as "nickname" from "person_metadata"`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ { nickname: 'Aniston the Magnificent' },
+ { nickname: 'Schwarzenegger the Magnificent' },
+ { nickname: 'Stallone the Magnificent' },
+ ])
+ })
+ }
- const results = await query.execute()
+ const expectedBooleanValue = sqlSpec === 'postgres' ? true : 1
- expect(results).to.containSubset([
- {
- is_verified: expectedBooleanValue,
- login_count: 12,
- avatar: null,
- },
- ])
- })
+ it(`should execute a query with column${jsonOperator} in select clause with non-string properties`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .select((eb) => [
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('is_verified')
+ .as('is_verified'),
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('login_count')
+ .as('login_count'),
+ eb.ref('profile', jsonOperator).key('avatar').as('avatar'),
+ ])
- it(`should execute a query with column->key${jsonOperator}key in where clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .where((eb) =>
- eb(
- eb.ref('profile', jsonOperator).key('auth').key('login_count'),
- '=',
- 12,
- ),
- )
- .selectAll()
-
- testSql(query, dialect, {
- postgres: {
- parameters: [12],
- sql: `select * from "person_metadata" where "profile"->'auth'->'login_count' = $1`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [12],
- sql: `select * from "person_metadata" where "profile"->'auth'->>'login_count' = ?`,
- },
+ const results = await query.execute()
+
+ expect(results).to.containSubset([
+ {
+ is_verified: expectedBooleanValue,
+ login_count: 12,
+ avatar: null,
+ },
+ ])
})
- const results = await query.execute()
+ it(`should execute a query with column->key${jsonOperator}key in where clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .where((eb) =>
+ eb(
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('login_count'),
+ '=',
+ 12,
+ ),
+ )
+ .selectAll()
- expect(results).to.have.length(1)
- expect(results[0].profile.auth.login_count).to.equal(12)
- })
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [12],
+ sql: `select * from "person_metadata" where "profile"->'auth'->'login_count' = $1`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [12],
+ sql: `select * from "person_metadata" where "profile"->'auth'->>'login_count' = ?`,
+ },
+ })
- it(`should execute a query with column->key${jsonOperator}key in order by clause`, async () => {
- const query = ctx.db
- .selectFrom('person_metadata')
- .orderBy(
- (eb) =>
- eb.ref('profile', jsonOperator).key('auth').key('login_count'),
- 'desc',
- )
- .selectAll()
-
- testSql(query, dialect, {
- postgres: {
- parameters: [],
- sql: `select * from "person_metadata" order by "profile"->'auth'->'login_count' desc`,
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: {
- parameters: [],
- sql: `select * from "person_metadata" order by "profile"->'auth'->>'login_count' desc`,
- },
+ const results = await query.execute()
+
+ expect(results).to.have.length(1)
+ expect(results[0].profile.auth.login_count).to.equal(12)
})
- const results = await query.execute()
+ it(`should execute a query with column->key${jsonOperator}key in order by clause`, async () => {
+ const query = ctx.db
+ .selectFrom('person_metadata')
+ .orderBy(
+ (eb) =>
+ eb
+ .ref('profile', jsonOperator)
+ .key('auth')
+ .key('login_count'),
+ 'desc',
+ )
+ .selectAll()
- expect(results).to.have.length(3)
- expect(results[0].profile.auth.login_count).to.equal(14)
- expect(results[1].profile.auth.login_count).to.equal(13)
- expect(results[2].profile.auth.login_count).to.equal(12)
+ testSql(query, dialect, {
+ postgres: {
+ parameters: [],
+ sql: `select * from "person_metadata" order by "profile"->'auth'->'login_count' desc`,
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: {
+ parameters: [],
+ sql: `select * from "person_metadata" order by "profile"->'auth'->>'login_count' desc`,
+ },
+ })
+
+ const results = await query.execute()
+
+ expect(results).to.have.length(3)
+ expect(results[0].profile.auth.login_count).to.equal(14)
+ expect(results[1].profile.auth.login_count).to.equal(13)
+ expect(results[2].profile.auth.login_count).to.equal(12)
+ })
})
- })
- }
- })
+ }
+ })
+ }
}
-async function initJSONTest(
+async function initJSONTest(
ctx: Mocha.Context,
dialect: D,
) {
@@ -732,9 +756,9 @@ async function initJSONTest(
let db = testContext.db.withTables<{
person_metadata: {
person_id: number
- website: JSONColumnType<{ url: string }>
- nicknames: JSONColumnType
- profile: JSONColumnType<{
+ website: Json<{ url: string }>
+ nicknames: Json
+ profile: Json<{
auth: {
roles: string[]
last_login?: { device: string }
@@ -744,16 +768,16 @@ async function initJSONTest(
avatar: string | null
tags: string[]
}>
- experience: JSONColumnType<
+ experience: Json<
{
establishment: string
}[]
>
- schedule: JSONColumnType<{ name: string; time: string }[][][]>
+ schedule: Json<{ name: string; time: string }[][][]>
}
}>()
- if (dialect === 'sqlite') {
+ if (dialect.sqlSpec === 'sqlite') {
db = db.withPlugin(new ParseJSONResultsPlugin())
}
@@ -775,8 +799,8 @@ async function initJSONTest(
return { ...testContext, db }
}
-function resolveJSONColumnDataType(dialect: BuiltInDialect) {
- switch (dialect) {
+function resolveJSONColumnDataType(dialect: DialectDescriptor) {
+ switch (dialect.sqlSpec) {
case 'postgres':
return 'jsonb'
case 'mysql':
@@ -798,20 +822,20 @@ async function insertDefaultJSONDataSet(ctx: TestContext) {
await ctx.db
.insertInto('person_metadata')
- .values(
+ .values((eb) =>
people
.filter((person) => person.first_name && person.last_name)
.map((person, index) => ({
person_id: person.id,
- website: JSON.stringify({
+ website: eb.jval({
url: `https://www.${person.first_name!.toLowerCase()}${person.last_name!.toLowerCase()}.com`,
}),
- nicknames: JSON.stringify([
+ nicknames: eb.jval([
`${person.first_name![0]}.${person.last_name![0]}.`,
`${person.first_name} the Great`,
`${person.last_name} the Magnificent`,
]),
- profile: JSON.stringify({
+ profile: eb.jval({
tags: ['awesome'],
auth: {
roles: ['contributor', 'moderator'],
@@ -823,12 +847,12 @@ async function insertDefaultJSONDataSet(ctx: TestContext) {
},
avatar: null,
}),
- experience: JSON.stringify([
+ experience: eb.jval([
{
establishment: 'The University of Life',
},
]),
- schedule: JSON.stringify([[[{ name: 'Gym', time: '12:15' }]]]),
+ schedule: sql.jval([[[{ name: 'Gym', time: '12:15' }]]]),
})),
)
.execute()
diff --git a/test/node/src/json.test.ts b/test/node/src/json.test.ts
index 4f0933253..240a97e64 100644
--- a/test/node/src/json.test.ts
+++ b/test/node/src/json.test.ts
@@ -73,24 +73,26 @@ const jsonFunctions = {
} as const
for (const dialect of DIALECTS) {
+ const { sqlSpec, variant } = dialect
+
const { jsonArrayFrom, jsonObjectFrom, jsonBuildObject } =
- jsonFunctions[dialect]
+ jsonFunctions[sqlSpec]
- describe(`${dialect} json tests`, () => {
+ describe(`${variant}: json helpers`, () => {
let ctx: TestContext
let db: Kysely
before(async function () {
ctx = await initTest(this, dialect)
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
await ctx.db.schema
.createTable('json_table')
.ifNotExists()
.addColumn('id', 'serial', (col) => col.primaryKey())
.addColumn('data', 'jsonb')
.execute()
- } else if (dialect === 'mssql') {
+ } else if (sqlSpec === 'mssql') {
await sql`if object_id(N'json_table', N'U') is null begin create table json_table (id int primary key identity, data nvarchar(1024)); end;`.execute(
ctx.db,
)
@@ -105,7 +107,7 @@ for (const dialect of DIALECTS) {
db = ctx.db.withTables<{ json_table: JsonTable }>()
- if (dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'mssql' || sqlSpec === 'sqlite') {
db = db.withPlugin(new ParseJSONResultsPlugin())
}
})
@@ -158,7 +160,7 @@ for (const dialect of DIALECTS) {
expect(result.numInsertedOrUpdatedRows).to.equal(1n)
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should update json data of a row using the subscript syntax and a raw sql snippet', async () => {
await db
.insertInto('json_table')
@@ -185,7 +187,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should aggregate a joined table using json_agg', async () => {
const res = await db
.selectFrom('person')
@@ -416,7 +418,7 @@ for (const dialect of DIALECTS) {
first: eb.ref('first_name'),
last: eb.ref('last_name'),
full:
- dialect === 'sqlite'
+ sqlSpec === 'sqlite'
? sql`first_name || ' ' || last_name`
: eb.fn('concat', ['first_name', sql.lit(' '), 'last_name']),
}).as('name'),
@@ -432,7 +434,7 @@ for (const dialect of DIALECTS) {
const res = await query.execute()
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
// MySQL json_arrayagg produces an array with undefined order
// https://dev.mysql.com/doc/refman/8.0/en/aggregate-functions.html#function_json-arrayagg
res[1].pets[0].toys.sort((a, b) => a.name.localeCompare(b.name))
diff --git a/test/node/src/merge.test.ts b/test/node/src/merge.test.ts
index 136385079..b85d25618 100644
--- a/test/node/src/merge.test.ts
+++ b/test/node/src/merge.test.ts
@@ -12,352 +12,93 @@ import {
testSql,
} from './test-setup.js'
-for (const dialect of DIALECTS.filter(
- (dialect) => dialect === 'postgres' || dialect === 'mssql',
-)) {
- describe(`merge (${dialect})`, () => {
- let ctx: TestContext
-
- before(async function () {
- ctx = await initTest(this, dialect)
- })
-
- beforeEach(async () => {
- await insertDefaultDataSet(ctx)
- })
-
- afterEach(async () => {
- await clearDatabase(ctx)
- })
-
- after(async () => {
- await destroyTest(ctx)
- })
-
- describe('using', () => {
- it('should perform a merge...using table simple on...when matched then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
- })
-
- it('should add a modifyEnd clause to the query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .modifyEnd(sql.raw('-- this is a comment'))
- .whenMatched()
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete -- this is a comment',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete -- this is a comment;',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
- })
-
- it('should perform a merge...using table alias simple on alias...when matched then delete query', async () => {
- const query = ctx.db
- .mergeInto('person as pr')
- .using('pet as pt', 'pt.owner_id', 'pr.id')
- .whenMatched()
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" as "pr" using "pet" as "pt" on "pt"."owner_id" = "pr"."id" when matched then delete',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" as "pr" using "pet" as "pt" on "pt"."owner_id" = "pr"."id" when matched then delete;',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
- })
-
- it('should perform a merge...using table complex on...when matched then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', (on) =>
- on
- .onRef('pet.owner_id', '=', 'person.id')
- .on('pet.name', '=', 'Lucky'),
- )
- .whenMatched()
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = $1 when matched then delete',
- parameters: ['Lucky'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = @1 when matched then delete;',
- parameters: ['Lucky'],
- },
- sqlite: NOT_SUPPORTED,
- })
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
- })
-
- it('should perform a merge...using subquery simple on...when matched then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using(
- ctx.db
- .selectFrom('pet')
- .select('owner_id')
- .where('name', '=', 'Lucky')
- .as('pet'),
- 'pet.owner_id',
- 'person.id',
- )
- .whenMatched()
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using (select "owner_id" from "pet" where "name" = $1) as "pet" on "pet"."owner_id" = "person"."id" when matched then delete',
- parameters: ['Lucky'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using (select "owner_id" from "pet" where "name" = @1) as "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
- parameters: ['Lucky'],
- },
- sqlite: NOT_SUPPORTED,
- })
- })
- })
-
- describe('whenMatched', () => {
- it('should perform a merge...using table simple on...when matched and simple binary then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatchedAnd('person.gender', '=', 'female')
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."gender" = $1 then delete',
- parameters: ['female'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."gender" = @1 then delete;',
- parameters: ['female'],
- },
- sqlite: NOT_SUPPORTED,
- })
+for (const dialect of DIALECTS) {
+ const { sqlSpec, variant } = dialect
- const result = await query.executeTakeFirstOrThrow()
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
+ describe(`${variant}: merge`, () => {
+ let ctx: TestContext
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(1n)
+ before(async function () {
+ ctx = await initTest(this, dialect)
})
- it('should perform a merge...using table simple on...when matched and simple binary cross ref then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatchedAndRef('person.first_name', '=', 'pet.name')
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."first_name" = "pet"."name" then delete',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."first_name" = "pet"."name" then delete;',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ beforeEach(async () => {
+ await insertDefaultDataSet(ctx)
})
- it('should perform a merge...using table simple on...when matched and complex and then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatchedAnd((eb) =>
- eb('person.gender', '=', 'female').and(
- 'person.first_name',
- '=',
- eb.ref('pet.name'),
- ),
- )
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = $1 and "person"."first_name" = "pet"."name") then delete',
- parameters: ['female'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = @1 and "person"."first_name" = "pet"."name") then delete;',
- parameters: ['female'],
- },
- sqlite: NOT_SUPPORTED,
- })
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ afterEach(async () => {
+ await clearDatabase(ctx)
})
- it('should perform a merge...using table simple on...when matched and complex or then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatchedAnd((eb) =>
- eb('person.gender', '=', 'female').or(
- 'person.first_name',
- '=',
- eb.ref('pet.name'),
- ),
- )
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = $1 or "person"."first_name" = "pet"."name") then delete',
- parameters: ['female'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = @1 or "person"."first_name" = "pet"."name") then delete;',
- parameters: ['female'],
- },
- sqlite: NOT_SUPPORTED,
- })
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(1n)
+ after(async () => {
+ await destroyTest(ctx)
})
- if (dialect === 'postgres') {
- it('should perform a merge...using table...when matched then do nothing query', async () => {
+ describe('using', () => {
+ it('should perform a merge...using table simple on...when matched then delete query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
.whenMatched()
- .thenDoNothing()
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then do nothing',
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete',
parameters: [],
},
mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
+ parameters: [],
+ },
sqlite: NOT_SUPPORTED,
})
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ expect(result.numChangedRows).to.equal(3n)
})
- }
- describe('update', () => {
- it('should perform a merge...using table simple on...when matched then update set object query', async () => {
+ it('should add a modifyEnd clause to the query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
+ .modifyEnd(sql.raw('-- this is a comment'))
.whenMatched()
- .thenUpdateSet({
- middle_name: 'pet owner',
- })
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = $1',
- parameters: ['pet owner'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete -- this is a comment',
+ parameters: [],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = @1;',
- parameters: ['pet owner'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete -- this is a comment;',
+ parameters: [],
},
sqlite: NOT_SUPPORTED,
})
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
})
- it('should perform a merge...using table simple on...when matched then update set object ref query', async () => {
+ it('should perform a merge...using table alias simple on alias...when matched then delete query', async () => {
const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
+ .mergeInto('person as pr')
+ .using('pet as pt', 'pt.owner_id', 'pr.id')
.whenMatched()
- .thenUpdateSet((eb) => ({
- first_name: eb.ref('person.last_name'),
- middle_name: eb.ref('pet.name'),
- }))
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name"',
+ sql: 'merge into "person" as "pr" using "pet" as "pt" on "pt"."owner_id" = "pr"."id" when matched then delete',
parameters: [],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name";',
+ sql: 'merge into "person" as "pr" using "pet" as "pt" on "pt"."owner_id" = "pr"."id" when matched then delete;',
parameters: [],
},
sqlite: NOT_SUPPORTED,
@@ -369,22 +110,26 @@ for (const dialect of DIALECTS.filter(
expect(result.numChangedRows).to.equal(3n)
})
- it('should perform a merge...using table simple on...when matched then update set column query', async () => {
+ it('should perform a merge...using table complex on...when matched then delete query', async () => {
const query = ctx.db
.mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
+ .using('pet', (on) =>
+ on
+ .onRef('pet.owner_id', '=', 'person.id')
+ .on('pet.name', '=', 'Lucky'),
+ )
.whenMatched()
- .thenUpdateSet('middle_name', 'pet owner')
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = $1',
- parameters: ['pet owner'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = $1 when matched then delete',
+ parameters: ['Lucky'],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = @1;',
- parameters: ['pet owner'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = @1 when matched then delete;',
+ parameters: ['Lucky'],
},
sqlite: NOT_SUPPORTED,
})
@@ -392,51 +137,56 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
+ expect(result.numChangedRows).to.equal(0n)
})
- it('should perform a merge...using table simple on...when matched then update set column ref query', async () => {
+ it('should perform a merge...using subquery simple on...when matched then delete query', async () => {
const query = ctx.db
.mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
+ .using(
+ ctx.db
+ .selectFrom('pet')
+ .select('owner_id')
+ .where('name', '=', 'Lucky')
+ .as('pet'),
+ 'pet.owner_id',
+ 'person.id',
+ )
.whenMatched()
- .thenUpdateSet('first_name', (eb) => eb.ref('person.last_name'))
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name"',
- parameters: [],
+ sql: 'merge into "person" using (select "owner_id" from "pet" where "name" = $1) as "pet" on "pet"."owner_id" = "person"."id" when matched then delete',
+ parameters: ['Lucky'],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name";',
- parameters: [],
+ sql: 'merge into "person" using (select "owner_id" from "pet" where "name" = @1) as "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
+ parameters: ['Lucky'],
},
sqlite: NOT_SUPPORTED,
})
-
- const result = await query.executeTakeFirstOrThrow()
-
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
})
+ })
- it('should perform a merge...using table simple on...when matched then update set column cross ref query', async () => {
+ describe('whenMatched', () => {
+ it('should perform a merge...using table simple on...when matched and simple binary then delete query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenUpdateSet('middle_name', (eb) => eb.ref('pet.name'))
+ .whenMatchedAnd('person.gender', '=', 'female')
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = "pet"."name"',
- parameters: [],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."gender" = $1 then delete',
+ parameters: ['female'],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = "pet"."name";',
- parameters: [],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."gender" = @1 then delete;',
+ parameters: ['female'],
},
sqlite: NOT_SUPPORTED,
})
@@ -444,32 +194,25 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
+ expect(result.numChangedRows).to.equal(1n)
})
- it('should perform a merge...using table simple on...when matched then update set complex query', async () => {
+ it('should perform a merge...using table simple on...when matched and simple binary cross ref then delete query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenUpdate((ub) =>
- ub
- .set('first_name', (eb) => eb.ref('person.last_name'))
- .set('middle_name', (eb) => eb.ref('pet.name'))
- .set({
- marital_status: 'single',
- }),
- )
+ .whenMatchedAndRef('person.first_name', '=', 'pet.name')
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name", "marital_status" = $1',
- parameters: ['single'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."first_name" = "pet"."name" then delete',
+ parameters: [],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name", "marital_status" = @1;',
- parameters: ['single'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and "person"."first_name" = "pet"."name" then delete;',
+ parameters: [],
},
sqlite: NOT_SUPPORTED,
})
@@ -477,27 +220,32 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
+ expect(result.numChangedRows).to.equal(0n)
})
- })
- })
- describe('whenNotMatched', () => {
- if (dialect === 'postgres') {
- it('should perform a merge...using table simple on...when not matched then do nothing query', async () => {
+ it('should perform a merge...using table simple on...when matched and complex and then delete query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatched()
- .thenDoNothing()
+ .whenMatchedAnd((eb) =>
+ eb('person.gender', '=', 'female').and(
+ 'person.first_name',
+ '=',
+ eb.ref('pet.name'),
+ ),
+ )
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched then do nothing',
- parameters: [],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = $1 and "person"."first_name" = "pet"."name") then delete',
+ parameters: ['female'],
},
mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = @1 and "person"."first_name" = "pet"."name") then delete;',
+ parameters: ['female'],
+ },
sqlite: NOT_SUPPORTED,
})
@@ -506,34 +254,29 @@ for (const dialect of DIALECTS.filter(
expect(result).to.be.instanceOf(MergeResult)
expect(result.numChangedRows).to.equal(0n)
})
- }
- describe('insert', () => {
- it('should perform a merge...using table complex on...when not matched then insert values query', async () => {
+ it('should perform a merge...using table simple on...when matched and complex or then delete query', async () => {
const query = ctx.db
.mergeInto('person')
- .using('pet', (on) =>
- on
- .onRef('pet.owner_id', '=', 'person.id')
- .on('pet.name', '=', 'NO_SUCH_PET_NAME'),
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenMatchedAnd((eb) =>
+ eb('person.gender', '=', 'female').or(
+ 'person.first_name',
+ '=',
+ eb.ref('pet.name'),
+ ),
)
- .whenNotMatched()
- .thenInsertValues({
- gender: 'male',
- first_name: 'Dingo',
- middle_name: 'the',
- last_name: 'Dog',
- })
+ .thenDelete()
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = $1 when not matched then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
- parameters: ['NO_SUCH_PET_NAME', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = $1 or "person"."first_name" = "pet"."name") then delete',
+ parameters: ['female'],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = @1 when not matched then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
- parameters: ['NO_SUCH_PET_NAME', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched and ("person"."gender" = @1 or "person"."first_name" = "pet"."name") then delete;',
+ parameters: ['female'],
},
sqlite: NOT_SUPPORTED,
})
@@ -541,31 +284,53 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
+ expect(result.numChangedRows).to.equal(1n)
})
- describe('And', () => {
- it('should perform a merge...using table simple on...when not matched and simple binary then insert values query', async () => {
+ if (sqlSpec === 'postgres') {
+ it('should perform a merge...using table...when matched then do nothing query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedAnd('pet.name', '=', 'Dingo')
- .thenInsertValues({
- gender: 'male',
- first_name: 'Dingo',
- middle_name: 'the',
- last_name: 'Dog',
+ .whenMatched()
+ .thenDoNothing()
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then do nothing',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
+ })
+ }
+
+ describe('update', () => {
+ it('should perform a merge...using table simple on...when matched then update set object query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenUpdateSet({
+ middle_name: 'pet owner',
})
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = $1 then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
- parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = $1',
+ parameters: ['pet owner'],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = @1 then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
- parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = @1;',
+ parameters: ['pet owner'],
},
sqlite: NOT_SUPPORTED,
})
@@ -573,30 +338,28 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ expect(result.numChangedRows).to.equal(3n)
})
- it('should perform a merge...using table simple on...when not matched and simple binary ref then insert values query', async () => {
+ it('should perform a merge...using table simple on...when matched then update set object ref query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedAndRef('pet.name', '=', 'pet.species')
- .thenInsertValues({
- gender: 'male',
- first_name: 'Dingo',
- middle_name: 'the',
- last_name: 'Dog',
- })
+ .whenMatched()
+ .thenUpdateSet((eb) => ({
+ first_name: eb.ref('person.last_name'),
+ middle_name: eb.ref('pet.name'),
+ }))
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = "pet"."species" then insert ("gender", "first_name", "middle_name", "last_name") values ($1, $2, $3, $4)',
- parameters: ['male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name"',
+ parameters: [],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = "pet"."species" then insert ("gender", "first_name", "middle_name", "last_name") values (@1, @2, @3, @4);',
- parameters: ['male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name";',
+ parameters: [],
},
sqlite: NOT_SUPPORTED,
})
@@ -604,36 +367,25 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ expect(result.numChangedRows).to.equal(3n)
})
- it('should perform a merge...using table simple on...when not matched and complex and then insert values query', async () => {
+ it('should perform a merge...using table simple on...when matched then update set column query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedAnd((eb) =>
- eb('pet.name', '=', 'Dingo').and(
- 'pet.name',
- '=',
- eb.ref('pet.name'),
- ),
- )
- .thenInsertValues({
- gender: 'male',
- first_name: 'Dingo',
- middle_name: 'the',
- last_name: 'Dog',
- })
+ .whenMatched()
+ .thenUpdateSet('middle_name', 'pet owner')
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = $1 and "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
- parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = $1',
+ parameters: ['pet owner'],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = @1 and "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
- parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = @1;',
+ parameters: ['pet owner'],
},
sqlite: NOT_SUPPORTED,
})
@@ -641,36 +393,25 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ expect(result.numChangedRows).to.equal(3n)
})
- it('should perform a merge...using table simple on...when not matched and complex or then insert values query', async () => {
+ it('should perform a merge...using table simple on...when matched then update set column ref query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedAnd((eb) =>
- eb('pet.name', '=', 'Dingo').or(
- 'pet.name',
- '=',
- eb.ref('pet.name'),
- ),
- )
- .thenInsertValues({
- gender: 'male',
- first_name: 'Dingo',
- middle_name: 'the',
- last_name: 'Dog',
- })
+ .whenMatched()
+ .thenUpdateSet('first_name', (eb) => eb.ref('person.last_name'))
testSql(query, dialect, {
postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = $1 or "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
- parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name"',
+ parameters: [],
},
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = @1 or "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
- parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name";',
+ parameters: [],
},
sqlite: NOT_SUPPORTED,
})
@@ -678,58 +419,86 @@ for (const dialect of DIALECTS.filter(
const result = await query.executeTakeFirstOrThrow()
expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ expect(result.numChangedRows).to.equal(3n)
})
- })
- it('should perform a merge...using table complex on...when not matched then insert values cross ref query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', (on) => on.on('pet.owner_id', 'is', null))
- .whenNotMatched()
- .thenInsertValues((eb) => ({
- gender: 'other',
- first_name: eb.ref('pet.name'),
- middle_name: 'the',
- last_name: eb.ref('pet.species'),
- }))
+ it('should perform a merge...using table simple on...when matched then update set column cross ref query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenUpdateSet('middle_name', (eb) => eb.ref('pet.name'))
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" is null when not matched then insert ("gender", "first_name", "middle_name", "last_name") values ($1, "pet"."name", $2, "pet"."species")',
- parameters: ['other', 'the'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" is null when not matched then insert ("gender", "first_name", "middle_name", "last_name") values (@1, "pet"."name", @2, "pet"."species");',
- parameters: ['other', 'the'],
- },
- sqlite: NOT_SUPPORTED,
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = "pet"."name"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "middle_name" = "pet"."name";',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(3n)
})
- const result = await query.executeTakeFirstOrThrow()
+ it('should perform a merge...using table simple on...when matched then update set complex query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenUpdate((ub) =>
+ ub
+ .set('first_name', (eb) => eb.ref('person.last_name'))
+ .set('middle_name', (eb) => eb.ref('pet.name'))
+ .set({
+ marital_status: 'single',
+ }),
+ )
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name", "marital_status" = $1',
+ parameters: ['single'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then update set "first_name" = "person"."last_name", "middle_name" = "pet"."name", "marital_status" = @1;',
+ parameters: ['single'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(3n)
+ })
})
})
- if (dialect === 'mssql') {
- describe('BySource', () => {
- it('should perform a merge...using table simple on...when not matched by source then delete query', async () => {
+ describe('whenNotMatched', () => {
+ if (sqlSpec === 'postgres') {
+ it('should perform a merge...using table simple on...when not matched then do nothing query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySource()
- .thenDelete()
+ .whenNotMatched()
+ .thenDoNothing()
testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then delete;',
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched then do nothing',
parameters: [],
},
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
sqlite: NOT_SUPPORTED,
})
@@ -738,21 +507,66 @@ for (const dialect of DIALECTS.filter(
expect(result).to.be.instanceOf(MergeResult)
expect(result.numChangedRows).to.equal(0n)
})
+ }
+
+ describe('insert', () => {
+ it('should perform a merge...using table complex on...when not matched then insert values query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', (on) =>
+ on
+ .onRef('pet.owner_id', '=', 'person.id')
+ .on('pet.name', '=', 'NO_SUCH_PET_NAME'),
+ )
+ .whenNotMatched()
+ .thenInsertValues({
+ gender: 'male',
+ first_name: 'Dingo',
+ middle_name: 'the',
+ last_name: 'Dog',
+ })
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = $1 when not matched then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
+ parameters: ['NO_SUCH_PET_NAME', 'male', 'Dingo', 'the', 'Dog'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" and "pet"."name" = @1 when not matched then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
+ parameters: ['NO_SUCH_PET_NAME', 'male', 'Dingo', 'the', 'Dog'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(3n)
+ })
describe('And', () => {
- it('should perform a merge...using table simple on...when not matched by source and simple binary then delete query', async () => {
+ it('should perform a merge...using table simple on...when not matched and simple binary then insert values query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySourceAnd('person.first_name', '=', 'Jennifer')
- .thenDelete()
+ .whenNotMatchedAnd('pet.name', '=', 'Dingo')
+ .thenInsertValues({
+ gender: 'male',
+ first_name: 'Dingo',
+ middle_name: 'the',
+ last_name: 'Dog',
+ })
testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = $1 then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
+ parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ },
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and "person"."first_name" = @1 then delete;',
- parameters: ['Jennifer'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = @1 then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
+ parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
},
sqlite: NOT_SUPPORTED,
})
@@ -763,23 +577,27 @@ for (const dialect of DIALECTS.filter(
expect(result.numChangedRows).to.equal(0n)
})
- it('should perform a merge...using table simple on...when not matched by source and simple binary ref then delete query', async () => {
+ it('should perform a merge...using table simple on...when not matched and simple binary ref then insert values query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySourceAndRef(
- 'person.first_name',
- '=',
- 'person.last_name',
- )
- .thenDelete()
+ .whenNotMatchedAndRef('pet.name', '=', 'pet.species')
+ .thenInsertValues({
+ gender: 'male',
+ first_name: 'Dingo',
+ middle_name: 'the',
+ last_name: 'Dog',
+ })
testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = "pet"."species" then insert ("gender", "first_name", "middle_name", "last_name") values ($1, $2, $3, $4)',
+ parameters: ['male', 'Dingo', 'the', 'Dog'],
+ },
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and "person"."first_name" = "person"."last_name" then delete;',
- parameters: [],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and "pet"."name" = "pet"."species" then insert ("gender", "first_name", "middle_name", "last_name") values (@1, @2, @3, @4);',
+ parameters: ['male', 'Dingo', 'the', 'Dog'],
},
sqlite: NOT_SUPPORTED,
})
@@ -790,25 +608,33 @@ for (const dialect of DIALECTS.filter(
expect(result.numChangedRows).to.equal(0n)
})
- it('should perform a merge...using table simple on...when not matched by source and complex and then delete query', async () => {
+ it('should perform a merge...using table simple on...when not matched and complex and then insert values query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySourceAnd((eb) =>
- eb('person.gender', '=', 'female').and(
- 'person.first_name',
+ .whenNotMatchedAnd((eb) =>
+ eb('pet.name', '=', 'Dingo').and(
+ 'pet.name',
'=',
- eb.ref('person.last_name'),
+ eb.ref('pet.name'),
),
)
- .thenDelete()
+ .thenInsertValues({
+ gender: 'male',
+ first_name: 'Dingo',
+ middle_name: 'the',
+ last_name: 'Dog',
+ })
testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = $1 and "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
+ parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ },
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and ("person"."gender" = @1 and "person"."first_name" = "person"."last_name") then delete;',
- parameters: ['female'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = @1 and "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
+ parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
},
sqlite: NOT_SUPPORTED,
})
@@ -819,25 +645,33 @@ for (const dialect of DIALECTS.filter(
expect(result.numChangedRows).to.equal(0n)
})
- it('should perform a merge...using table simple on...when not matched by source and complex or then delete query', async () => {
+ it('should perform a merge...using table simple on...when not matched and complex or then insert values query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySourceAnd((eb) =>
- eb('person.gender', '=', 'female').or(
- 'person.first_name',
+ .whenNotMatchedAnd((eb) =>
+ eb('pet.name', '=', 'Dingo').or(
+ 'pet.name',
'=',
- eb.ref('person.last_name'),
+ eb.ref('pet.name'),
),
)
- .thenDelete()
+ .thenInsertValues({
+ gender: 'male',
+ first_name: 'Dingo',
+ middle_name: 'the',
+ last_name: 'Dog',
+ })
testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = $1 or "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values ($2, $3, $4, $5)',
+ parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
+ },
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and ("person"."gender" = @1 or "person"."first_name" = "person"."last_name") then delete;',
- parameters: ['female'],
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched and ("pet"."name" = @1 or "pet"."name" = "pet"."name") then insert ("gender", "first_name", "middle_name", "last_name") values (@2, @3, @4, @5);',
+ parameters: ['Dingo', 'male', 'Dingo', 'the', 'Dog'],
},
sqlite: NOT_SUPPORTED,
})
@@ -849,46 +683,52 @@ for (const dialect of DIALECTS.filter(
})
})
- describe('update', () => {
- it('should perform a merge...using table simple on...when not matched by source then update set object query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySource()
- .thenUpdateSet({
- middle_name: 'pet owner',
- })
+ it('should perform a merge...using table complex on...when not matched then insert values cross ref query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', (on) => on.on('pet.owner_id', 'is', null))
+ .whenNotMatched()
+ .thenInsertValues((eb) => ({
+ gender: 'other',
+ first_name: eb.ref('pet.name'),
+ middle_name: 'the',
+ last_name: eb.ref('pet.species'),
+ }))
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "middle_name" = @1;',
- parameters: ['pet owner'],
- },
- sqlite: NOT_SUPPORTED,
- })
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" is null when not matched then insert ("gender", "first_name", "middle_name", "last_name") values ($1, "pet"."name", $2, "pet"."species")',
+ parameters: ['other', 'the'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" is null when not matched then insert ("gender", "first_name", "middle_name", "last_name") values (@1, "pet"."name", @2, "pet"."species");',
+ parameters: ['other', 'the'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- const result = await query.executeTakeFirstOrThrow()
+ const result = await query.executeTakeFirstOrThrow()
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
- })
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(3n)
+ })
+ })
- it('should perform a merge...using table simple on...when not matched by source then update set object ref query', async () => {
+ if (sqlSpec === 'mssql') {
+ describe('BySource', () => {
+ it('should perform a merge...using table simple on...when not matched by source then delete query', async () => {
const query = ctx.db
.mergeInto('person')
.using('pet', 'pet.owner_id', 'person.id')
.whenNotMatchedBySource()
- .thenUpdateSet((eb) => ({
- first_name: eb.ref('person.last_name'),
- }))
+ .thenDelete()
testSql(query, dialect, {
postgres: NOT_SUPPORTED,
mysql: NOT_SUPPORTED,
mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "first_name" = "person"."last_name";',
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then delete;',
parameters: [],
},
sqlite: NOT_SUPPORTED,
@@ -900,422 +740,600 @@ for (const dialect of DIALECTS.filter(
expect(result.numChangedRows).to.equal(0n)
})
- it('should perform a merge...using table simple on...when not matched by source then update set column query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySource()
- .thenUpdateSet('middle_name', 'pet owner')
+ describe('And', () => {
+ it('should perform a merge...using table simple on...when not matched by source and simple binary then delete query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySourceAnd(
+ 'person.first_name',
+ '=',
+ 'Jennifer',
+ )
+ .thenDelete()
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and "person"."first_name" = @1 then delete;',
+ parameters: ['Jennifer'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "middle_name" = @1;',
- parameters: ['pet owner'],
- },
- sqlite: NOT_SUPPORTED,
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
})
- const result = await query.executeTakeFirstOrThrow()
+ it('should perform a merge...using table simple on...when not matched by source and simple binary ref then delete query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySourceAndRef(
+ 'person.first_name',
+ '=',
+ 'person.last_name',
+ )
+ .thenDelete()
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and "person"."first_name" = "person"."last_name" then delete;',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
- })
+ const result = await query.executeTakeFirstOrThrow()
- it('should perform a merge...using table simple on...when not matched by source then update set column ref query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySource()
- .thenUpdateSet('first_name', (eb) => eb.ref('person.last_name'))
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
+ })
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "first_name" = "person"."last_name";',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ it('should perform a merge...using table simple on...when not matched by source and complex and then delete query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySourceAnd((eb) =>
+ eb('person.gender', '=', 'female').and(
+ 'person.first_name',
+ '=',
+ eb.ref('person.last_name'),
+ ),
+ )
+ .thenDelete()
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and ("person"."gender" = @1 and "person"."first_name" = "person"."last_name") then delete;',
+ parameters: ['female'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
})
- const result = await query.executeTakeFirstOrThrow()
+ it('should perform a merge...using table simple on...when not matched by source and complex or then delete query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySourceAnd((eb) =>
+ eb('person.gender', '=', 'female').or(
+ 'person.first_name',
+ '=',
+ eb.ref('person.last_name'),
+ ),
+ )
+ .thenDelete()
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source and ("person"."gender" = @1 or "person"."first_name" = "person"."last_name") then delete;',
+ parameters: ['female'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
+ })
})
- it('should perform a merge...using table simple on...when not matched by source then update set complex query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenNotMatchedBySource()
- .thenUpdate((ub) =>
- ub
- .set('first_name', (eb) => eb.ref('person.last_name'))
- .set({
- marital_status: 'single',
- }),
- )
+ describe('update', () => {
+ it('should perform a merge...using table simple on...when not matched by source then update set object query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySource()
+ .thenUpdateSet({
+ middle_name: 'pet owner',
+ })
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "middle_name" = @1;',
+ parameters: ['pet owner'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "first_name" = "person"."last_name", "marital_status" = @1;',
- parameters: ['single'],
- },
- sqlite: NOT_SUPPORTED,
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
})
- const result = await query.executeTakeFirstOrThrow()
+ it('should perform a merge...using table simple on...when not matched by source then update set object ref query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySource()
+ .thenUpdateSet((eb) => ({
+ first_name: eb.ref('person.last_name'),
+ }))
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "first_name" = "person"."last_name";',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(0n)
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
+ })
+
+ it('should perform a merge...using table simple on...when not matched by source then update set column query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySource()
+ .thenUpdateSet('middle_name', 'pet owner')
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "middle_name" = @1;',
+ parameters: ['pet owner'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
+ })
+
+ it('should perform a merge...using table simple on...when not matched by source then update set column ref query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySource()
+ .thenUpdateSet('first_name', (eb) =>
+ eb.ref('person.last_name'),
+ )
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "first_name" = "person"."last_name";',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
+ })
+
+ it('should perform a merge...using table simple on...when not matched by source then update set complex query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenNotMatchedBySource()
+ .thenUpdate((ub) =>
+ ub
+ .set('first_name', (eb) => eb.ref('person.last_name'))
+ .set({
+ marital_status: 'single',
+ }),
+ )
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when not matched by source then update set "first_name" = "person"."last_name", "marital_status" = @1;',
+ parameters: ['single'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
+
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(0n)
+ })
})
})
- })
- }
- })
+ }
+ })
- describe('multiple whens', () => {
- it('should perform a merge...using table simple on...when matched then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
- .whenNotMatched()
- .thenInsertValues((eb) => ({
- gender: 'other',
- first_name: eb.ref('pet.name'),
- middle_name: 'the',
- last_name: eb.ref('pet.species'),
- }))
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete when not matched then insert ("gender", "first_name", "middle_name", "last_name") values ($1, "pet"."name", $2, "pet"."species")',
- parameters: ['other', 'the'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete when not matched then insert ("gender", "first_name", "middle_name", "last_name") values (@1, "pet"."name", @2, "pet"."species");',
- parameters: ['other', 'the'],
- },
- sqlite: NOT_SUPPORTED,
- })
+ describe('multiple whens', () => {
+ it('should perform a merge...using table simple on...when matched then delete query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
+ .whenNotMatched()
+ .thenInsertValues((eb) => ({
+ gender: 'other',
+ first_name: eb.ref('pet.name'),
+ middle_name: 'the',
+ last_name: eb.ref('pet.species'),
+ }))
- const result = await query.executeTakeFirstOrThrow()
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete when not matched then insert ("gender", "first_name", "middle_name", "last_name") values ($1, "pet"."name", $2, "pet"."species")',
+ parameters: ['other', 'the'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete when not matched then insert ("gender", "first_name", "middle_name", "last_name") values (@1, "pet"."name", @2, "pet"."species");',
+ parameters: ['other', 'the'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(3n)
- })
- })
+ const result = await query.executeTakeFirstOrThrow()
- if (dialect === 'postgres') {
- it('should perform a merge...using table simple on...when matched then delete returning id query', async () => {
- const expected = await ctx.db.selectFrom('pet').select('id').execute()
-
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
- .returning('pet.id')
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning "pet"."id"',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(3n)
})
+ })
- const result = await query.execute()
+ if (sqlSpec === 'postgres') {
+ it('should perform a merge...using table simple on...when matched then delete returning id query', async () => {
+ const expected = await ctx.db.selectFrom('pet').select('id').execute()
- expect(result).to.eql(expected)
- })
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
+ .returning('pet.id')
- it('should perform a merge...using table simple on...when matched then update set name returning {target}.name, {source}.first_name query', async () => {
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenUpdateSet((eb) => ({
- name: sql`${eb.ref('person.first_name')} || '''s pet'`,
- }))
- .returning([
- 'pet.name as pet_name',
- 'person.first_name as owner_name',
- ])
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning "pet"."id"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.execute()
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then update set "name" = "person"."first_name" || \'\'\'s pet\' returning "pet"."name" as "pet_name", "person"."first_name" as "owner_name"',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
+ expect(result).to.eql(expected)
})
- const result = await query.execute()
+ it('should perform a merge...using table simple on...when matched then update set name returning {target}.name, {source}.first_name query', async () => {
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenUpdateSet((eb) => ({
+ name: sql`${eb.ref('person.first_name')} || '''s pet'`,
+ }))
+ .returning([
+ 'pet.name as pet_name',
+ 'person.first_name as owner_name',
+ ])
- expect(result).to.eql([
- { owner_name: 'Jennifer', pet_name: "Jennifer's pet" },
- { owner_name: 'Arnold', pet_name: "Arnold's pet" },
- { owner_name: 'Sylvester', pet_name: "Sylvester's pet" },
- ])
- })
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then update set "name" = "person"."first_name" || \'\'\'s pet\' returning "pet"."name" as "pet_name", "person"."first_name" as "owner_name"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.execute()
- it('should perform a merge...using table simple on...when matched then delete returning * query', async () => {
- const expected = await ctx.db
- .selectFrom('pet')
- .innerJoin('person', 'pet.owner_id', 'person.id')
- .selectAll()
- .execute()
-
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
- .returningAll()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning *',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
+ expect(result).to.eql([
+ { owner_name: 'Jennifer', pet_name: "Jennifer's pet" },
+ { owner_name: 'Arnold', pet_name: "Arnold's pet" },
+ { owner_name: 'Sylvester', pet_name: "Sylvester's pet" },
+ ])
})
- const result = await query.execute()
+ it('should perform a merge...using table simple on...when matched then delete returning * query', async () => {
+ const expected = await ctx.db
+ .selectFrom('pet')
+ .innerJoin('person', 'pet.owner_id', 'person.id')
+ .selectAll()
+ .execute()
- expect(result).to.eql(expected)
- })
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
+ .returningAll()
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning *',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
- it('should perform a merge...using table simple on...when matched then delete returning {target}.* query', async () => {
- const expected = await ctx.db.selectFrom('pet').selectAll().execute()
-
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
- .returningAll('pet')
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning "pet".*',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
+ const result = await query.execute()
+
+ expect(result).to.eql(expected)
})
- const result = await query.execute()
+ it('should perform a merge...using table simple on...when matched then delete returning {target}.* query', async () => {
+ const expected = await ctx.db.selectFrom('pet').selectAll().execute()
- expect(result).to.eql(expected)
- })
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
+ .returningAll('pet')
- it('should perform a merge...using table simple on...when matched then delete returning {source}.* query', async () => {
- const expected = await ctx.db
- .selectFrom('pet')
- .innerJoin('person', 'pet.owner_id', 'person.id')
- .selectAll('person')
- .execute()
-
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
- .returningAll('person')
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning "person".*',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
- })
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning "pet".*',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
- const result = await query.execute()
+ const result = await query.execute()
- expect(result).to.eql(expected)
- })
+ expect(result).to.eql(expected)
+ })
- it('should perform a merge...using table simple on...when matched then delete returning merge_action(), {target}.name', async () => {
- await ctx.db.connection().execute(async (db) => {
- await ctx.db
- .insertInto('person')
- .values({ first_name: 'Moshe', gender: 'other' })
+ it('should perform a merge...using table simple on...when matched then delete returning {source}.* query', async () => {
+ const expected = await ctx.db
+ .selectFrom('pet')
+ .innerJoin('person', 'pet.owner_id', 'person.id')
+ .selectAll('person')
.execute()
- await sql`SET session_replication_role = 'replica'`.execute(db)
- await db
- .insertInto('pet')
- .values({
- name: 'Ralph',
- owner_id: 9999,
- species: 'hamster',
- })
- .execute()
- await sql`SET session_replication_role = 'origin'`.execute(db)
- })
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
+ .returningAll('person')
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenUpdateSet(
- 'name',
- (eb) => sql`${eb.ref('person.first_name')} || '''s pet'`,
- )
- .whenNotMatched()
- .thenInsertValues((eb) => ({
- name: sql`${eb.ref('person.first_name')} || '''s pet'`,
- owner_id: eb.ref('person.id'),
- species: 'hamster',
- }))
- .whenNotMatchedBySource()
- .thenDelete()
- .returning([mergeAction().as('action'), 'pet.name'])
-
- testSql(query, dialect, {
- postgres: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then update set "name" = "person"."first_name" || \'\'\'s pet\' when not matched then insert ("name", "owner_id", "species") values ("person"."first_name" || \'\'\'s pet\', "person"."id", $1) when not matched by source then delete returning merge_action() as "action", "pet"."name"',
- parameters: ['hamster'],
- },
- mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
- sqlite: NOT_SUPPORTED,
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete returning "person".*',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.execute()
+
+ expect(result).to.eql(expected)
})
- const result = await query.execute()
+ it('should perform a merge...using table simple on...when matched then delete returning merge_action(), {target}.name', async () => {
+ await ctx.db.connection().execute(async (db) => {
+ await db
+ .insertInto('person')
+ .values({ first_name: 'Moshe', gender: 'other' })
+ .execute()
+
+ await sql`SET session_replication_role = 'replica'`.execute(db)
+ await db
+ .insertInto('pet')
+ .values({
+ name: 'Ralph',
+ owner_id: 9999,
+ species: 'hamster',
+ })
+ .execute()
+ await sql`SET session_replication_role = 'origin'`.execute(db)
+ })
- expect(result).to.eql([
- { action: 'UPDATE', name: "Jennifer's pet" },
- { action: 'UPDATE', name: "Arnold's pet" },
- { action: 'UPDATE', name: "Sylvester's pet" },
- { action: 'DELETE', name: 'Ralph' },
- { action: 'INSERT', name: "Moshe's pet" },
- ])
- })
- }
-
- if (dialect === 'mssql') {
- it('should perform a merge top...using table simple on...when matched then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .top(1)
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge top(1) into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenUpdateSet(
+ 'name',
+ (eb) => sql`${eb.ref('person.first_name')} || '''s pet'`,
+ )
+ .whenNotMatched()
+ .thenInsertValues((eb) => ({
+ name: sql`${eb.ref('person.first_name')} || '''s pet'`,
+ owner_id: eb.ref('person.id'),
+ species: 'hamster',
+ }))
+ .whenNotMatchedBySource()
+ .thenDelete()
+ .returning([mergeAction().as('action'), 'pet.name'])
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then update set "name" = "person"."first_name" || \'\'\'s pet\' when not matched then insert ("name", "owner_id", "species") values ("person"."first_name" || \'\'\'s pet\', "person"."id", $1) when not matched by source then delete returning merge_action() as "action", "pet"."name"',
+ parameters: ['hamster'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.execute()
+
+ expect(result).to.eql(
+ variant === 'pglite'
+ ? [
+ { action: 'UPDATE', name: "Jennifer's pet" },
+ { action: 'UPDATE', name: "Arnold's pet" },
+ { action: 'UPDATE', name: "Sylvester's pet" },
+ { action: 'INSERT', name: "Moshe's pet" },
+ { action: 'DELETE', name: 'Ralph' },
+ ]
+ : [
+ { action: 'UPDATE', name: "Jennifer's pet" },
+ { action: 'UPDATE', name: "Arnold's pet" },
+ { action: 'UPDATE', name: "Sylvester's pet" },
+ { action: 'DELETE', name: 'Ralph' },
+ { action: 'INSERT', name: "Moshe's pet" },
+ ],
+ )
})
+ }
+
+ if (sqlSpec === 'mssql') {
+ it('should perform a merge top...using table simple on...when matched then delete query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .top(1)
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
- const result = await query.executeTakeFirstOrThrow()
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge top(1) into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(1n)
- })
+ const result = await query.executeTakeFirstOrThrow()
- it('should perform a merge top percent...using table simple on...when matched then delete query', async () => {
- const query = ctx.db
- .mergeInto('person')
- .top(50, 'percent')
- .using('pet', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge top(50) percent into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(1n)
})
- const result = await query.executeTakeFirstOrThrow()
+ it('should perform a merge top percent...using table simple on...when matched then delete query', async () => {
+ const query = ctx.db
+ .mergeInto('person')
+ .top(50, 'percent')
+ .using('pet', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
- expect(result).to.be.instanceOf(MergeResult)
- expect(result.numChangedRows).to.equal(2n)
- })
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge top(50) percent into "person" using "pet" on "pet"."owner_id" = "person"."id" when matched then delete;',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.executeTakeFirstOrThrow()
- it('should perform a merge...using table simple on...when matched then delete output id query', async () => {
- const expected = await ctx.db.selectFrom('pet').select('id').execute()
-
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenDelete()
- .output('deleted.id')
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete output "deleted"."id";',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ expect(result).to.be.instanceOf(MergeResult)
+ expect(result.numChangedRows).to.equal(2n)
})
- const result = await query.execute()
+ it('should perform a merge...using table simple on...when matched then delete output id query', async () => {
+ const expected = await ctx.db.selectFrom('pet').select('id').execute()
- expect(result).to.eql(expected)
- })
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenDelete()
+ .output('deleted.id')
+
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then delete output "deleted"."id";',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.execute()
- it('should perform a merge...using table simple on...when matched then update set name output deleted.name, inserted.name query', async () => {
- const query = ctx.db
- .mergeInto('pet')
- .using('person', 'pet.owner_id', 'person.id')
- .whenMatched()
- .thenUpdateSet((eb) => ({
- name: sql`${eb.ref('person.first_name')} + '''s pet'`,
- }))
- .output(['deleted.name as old_name', 'inserted.name as new_name'])
-
- testSql(query, dialect, {
- postgres: NOT_SUPPORTED,
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then update set "name" = "person"."first_name" + \'\'\'s pet\' output "deleted"."name" as "old_name", "inserted"."name" as "new_name";',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ expect(result).to.eql(expected)
})
- const result = await query.execute()
+ it('should perform a merge...using table simple on...when matched then update set name output deleted.name, inserted.name query', async () => {
+ const query = ctx.db
+ .mergeInto('pet')
+ .using('person', 'pet.owner_id', 'person.id')
+ .whenMatched()
+ .thenUpdateSet((eb) => ({
+ name: sql`${eb.ref('person.first_name')} + '''s pet'`,
+ }))
+ .output(['deleted.name as old_name', 'inserted.name as new_name'])
- expect(result).to.eql([
- { old_name: 'Catto', new_name: "Jennifer's pet" },
- { old_name: 'Doggo', new_name: "Arnold's pet" },
- { old_name: 'Hammo', new_name: "Sylvester's pet" },
- ])
- })
- }
- })
+ testSql(query, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'merge into "pet" using "person" on "pet"."owner_id" = "person"."id" when matched then update set "name" = "person"."first_name" + \'\'\'s pet\' output "deleted"."name" as "old_name", "inserted"."name" as "new_name";',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ const result = await query.execute()
+
+ expect(result).to.eql([
+ { old_name: 'Catto', new_name: "Jennifer's pet" },
+ { old_name: 'Doggo', new_name: "Arnold's pet" },
+ { old_name: 'Hammo', new_name: "Sylvester's pet" },
+ ])
+ })
+ }
+ })
+ }
}
diff --git a/test/node/src/migration.test.ts b/test/node/src/migration.test.ts
index bc93dce3f..ffeec7597 100644
--- a/test/node/src/migration.test.ts
+++ b/test/node/src/migration.test.ts
@@ -10,7 +10,6 @@ import {
Migrator,
NO_MIGRATIONS,
MigratorProps,
- type QueryExecutor,
type Kysely,
} from '../../../'
@@ -30,7 +29,9 @@ const CUSTOM_MIGRATION_TABLE = 'custom_migrations'
const CUSTOM_MIGRATION_LOCK_TABLE = 'custom_migrations_lock'
for (const dialect of DIALECTS) {
- describe(`${dialect}: migration`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: migration`, () => {
let ctx: TestContext
before(async function () {
@@ -790,7 +791,7 @@ for (const dialect of DIALECTS) {
expect(executedUpMethods2).to.eql(['migration2', 'migration4'])
})
- it('should not execute in transaction if disableTransactions is true', async () => {
+ it('should not execute in transaction if disableTransactions is true on the `Migrator` instance', async () => {
const [migrator, executedUpMethods] = createMigrations(['migration1'], {
disableTransactions: true,
})
@@ -806,7 +807,25 @@ for (const dialect of DIALECTS) {
expect(transactionSpy.called).to.be.false
})
- it('should execute in transaction if disableTransactions is false and transactionDdl supported', async () => {
+ it('should not execute in transaction if disableTransactions is true when calling `migrateUp`', async () => {
+ const [migrator, executedUpMethods] = createMigrations(['migration1'], {
+ disableTransactions: false,
+ })
+
+ const { results } = await migrator.migrateUp({
+ disableTransactions: true,
+ })
+
+ expect(results).to.eql([
+ { migrationName: 'migration1', direction: 'Up', status: 'Success' },
+ ])
+
+ expect(executedUpMethods).to.eql(['migration1'])
+
+ expect(transactionSpy.called).to.be.false
+ })
+
+ it('should execute in transaction if disableTransactions is false on the `Migrator` instance and transactionDdl supported', async () => {
const [migrator, executedUpMethods] = createMigrations(['migration1'], {
disableTransactions: false,
})
@@ -825,6 +844,28 @@ for (const dialect of DIALECTS) {
expect(transactionSpy.called).to.be.false
}
})
+
+ it('should execute in transaction if disableTransactions is false when calling `migrateUp` and transactionDdl supported', async () => {
+ const [migrator, executedUpMethods] = createMigrations(['migration1'], {
+ disableTransactions: true,
+ })
+
+ const { results } = await migrator.migrateUp({
+ disableTransactions: false,
+ })
+
+ expect(results).to.eql([
+ { migrationName: 'migration1', direction: 'Up', status: 'Success' },
+ ])
+
+ expect(executedUpMethods).to.eql(['migration1'])
+
+ if (ctx.db.getExecutor().adapter.supportsTransactionalDdl) {
+ expect(transactionSpy.called).to.be.true
+ } else {
+ expect(transactionSpy.called).to.be.false
+ }
+ })
})
describe('migrateDown', () => {
@@ -970,7 +1011,7 @@ for (const dialect of DIALECTS) {
})
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
describe('custom migration tables in a custom schema', () => {
it('should create custom migration tables in custom schema', async () => {
const [migrator, executedUpMethods] = createMigrations(
@@ -1018,7 +1059,7 @@ for (const dialect of DIALECTS) {
}
async function deleteMigrationTables(): Promise<void> {
- if (dialect !== 'sqlite') {
+ if (sqlSpec !== 'sqlite') {
await ctx.db.schema
.withSchema(CUSTOM_MIGRATION_SCHEMA)
.dropTable(CUSTOM_MIGRATION_TABLE)
diff --git a/test/node/src/order-by.test.ts b/test/node/src/order-by.test.ts
index 7595c0623..6bc8152d3 100644
--- a/test/node/src/order-by.test.ts
+++ b/test/node/src/order-by.test.ts
@@ -13,7 +13,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: order by`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: order by`, () => {
let ctx: TestContext
before(async function () {
@@ -297,7 +299,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('order by nulls first', async () => {
const query = ctx.db
.selectFrom('person')
@@ -351,7 +353,7 @@ for (const dialect of DIALECTS) {
mysql: 'utf8mb4_general_ci',
mssql: 'Latin1_General_CI_AS',
sqlite: 'nocase',
- }[dialect]
+ }[sqlSpec]
const query = ctx.db
.selectFrom('person')
@@ -381,7 +383,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('order by raw expression in direction', async () => {
const query = ctx.db
.selectFrom('person')
diff --git a/test/node/src/performance.test.ts b/test/node/src/performance.test.ts
index 3059a4f38..563fb7d03 100644
--- a/test/node/src/performance.test.ts
+++ b/test/node/src/performance.test.ts
@@ -9,12 +9,14 @@ import {
DIALECTS,
} from './test-setup.js'
-if (DIALECTS.includes('postgres')) {
- describe.skip(`query builder performance`, () => {
+for (const dialect of DIALECTS) {
+ const { variant } = dialect
+
+ describe.skip(`${variant}: query builder performance`, () => {
let ctx: TestContext
before(async function () {
- ctx = await initTest(this, 'postgres')
+ ctx = await initTest(this, dialect)
})
beforeEach(async () => {
diff --git a/test/node/src/raw-query.test.ts b/test/node/src/raw-query.test.ts
index f5726682e..f1deeb363 100644
--- a/test/node/src/raw-query.test.ts
+++ b/test/node/src/raw-query.test.ts
@@ -12,7 +12,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: raw queries`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: raw queries`, () => {
let ctx: TestContext
before(async function () {
@@ -42,7 +44,11 @@ for (const dialect of DIALECTS) {
expect(result.insertId).to.equal(undefined)
expect(result.numAffectedRows).to.equal(
- dialect === 'mssql' ? 2n : undefined,
+ {
+ [variant]: undefined,
+ mssql: 2n,
+ pglite: 0n,
+ }[variant],
)
expect(result.rows).to.eql([
{ first_name: 'Arnold' },
@@ -73,7 +79,7 @@ for (const dialect of DIALECTS) {
expect(result.rows).to.eql([])
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should run a raw insert query', async () => {
const firstName = 'New'
const lastName = 'Personsson'
@@ -91,7 +97,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should run a raw insert query', async () => {
const firstName = 'New'
const lastName = 'Personsson'
diff --git a/test/node/src/raw-sql.test.ts b/test/node/src/raw-sql.test.ts
index 48614a7be..25c5983ad 100644
--- a/test/node/src/raw-sql.test.ts
+++ b/test/node/src/raw-sql.test.ts
@@ -18,7 +18,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: raw sql`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: raw sql`, () => {
let ctx: TestContext
before(async function () {
@@ -157,14 +159,14 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('sql.id should separate multiple arguments by dots', async () => {
const query = ctx.db
.selectFrom('person')
.selectAll()
.where(
sql`${sql.id(
- dialect === 'postgres' ? 'public' : 'dbo',
+ sqlSpec === 'postgres' ? 'public' : 'dbo',
'person',
'first_name',
)} between ${'A'} and ${'B'}`,
@@ -215,14 +217,14 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('sql.ref should support schemas and table names', async () => {
const query = ctx.db
.selectFrom('person')
.selectAll()
.where(
sql`${sql.ref(
- `${dialect === 'postgres' ? 'public' : 'dbo'}.person.first_name`,
+ `${sqlSpec === 'postgres' ? 'public' : 'dbo'}.person.first_name`,
)} between ${'A'} and ${'B'}`,
)
@@ -270,12 +272,12 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('sql.table should support schemas', async () => {
const query = ctx.db
.selectFrom(
sql`${sql.table(
- `${dialect === 'postgres' ? 'public' : 'dbo'}.person`,
+ `${sqlSpec === 'postgres' ? 'public' : 'dbo'}.person`,
)}`.as('person'),
)
.selectAll()
@@ -327,7 +329,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('second argument of sql.join should specify the separator', async () => {
const names = ['Jennifer', 'Arnold', 'Sylvester']
@@ -352,7 +354,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('CompiledQuery should support raw query with parameters', async () => {
const query = CompiledQuery.raw(
'select * from "person" where "public"."person"."first_name" between $1 and $2',
diff --git a/test/node/src/replace.test.ts b/test/node/src/replace.test.ts
index facfc1385..c5ff9c2e4 100644
--- a/test/node/src/replace.test.ts
+++ b/test/node/src/replace.test.ts
@@ -14,8 +14,10 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- if (dialect === 'mysql' || dialect === 'sqlite') {
- describe(`${dialect}: replace into`, () => {
+ const { sqlSpec, variant } = dialect
+
+ if (sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
+ describe(`${variant}: replace into`, () => {
let ctx: TestContext
before(async function () {
diff --git a/test/node/src/sanitize-identifiers.test.ts b/test/node/src/sanitize-identifiers.test.ts
index 24673672f..4005bd3a5 100644
--- a/test/node/src/sanitize-identifiers.test.ts
+++ b/test/node/src/sanitize-identifiers.test.ts
@@ -6,12 +6,13 @@ import {
TestContext,
Person,
testSql,
- NOT_SUPPORTED,
DIALECTS,
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: sanitize identifiers`, () => {
+ const { variant } = dialect
+
+ describe(`${variant}: sanitize identifiers`, () => {
let ctx: TestContext
before(async function () {
diff --git a/test/node/src/schema.test.ts b/test/node/src/schema.test.ts
index 868d0a7b7..e1f181e99 100644
--- a/test/node/src/schema.test.ts
+++ b/test/node/src/schema.test.ts
@@ -13,7 +13,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: schema`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: schema`, () => {
let ctx: TestContext
before(async function () {
@@ -31,7 +33,7 @@ for (const dialect of DIALECTS) {
})
describe('create table', () => {
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a table with all data types', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -193,7 +195,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- } else if (dialect === 'mysql') {
+ } else if (sqlSpec === 'mysql') {
it('should create a table with all data types', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -298,7 +300,7 @@ for (const dialect of DIALECTS) {
name: 'k',
})
})
- } else if (dialect === 'mssql') {
+ } else if (sqlSpec === 'mssql') {
it('should create a table with all data types', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -383,7 +385,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- } else if (dialect === 'sqlite') {
+ } else if (sqlSpec === 'sqlite') {
it('should create a table with all data types', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -515,7 +517,34 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'mysql') {
+ it('should create a table with a unique constraints using expressions', async () => {
+ const builder = ctx.db.schema
+ .createTable('test')
+ .addColumn('a', 'varchar(255)')
+ .addColumn('b', 'varchar(255)')
+ .addColumn('c', 'varchar(255)')
+ .addUniqueConstraint('a_b_unique', [
+ sql`(lower(a))`,
+ sql`(lower(b))`,
+ ])
+ .addUniqueConstraint('a_c_unique', [sql`(lower(a))`, 'c'])
+
+ testSql(builder, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ sql: 'create table `test` (`a` varchar(255), `b` varchar(255), `c` varchar(255), constraint `a_b_unique` unique ((lower(a)), (lower(b))), constraint `a_c_unique` unique ((lower(a)), `c`))',
+ parameters: [],
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await builder.execute()
+ })
+ }
+
+ if (sqlSpec === 'postgres') {
it('should create a table with a unique constraint and "nulls not distinct" option', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -630,7 +659,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a table with a deferrable primary key constraint', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -753,7 +782,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect !== 'mysql' && dialect !== 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should create a table with deferrable foreign key constraints', async () => {
await ctx.db.schema
.createTable('test2')
@@ -812,7 +841,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('should support schemas in foreign key target table', async () => {
await ctx.db.schema
.createTable('test2')
@@ -828,7 +857,7 @@ for (const dialect of DIALECTS) {
.addForeignKeyConstraint(
'foreign_key',
['a', 'b'],
- dialect === 'postgres' ? 'public.test2' : 'dbo.test2',
+ sqlSpec === 'postgres' ? 'public.test2' : 'dbo.test2',
['c', 'd'],
)
@@ -892,9 +921,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
it("should create a table if it doesn't already exist", async () => {
const builder = ctx.db.schema
@@ -1003,7 +1032,7 @@ for (const dialect of DIALECTS) {
it('should create a table with as expression and raw sql', async () => {
let rawSql = sql`select "first_name", "last_name" from "person"`
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
rawSql = sql`select \`first_name\`, \`last_name\` from \`person\``
}
@@ -1054,7 +1083,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should create a temporary table', async () => {
await ctx.db.connection().execute(async (conn) => {
const builder = conn.schema
@@ -1086,7 +1115,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a temporary table with on commit statement', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1108,8 +1137,8 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mssql') {
- const schema = dialect === 'postgres' ? 'public' : 'dbo'
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
+ const schema = sqlSpec === 'postgres' ? 'public' : 'dbo'
it('should create a table in specific schema', async () => {
const builder = ctx.db.schema
@@ -1136,7 +1165,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a table with generated identity', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1158,7 +1187,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a table with generated identity (by default)', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1180,7 +1209,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a global temporary table', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1208,7 +1237,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a table partitioned by country', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1238,7 +1267,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should create a table partitioned by country', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1268,7 +1297,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
it('should create a strict table', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1296,7 +1325,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should create a table while using modifiers to define columns', async () => {
const builder = ctx.db.schema
.createTable('test')
@@ -1441,7 +1470,11 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect == 'postgres' || dialect === 'mysql' || dialect === 'mssql') {
+ if (
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
+ ) {
it('should drop a table cascade', async () => {
const builder = ctx.db.schema.dropTable('test').cascade()
testSql(builder, dialect, {
@@ -1521,7 +1554,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should create an index if not exists', async () => {
await ctx.db.schema
.createIndex('test_first_name_index')
@@ -1581,7 +1614,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should create an index with a type', async () => {
const builder = ctx.db.schema
.createIndex('test_first_name_index')
@@ -1606,7 +1639,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create an index with "nulls not distinct" modifier', async () => {
const builder = ctx.db.schema
.createIndex('test_first_name_index')
@@ -1683,9 +1716,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
it('should create an index for an expression', async () => {
const builder = ctx.db.schema
@@ -1770,9 +1803,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mssql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mssql' ||
+ sqlSpec === 'sqlite'
) {
it('should create a partial index, single column', async () => {
const builder = ctx.db.schema
@@ -1832,7 +1865,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should create a partial index, multi-column, or', async () => {
const builder = ctx.db.schema
.createIndex('test_partial_index')
@@ -1881,7 +1914,7 @@ for (const dialect of DIALECTS) {
it('should drop an index', async () => {
let builder = ctx.db.schema.dropIndex('test_first_name_index')
- if (dialect === 'mysql' || dialect === 'mssql') {
+ if (sqlSpec === 'mysql' || sqlSpec === 'mssql') {
builder = builder.on('test')
}
@@ -1908,16 +1941,16 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mssql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mssql' ||
+ sqlSpec === 'sqlite'
) {
it('should drop an index if it exists', async () => {
let builder = ctx.db.schema
.dropIndex('test_first_name_index')
.ifExists()
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
builder = builder.on('test')
}
@@ -1941,7 +1974,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should drop an index cascade', async () => {
let builder = ctx.db.schema
.dropIndex('test_first_name_index')
@@ -2012,7 +2045,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should create a temporary view', async () => {
const builder = ctx.db.schema
.createView('dogs')
@@ -2038,7 +2071,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should create or replace a view', async () => {
const builder = ctx.db.schema
.createView('dogs')
@@ -2064,7 +2097,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
it("should create a view if it doesn't exists", async () => {
const builder = ctx.db.schema
.createView('dogs')
@@ -2087,7 +2120,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a materialized view', async () => {
const builder = ctx.db.schema
.createView('materialized_dogs')
@@ -2113,7 +2146,7 @@ for (const dialect of DIALECTS) {
async function cleanup() {
await ctx.db.schema.dropView('dogs').ifExists().execute()
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
await ctx.db.schema
.dropView('materialized_dogs')
.materialized()
@@ -2140,7 +2173,7 @@ for (const dialect of DIALECTS) {
.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should refresh a materialized view', async () => {
const builder =
ctx.db.schema.refreshMaterializedView('materialized_dogs')
@@ -2266,7 +2299,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should drop a view cascade', async () => {
const builder = ctx.db.schema.dropView('dogs').cascade()
@@ -2312,9 +2345,9 @@ for (const dialect of DIALECTS) {
afterEach(cleanup)
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
it('should create a schema', async () => {
const builder = ctx.db.schema.createSchema('pets')
@@ -2339,7 +2372,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should create a schema if not exists', async () => {
const builder = ctx.db.schema.createSchema('pets').ifNotExists()
@@ -2370,9 +2403,9 @@ for (const dialect of DIALECTS) {
afterEach(cleanup)
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
it('should drop a schema', async () => {
await ctx.db.schema.createSchema('pets').execute()
@@ -2421,7 +2454,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should drop a schema cascade', async () => {
await ctx.db.schema.createSchema('pets').execute()
const builder = ctx.db.schema.dropSchema('pets').cascade()
@@ -2462,7 +2495,7 @@ for (const dialect of DIALECTS) {
})
describe('create type', () => {
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
beforeEach(cleanup)
afterEach(cleanup)
@@ -2491,7 +2524,7 @@ for (const dialect of DIALECTS) {
})
describe('drop type', () => {
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
beforeEach(cleanup)
afterEach(cleanup)
@@ -2572,7 +2605,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should add a column with "unique nulls not distinct" modifier', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -2612,7 +2645,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should add a unique column', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -2636,15 +2669,15 @@ for (const dialect of DIALECTS) {
expect(await getColumnMeta('test.bool_col')).to.containSubset({
name: 'bool_col',
isNullable: false,
- dataType: dialect === 'postgres' ? 'bool' : 'tinyint',
+ dataType: sqlSpec === 'postgres' ? 'bool' : 'tinyint',
})
})
}
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
it('should add multiple columns', async () => {
const builder = ctx.db.schema
@@ -2685,7 +2718,7 @@ for (const dialect of DIALECTS) {
}
})
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
describe('modify column', () => {
it('should set column data type', async () => {
const builder = ctx.db.schema
@@ -2780,12 +2813,12 @@ for (const dialect of DIALECTS) {
}
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
describe('alter column', () => {
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should set default value', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -2835,7 +2868,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('should set column data type', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -2879,7 +2912,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should add not null constraint for column', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -2922,7 +2955,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should alter multiple columns', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -2985,9 +3018,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
it('should drop multiple columns', async () => {
await ctx.db.schema
@@ -3041,9 +3074,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
describe('rename', () => {
it('should rename a table', async () => {
@@ -3070,7 +3103,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
describe('set schema', () => {
it('should rename a table', async () => {
const builder = ctx.db.schema.alterTable('test').setSchema('public')
@@ -3091,9 +3124,9 @@ for (const dialect of DIALECTS) {
}
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
describe('rename column', () => {
it('should rename a column', async () => {
@@ -3120,7 +3153,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should rename multiple columns', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -3147,9 +3180,9 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
describe('mixed column alterations', () => {
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should alter multiple columns in various ways', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -3176,7 +3209,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should alter multiple columns in various ways', async () => {
await ctx.db.schema
.alterTable('test')
@@ -3215,7 +3248,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect !== 'sqlite') {
+ if (sqlSpec !== 'sqlite') {
describe('add unique constraint', () => {
it('should add a unique constraint', async () => {
const builder = ctx.db.schema
@@ -3247,7 +3280,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should add a unique constraint with "nulls not distinct" modifier', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -3361,10 +3394,33 @@ for (const dialect of DIALECTS) {
})
}
+ if (sqlSpec === 'mysql') {
+ it('should add a unique constraint using expressions', async () => {
+ const builder = ctx.db.schema
+ .alterTable('test')
+ .addUniqueConstraint('unique_constraint', [
+ sql`(lower(varchar_col))`,
+ 'integer_col',
+ ])
+
+ testSql(builder, dialect, {
+ postgres: NOT_SUPPORTED,
+ mysql: {
+ sql: 'alter table `test` add constraint `unique_constraint` unique ((lower(varchar_col)), `integer_col`)',
+ parameters: [],
+ },
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await builder.execute()
+ })
+ }
+
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
describe('add check constraint', () => {
it('should add a check constraint', async () => {
@@ -3397,9 +3453,9 @@ for (const dialect of DIALECTS) {
}
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
describe('add foreign key constraint', () => {
it('should add a foreign key constraint', async () => {
@@ -3478,7 +3534,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should add a deferrable initially deferred foreign key constraint', async () => {
await ctx.db.schema
.createTable('test2')
@@ -3511,7 +3567,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect !== 'sqlite') {
+ if (sqlSpec !== 'sqlite') {
describe('drop constraint', () => {
it('should drop a foreign key constraint', async () => {
await ctx.db.schema.dropTable('test').execute()
@@ -3557,7 +3613,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
describe('rename constraint', () => {
it('should rename a foreign key constraint', async () => {
await ctx.db.schema.dropTable('test').execute()
@@ -3601,9 +3657,9 @@ for (const dialect of DIALECTS) {
}
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
describe('add primary key constraint', async () => {
beforeEach(() => {
@@ -3615,7 +3671,7 @@ for (const dialect of DIALECTS) {
})
afterEach(async () => {
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
await ctx.db.schema
.alterTable('test')
.dropConstraint('test_pkey')
@@ -3653,7 +3709,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should add a deferrable initially deferred primary key constraint', async () => {
const builder = ctx.db.schema
.alterTable('test')
@@ -3705,9 +3761,9 @@ for (const dialect of DIALECTS) {
}
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
describe('parse schema name', () => {
beforeEach(cleanup)
@@ -3782,7 +3838,7 @@ for (const dialect of DIALECTS) {
await builder.execute()
})
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
describe('add index', () => {
it('should add an index', async () => {
const query = ctx.db.schema
@@ -3902,7 +3958,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
describe('drop index', () => {
beforeEach(async () => {
await ctx.db.schema
diff --git a/test/node/src/select.test.ts b/test/node/src/select.test.ts
index 54d246030..122cf836b 100644
--- a/test/node/src/select.test.ts
+++ b/test/node/src/select.test.ts
@@ -19,7 +19,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: select`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: select`, () => {
let ctx: TestContext
before(async function () {
@@ -164,7 +166,7 @@ for (const dialect of DIALECTS) {
])
})
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('should select all columns of a table with a schema', async () => {
const query = ctx.db
.selectFrom('toy_schema.toy')
@@ -377,7 +379,10 @@ for (const dialect of DIALECTS) {
const counts = await query.execute()
expect(counts).to.have.length(1)
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (
+ (sqlSpec === 'postgres' && variant !== 'pglite') ||
+ sqlSpec === 'mysql'
+ ) {
expect(counts[0]).to.eql({ num_jennifers: '1' })
} else {
expect(counts[0]).to.eql({ num_jennifers: 1 })
@@ -423,13 +428,17 @@ for (const dialect of DIALECTS) {
expect(counts).to.have.length(1)
expect(counts[0]).to.eql({
- num_jennifers: dialect === 'postgres' || dialect === 'mysql' ? '1' : 1,
+ num_jennifers:
+ (sqlSpec === 'postgres' && variant !== 'pglite') ||
+ sqlSpec === 'mysql'
+ ? '1'
+ : 1,
})
})
// Raw expressions are of course supported on all dialects, but we use an
// expression that's only valid on postgres or mssql.
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('should select one field using a raw expression', async () => {
const query = ctx.db
.selectFrom('person')
@@ -466,7 +475,7 @@ for (const dialect of DIALECTS) {
it('should select multiple fields', async () => {
const fullName =
- dialect === 'mysql' || dialect === 'mssql'
+ sqlSpec === 'mysql' || sqlSpec === 'mssql'
? sql`concat(first_name, ' ', last_name)`
: sql`first_name || ' ' || last_name`
@@ -653,7 +662,7 @@ for (const dialect of DIALECTS) {
expect(persons).to.eql([{ gender: 'female' }, { gender: 'male' }])
})
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('should select a row for update', async () => {
const query = ctx.db
.selectFrom('person')
@@ -788,7 +797,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should select with distinct on', async () => {
const query = ctx.db
.selectFrom('person')
@@ -969,7 +978,7 @@ for (const dialect of DIALECTS) {
expect(persons).to.eql([{ gender: 'female' }, { gender: 'male' }])
})
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('modifyEnd should add arbitrary SQL to the end of the query', async () => {
const query = ctx.db
.selectFrom('person')
@@ -997,36 +1006,41 @@ for (const dialect of DIALECTS) {
})
}
- it('should stream results', async () => {
- const males: unknown[] = []
+ if (variant !== 'pglite') {
+ it('should stream results', async () => {
+ const males: unknown[] = []
- const stream = ctx.db
- .selectFrom('person')
- .select(['first_name', 'last_name', 'gender'])
- .where('gender', '=', 'male')
- .orderBy('first_name')
- .stream()
+ const stream = ctx.db
+ .selectFrom('person')
+ .select(['first_name', 'last_name', 'gender'])
+ .where('gender', '=', 'male')
+ .orderBy('first_name')
+ .stream()
- for await (const male of stream) {
- males.push(male)
- }
+ for await (const male of stream) {
+ males.push(male)
+ }
- expect(males).to.have.length(2)
- expect(males).to.eql([
- {
- first_name: 'Arnold',
- last_name: 'Schwarzenegger',
- gender: 'male',
- },
- {
- first_name: 'Sylvester',
- last_name: 'Stallone',
- gender: 'male',
- },
- ])
- })
+ expect(males).to.have.length(2)
+ expect(males).to.eql([
+ {
+ first_name: 'Arnold',
+ last_name: 'Schwarzenegger',
+ gender: 'male',
+ },
+ {
+ first_name: 'Sylvester',
+ last_name: 'Stallone',
+ gender: 'male',
+ },
+ ])
+ })
+ }
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (
+ (sqlSpec === 'postgres' && variant !== 'pglite') ||
+ sqlSpec === 'mssql'
+ ) {
it('should stream results with a specific chunk size', async () => {
const males: unknown[] = []
@@ -1057,27 +1071,29 @@ for (const dialect of DIALECTS) {
})
}
- it('should release connection on premature async iterator stop', async () => {
- for (let i = 0; i <= POOL_SIZE + 1; i++) {
- const stream = ctx.db.selectFrom('person').selectAll().stream()
+ if (variant !== 'pglite') {
+ it('should release connection on premature async iterator stop', async () => {
+ for (let i = 0; i <= POOL_SIZE + 1; i++) {
+ const stream = ctx.db.selectFrom('person').selectAll().stream()
- for await (const _ of stream) {
- break
+ for await (const _ of stream) {
+ break
+ }
}
- }
- })
+ })
- it('should release connection on premature async iterator stop when using a specific chunk size', async () => {
- for (let i = 0; i <= POOL_SIZE + 1; i++) {
- const stream = ctx.db.selectFrom('person').selectAll().stream(1)
+ it('should release connection on premature async iterator stop when using a specific chunk size', async () => {
+ for (let i = 0; i <= POOL_SIZE + 1; i++) {
+ const stream = ctx.db.selectFrom('person').selectAll().stream(1)
- for await (const _ of stream) {
- break
+ for await (const _ of stream) {
+ break
+ }
}
- }
- })
+ })
+ }
- if (dialect === 'postgres') {
+ if (variant === 'postgres') {
it('should throw an error if the cursor implementation is not provided for the postgres dialect', async () => {
const db = new Kysely({
dialect: new PostgresDialect({
@@ -1102,7 +1118,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('should create a select query with limit and offset', async () => {
const query = ctx.db
.selectFrom('person')
@@ -1184,7 +1200,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a select query with limit null', async () => {
const query = ctx.db
.selectFrom('person')
@@ -1238,7 +1254,7 @@ for (const dialect of DIALECTS) {
const result = await query.execute()
expect(result).to.have.length(1)
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
// For some weird reason, MySQL returns `one` as a string.
expect(result[0]).to.eql({ person_first_name: 'Arnold' })
} else {
@@ -1246,7 +1262,7 @@ for (const dialect of DIALECTS) {
}
})
- if (dialect === 'postgres' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
it('should create a select query with order by, offset and fetch', async () => {
const query = ctx.db
.selectFrom('person')
@@ -1298,7 +1314,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a select query with order by, offset and fetch with ties', async () => {
const query = ctx.db
.selectFrom('person')
@@ -1321,7 +1337,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should create a select query with top', async () => {
const query = ctx.db.selectFrom('person').select('first_name').top(2)
diff --git a/test/node/src/set-operation.test.ts b/test/node/src/set-operation.test.ts
index 3c7f7abec..1b077629c 100644
--- a/test/node/src/set-operation.test.ts
+++ b/test/node/src/set-operation.test.ts
@@ -11,7 +11,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: set operations`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: set operations`, () => {
let ctx: TestContext
before(async function () {
@@ -153,7 +155,7 @@ for (const dialect of DIALECTS) {
])
})
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'mssql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'mssql') {
it('should combine three select queries using union and an expression builder', async () => {
const query = ctx.db
.selectFrom('person')
@@ -263,7 +265,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres' || dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql' || sqlSpec === 'sqlite') {
it('should combine two select queries using intersect', async () => {
const query = ctx.db
.selectFrom('person')
@@ -346,7 +348,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should combine two select queries using intersect all', async () => {
const query = ctx.db
.selectFrom('person')
diff --git a/test/node/src/sql-injection.test.ts b/test/node/src/sql-injection.test.ts
index 6ff8a3070..0c0a8e00d 100644
--- a/test/node/src/sql-injection.test.ts
+++ b/test/node/src/sql-injection.test.ts
@@ -3,9 +3,11 @@ import { sql } from '../../../'
import { destroyTest, DIALECTS, initTest, TestContext } from './test-setup'
for (const dialect of DIALECTS) {
- describe(`${dialect}: select`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: select`, () => {
let ctx: TestContext
- const identifierWrapper = dialect === 'mysql' ? '`' : '"'
+ const identifierWrapper = sqlSpec === 'mysql' ? '`' : '"'
before(async function () {
ctx = await initTest(this, dialect)
diff --git a/test/node/src/stream.test.ts b/test/node/src/stream.test.ts
index 427ee1152..a3cc7eb56 100644
--- a/test/node/src/stream.test.ts
+++ b/test/node/src/stream.test.ts
@@ -12,30 +12,30 @@ import { Request } from 'tedious'
import { SinonSandbox, SinonSpy, createSandbox } from 'sinon'
const CHUNK_SIZE = 10
-const DIALECT = 'mssql'
const ITEM_COUNT = 100
const TABLE = 'stream_test'
-if (DIALECTS.includes(DIALECT)) {
- describe(`mssql: stream`, () => {
- let sandbox: SinonSandbox
- let pauseSpy: SinonSpy
- let resumeSpy: SinonSpy
- let ctx: Omit & {
- db: Kysely } }>
- }
+for (const dialect of DIALECTS) {
+ if (dialect.variant === 'mssql') {
+ describe(`mssql: stream`, () => {
+ let sandbox: SinonSandbox
+ let pauseSpy: SinonSpy
+ let resumeSpy: SinonSpy
+ let ctx: Omit & {
+ db: Kysely } }>
+ }
- before(async function () {
- sandbox = createSandbox()
- pauseSpy = sandbox.spy(Request.prototype, 'pause')
- resumeSpy = sandbox.spy(Request.prototype, 'resume')
+ before(async function () {
+ sandbox = createSandbox()
+ pauseSpy = sandbox.spy(Request.prototype, 'pause')
+ resumeSpy = sandbox.spy(Request.prototype, 'resume')
- ctx = (await initTest(this, DIALECT)) as any
- try {
- await ctx.db.schema.dropTable(TABLE).execute()
- } catch (err) {}
- await createTableWithId(ctx.db.schema, DIALECT, TABLE).execute()
- await sql`
+ ctx = (await initTest(this, dialect)) as any
+ try {
+ await ctx.db.schema.dropTable(TABLE).execute()
+ } catch (err) {}
+ await createTableWithId(ctx.db.schema, dialect, TABLE).execute()
+ await sql`
set identity_insert ${sql.table(TABLE)} on;
with cteNums(n) AS (
SELECT 1
@@ -48,24 +48,25 @@ if (DIALECTS.includes(DIALECT)) {
OPTION (MAXRECURSION ${sql.lit(ITEM_COUNT)});
set identity_insert ${sql.table(TABLE)} off
`.execute(ctx.db)
- })
+ })
- after(async () => {
- await ctx.db.schema.dropTable(TABLE).execute()
- await destroyTest(ctx as any)
- sandbox.restore()
- })
+ after(async () => {
+ await ctx.db.schema.dropTable(TABLE).execute()
+ await destroyTest(ctx as any)
+ sandbox.restore()
+ })
- it('should pause/resume the request according to chunk size', async () => {
- for await (const _ of ctx.db
- .selectFrom(TABLE)
- .selectAll()
- .stream(CHUNK_SIZE));
+ it('should pause/resume the request according to chunk size', async () => {
+ for await (const _ of ctx.db
+ .selectFrom(TABLE)
+ .selectAll()
+ .stream(CHUNK_SIZE));
- const chunks = Math.ceil(ITEM_COUNT / CHUNK_SIZE)
+ const chunks = Math.ceil(ITEM_COUNT / CHUNK_SIZE)
- expect(pauseSpy.callCount).to.equal(chunks)
- expect(resumeSpy.callCount).to.equal(chunks + 1)
+ expect(pauseSpy.callCount).to.equal(chunks)
+ expect(resumeSpy.callCount).to.equal(chunks + 1)
+ })
})
- })
+ }
}
diff --git a/test/node/src/test-setup.ts b/test/node/src/test-setup.ts
index b23853f61..2812fb198 100644
--- a/test/node/src/test-setup.ts
+++ b/test/node/src/test-setup.ts
@@ -7,6 +7,7 @@ import * as Database from 'better-sqlite3'
import * as Tarn from 'tarn'
import * as Tedious from 'tedious'
import { PoolOptions } from 'mysql2'
+import { PGlite } from '@electric-sql/pglite'
chai.use(chaiAsPromised)
@@ -33,6 +34,7 @@ import {
InsertObject,
MssqlDialect,
SelectQueryBuilder,
+ PGliteDialect,
} from '../../../'
import {
OrderByDirection,
@@ -84,22 +86,36 @@ interface PetInsertParams extends Omit {
}
export interface TestContext {
- dialect: BuiltInDialect
+ dialect: DialectDescriptor
config: KyselyConfig
 db: Kysely<Database>
}
-export type BuiltInDialect = 'postgres' | 'mysql' | 'mssql' | 'sqlite'
-export type PerDialect<T> = Record<BuiltInDialect, T>
+export type SQLSpec = 'postgres' | 'mysql' | 'mssql' | 'sqlite'
-export const DIALECTS: BuiltInDialect[] = (
- ['postgres', 'mysql', 'mssql', 'sqlite'] as const
+export type DialectVariant = SQLSpec | 'pglite'
+
+export interface DialectDescriptor {
+ sqlSpec: SQLSpec
+ variant: DialectVariant
+}
+
+export type PerDialectVariant<T> = Record<DialectVariant, T>
+export type PerSQLDialect<T> = Record<SQLSpec, T>
+
+export const DIALECTS = (
+ [
+ { sqlSpec: 'postgres', variant: 'postgres' },
+ { sqlSpec: 'mysql', variant: 'mysql' },
+ { sqlSpec: 'mssql', variant: 'mssql' },
+ { sqlSpec: 'sqlite', variant: 'sqlite' },
+ { sqlSpec: 'postgres', variant: 'pglite' },
+ ] as const satisfies readonly DialectDescriptor[]
).filter(
- (d) =>
- !process.env.DIALECTS ||
- process.env.DIALECTS.split(',')
+ ({ variant }) =>
+ process.env.DIALECTS?.split(',')
.map((it) => it.trim())
- .includes(d),
+ .includes(variant) ?? true,
)
const TEST_INIT_TIMEOUT = 5 * 60 * 1000
@@ -161,14 +177,17 @@ const SQLITE_CONFIG = {
databasePath: ':memory:',
}
+const PGLITE_CONFIG = {}
+
export const DIALECT_CONFIGS = {
postgres: POSTGRES_CONFIG,
mysql: MYSQL_CONFIG,
mssql: MSSQL_CONFIG,
sqlite: SQLITE_CONFIG,
+ pglite: PGLITE_CONFIG,
}
-export const DB_CONFIGS: PerDialect<KyselyConfig> = {
+export const DB_CONFIGS: PerDialectVariant<KyselyConfig> = {
postgres: {
dialect: new PostgresDialect({
pool: async () => new Pool(DIALECT_CONFIGS.postgres),
@@ -213,14 +232,21 @@ export const DB_CONFIGS: PerDialect = {
}),
plugins: PLUGINS,
},
+
+ pglite: {
+ dialect: new PGliteDialect({
+ pglite: async () => new PGlite(DIALECT_CONFIGS.pglite),
+ }),
+ plugins: PLUGINS,
+ },
}
export async function initTest(
ctx: Mocha.Context,
- dialect: BuiltInDialect,
+ dialect: DialectDescriptor,
 overrides?: Omit<KyselyConfig, 'dialect'>,
): Promise<TestContext> {
- const config = DB_CONFIGS[dialect]
+ const config = DB_CONFIGS[dialect.variant]
ctx.timeout(TEST_INIT_TIMEOUT)
const db = await connect({ ...config, ...overrides })
@@ -230,7 +256,10 @@ export async function initTest(
}
export async function destroyTest(ctx: TestContext): Promise<void> {
- await dropDatabase(ctx.db)
+ if (ctx.dialect.variant !== 'pglite' && ctx.dialect.variant !== 'sqlite') {
+ await dropDatabase(ctx.db)
+ }
+
await ctx.db.destroy()
}
@@ -288,10 +317,20 @@ export async function clearDatabase(ctx: TestContext): Promise {
export function testSql(
query: Compilable,
- dialect: BuiltInDialect,
- expectedPerDialect: PerDialect<{ sql: string | string[]; parameters: any[] }>,
+ dialect: DialectDescriptor,
+ expectedPerDialect: PerSQLDialect<{
+ sql: string | string[]
+ parameters: any[]
+ }> &
+ Partial<
+ Omit<
+ PerDialectVariant<{ sql: string | string[]; parameters: any[] }>,
+ keyof PerSQLDialect
+ >
+ >,
): void {
- const expected = expectedPerDialect[dialect]
+ const expected =
+ expectedPerDialect[dialect.variant] || expectedPerDialect[dialect.sqlSpec]
const expectedSql = Array.isArray(expected.sql)
? expected.sql.map((it) => it.trim()).join(' ')
: expected.sql
@@ -303,9 +342,13 @@ export function testSql(
async function createDatabase(
 db: Kysely<Database>,
- dialect: BuiltInDialect,
+ dialect: DialectDescriptor,
): Promise<void> {
- await dropDatabase(db)
+ const { sqlSpec, variant } = dialect
+
+ if (variant !== 'pglite' && variant !== 'sqlite') {
+ await dropDatabase(db)
+ }
await createTableWithId(db.schema, dialect, 'person')
.addColumn('first_name', 'varchar(255)')
@@ -328,14 +371,14 @@ async function createDatabase(
.addColumn('name', 'varchar(255)', (col) => col.notNull())
.addColumn('pet_id', 'integer', (col) => col.references('pet.id').notNull())
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
await createToyTableBase
.addColumn('price', 'double precision', (col) => col.notNull())
.execute()
await sql`COMMENT ON COLUMN toy.price IS 'Price in USD';`.execute(db)
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
await createToyTableBase
.addColumn('price', 'double precision', (col) => col.notNull())
.execute()
@@ -344,7 +387,7 @@ async function createDatabase(
)
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
await createToyTableBase
.addColumn('price', 'double precision', (col) =>
col.notNull().modifyEnd(sql`comment ${sql.lit('Price in USD')}`),
@@ -352,7 +395,7 @@ async function createDatabase(
.execute()
}
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
// there is no way to add a comment
await createToyTableBase
.addColumn('price', 'double precision', (col) => col.notNull())
@@ -368,24 +411,24 @@ async function createDatabase(
export function createTableWithId(
schema: SchemaModule,
- dialect: BuiltInDialect,
+ dialect: DialectDescriptor,
tableName: string,
implicitIncrement: boolean = false,
) {
const builder = schema.createTable(tableName)
- if (dialect === 'postgres') {
+ if (dialect.sqlSpec === 'postgres') {
return builder.addColumn('id', 'serial', (col) => col.primaryKey())
}
- if (dialect === 'mssql') {
+ if (dialect.sqlSpec === 'mssql') {
return builder.addColumn('id', 'integer', (col) =>
col.identity().notNull().primaryKey(),
)
}
return builder.addColumn('id', 'integer', (col) => {
- if (implicitIncrement && dialect === 'sqlite') {
+ if (implicitIncrement && dialect.sqlSpec === 'sqlite') {
return col.primaryKey()
}
return col.autoIncrement().primaryKey()
@@ -460,13 +503,13 @@ export async function insert(
): Promise<number> {
const { dialect } = ctx
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (dialect.sqlSpec === 'postgres' || dialect.sqlSpec === 'sqlite') {
const { id } = await qb.returning('id').executeTakeFirstOrThrow()
return id
}
- if (dialect === 'mssql') {
+ if (dialect.sqlSpec === 'mssql') {
const { id } = await qb
.output('inserted.id' as any)
.$castTo<{ id: number }>()
@@ -502,10 +545,10 @@ function sleep(millis: number): Promise {
export function limit<QB extends SelectQueryBuilder<any, any, any>>(
limit: number,
- dialect: BuiltInDialect,
+ dialect: DialectDescriptor,
): (qb: QB) => QB {
return (qb) => {
- if (dialect === 'mssql') {
+ if (dialect.sqlSpec === 'mssql') {
return qb.top(limit) as QB
}
@@ -518,10 +561,10 @@ export function orderBy>(
? OrderByExpression
: never,
direction: OrderByDirection | undefined,
- dialect: BuiltInDialect,
+ dialect: DialectDescriptor,
): (qb: QB) => QB {
return (qb) => {
- if (dialect === 'mssql') {
+ if (dialect.sqlSpec === 'mssql') {
return qb.orderBy(
orderBy,
sql`${sql.raw(direction ? `${direction} ` : '')}${sql.raw(
diff --git a/test/node/src/transaction.test.ts b/test/node/src/transaction.test.ts
index ee7059c8f..40c510bb6 100644
--- a/test/node/src/transaction.test.ts
+++ b/test/node/src/transaction.test.ts
@@ -16,7 +16,9 @@ import { DatabaseError as PostgresError } from 'pg'
import { SqliteError } from 'better-sqlite3'
for (const dialect of DIALECTS) {
- describe(`${dialect}: transaction`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: transaction`, () => {
let ctx: TestContext
const executedQueries: CompiledQuery[] = []
const sandbox = sinon.createSandbox()
@@ -61,13 +63,17 @@ for (const dialect of DIALECTS) {
await destroyTest(ctx)
})
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'mssql') {
+ if (
+ (sqlSpec === 'postgres' && variant !== 'pglite') ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
+ ) {
for (const isolationLevel of [
'read uncommitted',
'read committed',
'repeatable read',
'serializable',
- ...(dialect === 'mssql' ? (['snapshot'] as const) : []),
+ ...(sqlSpec === 'mssql' ? (['snapshot'] as const) : []),
] as const) {
it(`should set the transaction isolation level as "${isolationLevel}"`, async () => {
await ctx.db
@@ -84,7 +90,7 @@ for (const dialect of DIALECTS) {
.execute()
})
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
expect(tediousBeginTransactionSpy.calledOnce).to.be.true
expect(tediousBeginTransactionSpy.getCall(0).args[1]).to.not.be
.undefined
@@ -132,13 +138,16 @@ for (const dialect of DIALECTS) {
parameters: ['Foo', 'Barson', 'male'],
},
],
- }[dialect],
+ }[sqlSpec],
)
})
}
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (
+ (sqlSpec === 'postgres' && variant !== 'pglite') ||
+ sqlSpec === 'mysql'
+ ) {
for (const accessMode of TRANSACTION_ACCESS_MODES) {
it(`should set the transaction access mode as "${accessMode}"`, async () => {
await ctx.db
@@ -166,13 +175,13 @@ for (const dialect of DIALECTS) {
{ sql: 'select * from `person`', parameters: [] },
{ sql: 'commit', parameters: [] },
],
- }[dialect],
+ }[sqlSpec],
)
})
}
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should be able to start a transaction with a single connection', async () => {
const result = await ctx.db.connection().execute((db) => {
return db.transaction().execute((trx) => {
@@ -239,9 +248,9 @@ for (const dialect of DIALECTS) {
expect.fail('Expected transaction to fail')
} catch (error) {
- if (dialect === 'sqlite') {
+ if (sqlSpec === 'sqlite') {
expect(error).to.be.instanceOf(SqliteError)
- } else if (dialect === 'postgres') {
+ } else if (sqlSpec === 'postgres' && variant !== 'pglite') {
expect(error).to.be.instanceOf(PostgresError)
}
@@ -274,7 +283,7 @@ for (const dialect of DIALECTS) {
gender: 'other',
})
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
const compiledQuery = query.compile()
await trx.executeQuery(
diff --git a/test/node/src/update.test.ts b/test/node/src/update.test.ts
index 1997e787a..fe2f5ad39 100644
--- a/test/node/src/update.test.ts
+++ b/test/node/src/update.test.ts
@@ -15,7 +15,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: update`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: update`, () => {
let ctx: TestContext
before(async function () {
@@ -63,7 +65,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(UpdateResult)
expect(result.numUpdatedRows).to.equal(1n)
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
expect(result.numChangedRows).to.equal(1n)
} else {
expect(result.numChangedRows).to.undefined
@@ -114,7 +116,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(UpdateResult)
expect(result.numUpdatedRows).to.equal(1n)
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
expect(result.numChangedRows).to.equal(1n)
} else {
expect(result.numChangedRows).to.undefined
@@ -136,7 +138,7 @@ for (const dialect of DIALECTS) {
// mssql doesn't support table aliases in update clause, but it does support this
// with update alias set ... from table_name as alias
- if (dialect === 'postgres' || dialect === 'mysql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql' || sqlSpec === 'sqlite') {
it('should update one row with table alias', async () => {
const query = ctx.db
.updateTable('person as p')
@@ -163,7 +165,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(UpdateResult)
expect(result.numUpdatedRows).to.equal(1n)
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
expect(result.numChangedRows).to.equal(1n)
} else {
expect(result.numChangedRows).to.undefined
@@ -184,7 +186,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should update one row with table alias in from clause', async () => {
const query = ctx.db
.updateTable('p' as 'person')
@@ -257,7 +259,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(UpdateResult)
expect(result.numUpdatedRows).to.equal(1n)
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
expect(result.numChangedRows).to.equal(1n)
} else {
expect(result.numChangedRows).to.undefined
@@ -272,7 +274,7 @@ for (const dialect of DIALECTS) {
expect(person.last_name).to.equal('Catto')
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should update one row using an expression', async () => {
const query = ctx.db
.updateTable('person')
@@ -338,7 +340,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(UpdateResult)
expect(result.numUpdatedRows).to.equal(1n)
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
expect(result.numChangedRows).to.equal(1n)
} else {
expect(result.numChangedRows).to.undefined
@@ -382,7 +384,7 @@ for (const dialect of DIALECTS) {
expect(result).to.be.instanceOf(UpdateResult)
expect(result.numUpdatedRows).to.equal(1n)
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
expect(result.numChangedRows).to.equal(1n)
} else {
expect(result.numChangedRows).to.undefined
@@ -400,7 +402,7 @@ for (const dialect of DIALECTS) {
})
})
- if (dialect === 'postgres' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'sqlite') {
it('should update some rows and return updated rows when `returning` is used', async () => {
const query = ctx.db
.updateTable('person')
@@ -478,7 +480,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should update some rows and return joined rows when `returningAll` is used', async () => {
const query = ctx.db
.updateTable('person')
@@ -506,7 +508,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres' && variant !== 'pglite') {
it('should update multiple rows and stream returned results', async () => {
const stream = ctx.db
.updateTable('person')
@@ -531,7 +533,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mysql') {
+ if (sqlSpec === 'mysql') {
it('should update but not change the row', async () => {
const query = ctx.db
.updateTable('person')
@@ -747,7 +749,7 @@ for (const dialect of DIALECTS) {
}
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should update using a from clause and a join', async () => {
const query = ctx.db
.updateTable('pet as p')
@@ -783,7 +785,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mysql') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mysql') {
it('modifyEnd should add arbitrary SQL to the end of the query', async () => {
const query = ctx.db
.updateTable('person')
@@ -812,7 +814,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should update using a from clause and a join', async () => {
const query = ctx.db
.updateTable('p' as 'pet')
@@ -887,7 +889,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'mssql') {
+ if (sqlSpec === 'mssql') {
it('should update some rows and return updated rows when `output` is used', async () => {
const query = ctx.db
.updateTable('person')
diff --git a/test/node/src/where.test.ts b/test/node/src/where.test.ts
index d67a494e1..12fe9b0da 100644
--- a/test/node/src/where.test.ts
+++ b/test/node/src/where.test.ts
@@ -13,7 +13,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: where`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: where`, () => {
let ctx: TestContext
before(async function () {
@@ -274,9 +276,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
it('a raw instance and a boolean value', async () => {
const query = ctx.db
@@ -351,9 +353,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
it('a boolean subquery', async () => {
const query = ctx.db
@@ -514,9 +516,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
it('a `where in` query with tuples', async () => {
const query = ctx.db
@@ -922,9 +924,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'sqlite'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'sqlite'
) {
it('case expression', async () => {
const query = ctx.db
@@ -990,9 +992,9 @@ for (const dialect of DIALECTS) {
})
if (
- dialect === 'postgres' ||
- dialect === 'mysql' ||
- dialect === 'mssql'
+ sqlSpec === 'postgres' ||
+ sqlSpec === 'mysql' ||
+ sqlSpec === 'mssql'
) {
it('subquery inside `any` operator', async () => {
await ctx.db
diff --git a/test/node/src/with-schema.test.ts b/test/node/src/with-schema.test.ts
index 55e30889e..f5cae8236 100644
--- a/test/node/src/with-schema.test.ts
+++ b/test/node/src/with-schema.test.ts
@@ -11,592 +11,603 @@ import {
limit,
} from './test-setup.js'
-for (const dialect of DIALECTS.filter(
- (dialect) => dialect === 'postgres' || dialect === 'mssql',
-)) {
- describe(`${dialect}: with schema`, () => {
- let ctx: TestContext
-
- before(async function () {
- ctx = await initTest(this, dialect)
- await dropTables()
- await createTables()
- })
+for (const dialect of DIALECTS) {
+ const { sqlSpec, variant } = dialect
- beforeEach(async () => {
- const personId = await insert(
- ctx,
- ctx.db.insertInto('person').values({
- first_name: 'Foo',
- last_name: 'Bar',
- gender: 'other',
- }),
- )
-
- await ctx.db
- .withSchema('mammals')
- .insertInto('pet')
- .values({
- name: 'Catto',
- owner_id: personId,
- species: 'cat',
- })
- .execute()
- })
-
- afterEach(async () => {
- await ctx.db.withSchema('mammals').deleteFrom('pet').execute()
- await clearDatabase(ctx)
- })
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql') {
+ describe(`${variant}: with schema`, () => {
+ let ctx: TestContext
- after(async () => {
- await dropTables()
- await destroyTest(ctx)
- })
+ before(async function () {
+ ctx = await initTest(this, dialect)
+ await dropTables()
+ await createTables()
+ })
- describe('select from', () => {
- it('should add schema', async () => {
- const query = ctx.db.withSchema('mammals').selectFrom('pet').selectAll()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'select * from "mammals"."pet"',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'select * from "mammals"."pet"',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
+ beforeEach(async () => {
+ const personId = await insert(
+ ctx,
+ ctx.db.insertInto('person').values({
+ first_name: 'Foo',
+ last_name: 'Bar',
+ gender: 'other',
+ }),
+ )
- await query.execute()
+ await ctx.db
+ .withSchema('mammals')
+ .insertInto('pet')
+ .values({
+ name: 'Catto',
+ owner_id: personId,
+ species: 'cat',
+ })
+ .execute()
})
- it('should add schema for joins', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .selectFrom('pet as p')
- .leftJoin('pet', 'pet.id', 'p.id')
- .selectAll()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'select * from "mammals"."pet" as "p" left join "mammals"."pet" on "mammals"."pet"."id" = "p"."id"',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'select * from "mammals"."pet" as "p" left join "mammals"."pet" on "mammals"."pet"."id" = "p"."id"',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
+ afterEach(async () => {
+ await ctx.db.withSchema('mammals').deleteFrom('pet').execute()
+ await clearDatabase(ctx)
+ })
- await query.execute()
+ after(async () => {
+ await dropTables()
+ await destroyTest(ctx)
})
- it('should add schema for aliased joins', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .selectFrom('pet as p1')
- .leftJoin('pet as p2', 'p1.id', 'p2.id')
- .selectAll()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'select * from "mammals"."pet" as "p1" left join "mammals"."pet" as "p2" on "p1"."id" = "p2"."id"',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'select * from "mammals"."pet" as "p1" left join "mammals"."pet" as "p2" on "p1"."id" = "p2"."id"',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
+ describe('select from', () => {
+ it('should add schema', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .selectFrom('pet')
+ .selectAll()
- await query.execute()
- })
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'select * from "mammals"."pet"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'select * from "mammals"."pet"',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- it('should not add schema for aliases', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .selectFrom('pet as p')
- .select('p.name')
-
- testSql(query, dialect, {
- postgres: {
- sql: 'select "p"."name" from "mammals"."pet" as "p"',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'select "p"."name" from "mammals"."pet" as "p"',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ await query.execute()
})
- await query.execute()
- })
+ it('should add schema for joins', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .selectFrom('pet as p')
+ .leftJoin('pet', 'pet.id', 'p.id')
+ .selectAll()
- it('should add schema for subqueries', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .selectFrom('pet')
- .select([
- 'pet.name',
- (qb) =>
- qb
- .selectFrom('pet as p')
- .select('name')
- .whereRef('p.id', '=', 'pet.id')
- .as('p_name'),
- ])
-
- testSql(query, dialect, {
- postgres: {
- sql: [
- 'select "mammals"."pet"."name",',
- '(select "name" from "mammals"."pet" as "p" where "p"."id" = "mammals"."pet"."id") as "p_name"',
- 'from "mammals"."pet"',
- ],
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: [
- 'select "mammals"."pet"."name",',
- '(select "name" from "mammals"."pet" as "p" where "p"."id" = "mammals"."pet"."id") as "p_name"',
- 'from "mammals"."pet"',
- ],
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'select * from "mammals"."pet" as "p" left join "mammals"."pet" on "mammals"."pet"."id" = "p"."id"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'select * from "mammals"."pet" as "p" left join "mammals"."pet" on "mammals"."pet"."id" = "p"."id"',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
})
- await query.execute()
- })
+ it('should add schema for aliased joins', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .selectFrom('pet as p1')
+ .leftJoin('pet as p2', 'p1.id', 'p2.id')
+ .selectAll()
- it('subqueries should use their own schema if specified', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .selectFrom('pet')
- .select([
- 'pet.name',
- (qb) =>
- qb
- .withSchema(dialect === 'postgres' ? 'public' : 'dbo')
- .selectFrom('person')
- .select('first_name')
- .whereRef('pet.owner_id', '=', 'person.id')
- .as('owner_first_name'),
- ])
-
- testSql(query, dialect, {
- postgres: {
- sql: [
- 'select "mammals"."pet"."name",',
- '(select "first_name" from "public"."person" where "mammals"."pet"."owner_id" = "public"."person"."id") as "owner_first_name"',
- 'from "mammals"."pet"',
- ],
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: [
- 'select "mammals"."pet"."name",',
- '(select "first_name" from "dbo"."person" where "mammals"."pet"."owner_id" = "dbo"."person"."id") as "owner_first_name"',
- 'from "mammals"."pet"',
- ],
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'select * from "mammals"."pet" as "p1" left join "mammals"."pet" as "p2" on "p1"."id" = "p2"."id"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'select * from "mammals"."pet" as "p1" left join "mammals"."pet" as "p2" on "p1"."id" = "p2"."id"',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
})
- await query.execute()
- })
+ it('should not add schema for aliases', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .selectFrom('pet as p')
+ .select('p.name')
- if (dialect === 'postgres') {
- it('should not add schema for json_agg parameters', async () => {
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'select "p"."name" from "mammals"."pet" as "p"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'select "p"."name" from "mammals"."pet" as "p"',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
+
+ it('should add schema for subqueries', async () => {
const query = ctx.db
.withSchema('mammals')
.selectFrom('pet')
- .select((eb) => [
- eb.fn.jsonAgg('pet').as('one'),
- eb.fn.jsonAgg(eb.table('pet')).as('two'),
- eb.fn.jsonAgg('pet').orderBy('pet.name', 'desc').as('three'),
+ .select([
+ 'pet.name',
+ (qb) =>
+ qb
+ .selectFrom('pet as p')
+ .select('name')
+ .whereRef('p.id', '=', 'pet.id')
+ .as('p_name'),
])
testSql(query, dialect, {
postgres: {
- sql: 'select json_agg("pet") as "one", json_agg("pet") as "two", json_agg("pet" order by "mammals"."pet"."name" desc) as "three" from "mammals"."pet"',
+ sql: [
+ 'select "mammals"."pet"."name",',
+ '(select "name" from "mammals"."pet" as "p" where "p"."id" = "mammals"."pet"."id") as "p_name"',
+ 'from "mammals"."pet"',
+ ],
parameters: [],
},
mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
+ mssql: {
+ sql: [
+ 'select "mammals"."pet"."name",',
+ '(select "name" from "mammals"."pet" as "p" where "p"."id" = "mammals"."pet"."id") as "p_name"',
+ 'from "mammals"."pet"',
+ ],
+ parameters: [],
+ },
sqlite: NOT_SUPPORTED,
})
await query.execute()
})
- it('should not add schema for to_json parameters', async () => {
+ it('subqueries should use their own schema if specified', async () => {
const query = ctx.db
.withSchema('mammals')
.selectFrom('pet')
- .select((eb) => [
- eb.fn.toJson('pet').as('one'),
- eb.fn.toJson(eb.table('pet')).as('two'),
+ .select([
+ 'pet.name',
+ (qb) =>
+ qb
+ .withSchema(sqlSpec === 'postgres' ? 'public' : 'dbo')
+ .selectFrom('person')
+ .select('first_name')
+ .whereRef('pet.owner_id', '=', 'person.id')
+ .as('owner_first_name'),
])
testSql(query, dialect, {
postgres: {
- sql: 'select to_json("pet") as "one", to_json("pet") as "two" from "mammals"."pet"',
+ sql: [
+ 'select "mammals"."pet"."name",',
+ '(select "first_name" from "public"."person" where "mammals"."pet"."owner_id" = "public"."person"."id") as "owner_first_name"',
+ 'from "mammals"."pet"',
+ ],
parameters: [],
},
mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
+ mssql: {
+ sql: [
+ 'select "mammals"."pet"."name",',
+ '(select "first_name" from "dbo"."person" where "mammals"."pet"."owner_id" = "dbo"."person"."id") as "owner_first_name"',
+ 'from "mammals"."pet"',
+ ],
+ parameters: [],
+ },
sqlite: NOT_SUPPORTED,
})
await query.execute()
})
- }
- })
-
- describe('insert into', () => {
- it('should add schema', async () => {
- const [anyPerson] = await ctx.db
- .selectFrom('person')
- .selectAll()
- .$call(limit(1, dialect))
- .execute()
- const query = ctx.db
- .withSchema('mammals')
- .insertInto('pet')
- .values({
- name: 'Doggo',
- species: 'dog',
- owner_id: anyPerson.id,
+ if (sqlSpec === 'postgres') {
+ it('should not add schema for json_agg parameters', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .selectFrom('pet')
+ .select((eb) => [
+ eb.fn.jsonAgg('pet').as('one'),
+ eb.fn.jsonAgg(eb.table('pet')).as('two'),
+ eb.fn.jsonAgg('pet').orderBy('pet.name', 'desc').as('three'),
+ ])
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'select json_agg("pet") as "one", json_agg("pet") as "two", json_agg("pet" order by "mammals"."pet"."name" desc) as "three" from "mammals"."pet"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
})
- .$call((qb) => (dialect === 'postgres' ? qb.returning('pet.id') : qb))
-
- testSql(query, dialect, {
- postgres: {
- sql: 'insert into "mammals"."pet" ("name", "species", "owner_id") values ($1, $2, $3) returning "mammals"."pet"."id"',
- parameters: ['Doggo', 'dog', anyPerson.id],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'insert into "mammals"."pet" ("name", "species", "owner_id") values (@1, @2, @3)',
- parameters: ['Doggo', 'dog', anyPerson.id],
- },
- sqlite: NOT_SUPPORTED,
- })
- await query.execute()
+ it('should not add schema for to_json parameters', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .selectFrom('pet')
+ .select((eb) => [
+ eb.fn.toJson('pet').as('one'),
+ eb.fn.toJson(eb.table('pet')).as('two'),
+ ])
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'select to_json("pet") as "one", to_json("pet") as "two" from "mammals"."pet"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
+ }
})
- })
- describe('delete from', () => {
- it('should add schema', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .deleteFrom('pet')
- .where('pet.name', '=', 'Doggo')
-
- testSql(query, dialect, {
- postgres: {
- sql: 'delete from "mammals"."pet" where "mammals"."pet"."name" = $1',
- parameters: ['Doggo'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'delete from "mammals"."pet" where "mammals"."pet"."name" = @1',
- parameters: ['Doggo'],
- },
- sqlite: NOT_SUPPORTED,
- })
+ describe('insert into', () => {
+ it('should add schema', async () => {
+ const [anyPerson] = await ctx.db
+ .selectFrom('person')
+ .selectAll()
+ .$call(limit(1, dialect))
+ .execute()
- await query.execute()
- })
- })
+ const query = ctx.db
+ .withSchema('mammals')
+ .insertInto('pet')
+ .values({
+ name: 'Doggo',
+ species: 'dog',
+ owner_id: anyPerson.id,
+ })
+ .$call((qb) =>
+ sqlSpec === 'postgres' ? qb.returning('pet.id') : qb,
+ )
- describe('update', () => {
- it('should add schema', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .updateTable('pet')
- .where('pet.name', '=', 'Doggo')
- .set({ species: 'cat' })
-
- testSql(query, dialect, {
- postgres: {
- sql: 'update "mammals"."pet" set "species" = $1 where "mammals"."pet"."name" = $2',
- parameters: ['cat', 'Doggo'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'update "mammals"."pet" set "species" = @1 where "mammals"."pet"."name" = @2',
- parameters: ['cat', 'Doggo'],
- },
- sqlite: NOT_SUPPORTED,
- })
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'insert into "mammals"."pet" ("name", "species", "owner_id") values ($1, $2, $3) returning "mammals"."pet"."id"',
+ parameters: ['Doggo', 'dog', anyPerson.id],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'insert into "mammals"."pet" ("name", "species", "owner_id") values (@1, @2, @3)',
+ parameters: ['Doggo', 'dog', anyPerson.id],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- await query.execute()
+ await query.execute()
+ })
})
- })
- describe('with', () => {
- it('should not add schema for common table expression names', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .with('doggo', (db) =>
- db.selectFrom('pet').where('pet.name', '=', 'Doggo').selectAll(),
- )
- .selectFrom('doggo')
- .selectAll()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'with "doggo" as (select * from "mammals"."pet" where "mammals"."pet"."name" = $1) select * from "doggo"',
- parameters: ['Doggo'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'with "doggo" as (select * from "mammals"."pet" where "mammals"."pet"."name" = @1) select * from "doggo"',
- parameters: ['Doggo'],
- },
- sqlite: NOT_SUPPORTED,
- })
+ describe('delete from', () => {
+ it('should add schema', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .deleteFrom('pet')
+ .where('pet.name', '=', 'Doggo')
- await query.execute()
- })
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'delete from "mammals"."pet" where "mammals"."pet"."name" = $1',
+ parameters: ['Doggo'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'delete from "mammals"."pet" where "mammals"."pet"."name" = @1',
+ parameters: ['Doggo'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
- it('should not add schema for common table expression names in subqueries', async () => {
- const query = ctx.db
- .withSchema('mammals')
- .with('doggo', (qb) =>
- qb.selectFrom('pet').where('name', '=', 'Doggo').select('pet.id'),
- )
- .selectFrom('pet')
- .select((eb) => [
- 'pet.id',
- eb.selectFrom('doggo').select('id').as('doggo_id'),
- ])
- .selectAll()
-
- testSql(query, dialect, {
- postgres: {
- sql: 'with "doggo" as (select "mammals"."pet"."id" from "mammals"."pet" where "name" = $1) select "mammals"."pet"."id", (select "id" from "doggo") as "doggo_id", * from "mammals"."pet"',
- parameters: ['Doggo'],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'with "doggo" as (select "mammals"."pet"."id" from "mammals"."pet" where "name" = @1) select "mammals"."pet"."id", (select "id" from "doggo") as "doggo_id", * from "mammals"."pet"',
- parameters: ['Doggo'],
- },
- sqlite: NOT_SUPPORTED,
+ await query.execute()
})
-
- await query.execute()
})
- })
- describe('create table', () => {
- afterEach(async () => {
- await ctx.db.schema
- .withSchema('mammals')
- .dropTable('foo')
- .ifExists()
- .execute()
+ describe('update', () => {
+ it('should add schema', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .updateTable('pet')
+ .where('pet.name', '=', 'Doggo')
+ .set({ species: 'cat' })
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'update "mammals"."pet" set "species" = $1 where "mammals"."pet"."name" = $2',
+ parameters: ['cat', 'Doggo'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'update "mammals"."pet" set "species" = @1 where "mammals"."pet"."name" = @2',
+ parameters: ['cat', 'Doggo'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
})
- it('should add schema for references', async () => {
- const query = ctx.db.schema
- .withSchema('mammals')
- .createTable('foo')
- .addColumn('bar', 'integer', (col) => col.references('pet.id'))
-
- testSql(query, dialect, {
- postgres: {
- sql: 'create table "mammals"."foo" ("bar" integer references "mammals"."pet" ("id"))',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'create table "mammals"."foo" ("bar" integer references "mammals"."pet" ("id"))',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ describe('with', () => {
+ it('should not add schema for common table expression names', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .with('doggo', (db) =>
+ db.selectFrom('pet').where('pet.name', '=', 'Doggo').selectAll(),
+ )
+ .selectFrom('doggo')
+ .selectAll()
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'with "doggo" as (select * from "mammals"."pet" where "mammals"."pet"."name" = $1) select * from "doggo"',
+ parameters: ['Doggo'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'with "doggo" as (select * from "mammals"."pet" where "mammals"."pet"."name" = @1) select * from "doggo"',
+ parameters: ['Doggo'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
})
- await query.execute()
- })
- })
+ it('should not add schema for common table expression names in subqueries', async () => {
+ const query = ctx.db
+ .withSchema('mammals')
+ .with('doggo', (qb) =>
+ qb.selectFrom('pet').where('name', '=', 'Doggo').select('pet.id'),
+ )
+ .selectFrom('pet')
+ .select((eb) => [
+ 'pet.id',
+ eb.selectFrom('doggo').select('id').as('doggo_id'),
+ ])
+ .selectAll()
- describe('create index', () => {
- afterEach(async () => {
- await ctx.db.schema
- .withSchema('mammals')
- .dropIndex('pet_id_index')
- .ifExists()
- .execute()
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'with "doggo" as (select "mammals"."pet"."id" from "mammals"."pet" where "name" = $1) select "mammals"."pet"."id", (select "id" from "doggo") as "doggo_id", * from "mammals"."pet"',
+ parameters: ['Doggo'],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'with "doggo" as (select "mammals"."pet"."id" from "mammals"."pet" where "name" = @1) select "mammals"."pet"."id", (select "id" from "doggo") as "doggo_id", * from "mammals"."pet"',
+ parameters: ['Doggo'],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
})
- it('should not add schema for created index', async () => {
- const query = ctx.db.schema
- .withSchema('mammals')
- .createIndex('pet_id_index')
- .column('id')
- .on('pet')
-
- testSql(query, dialect, {
- postgres: {
- sql: 'create index "pet_id_index" on "mammals"."pet" ("id")',
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: 'create index "pet_id_index" on "mammals"."pet" ("id")',
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
+ describe('create table', () => {
+ afterEach(async () => {
+ await ctx.db.schema
+ .withSchema('mammals')
+ .dropTable('foo')
+ .ifExists()
+ .execute()
})
- await query.execute()
+ it('should add schema for references', async () => {
+ const query = ctx.db.schema
+ .withSchema('mammals')
+ .createTable('foo')
+ .addColumn('bar', 'integer', (col) => col.references('pet.id'))
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'create table "mammals"."foo" ("bar" integer references "mammals"."pet" ("id"))',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'create table "mammals"."foo" ("bar" integer references "mammals"."pet" ("id"))',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
})
- })
- if (dialect === 'postgres') {
- describe('drop index', () => {
- beforeEach(async () => {
+ describe('create index', () => {
+ afterEach(async () => {
await ctx.db.schema
+ .withSchema('mammals')
+ .dropIndex('pet_id_index')
+ .ifExists()
+ .execute()
+ })
+
+ it('should not add schema for created index', async () => {
+ const query = ctx.db.schema
.withSchema('mammals')
.createIndex('pet_id_index')
.column('id')
.on('pet')
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'create index "pet_id_index" on "mammals"."pet" ("id")',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: 'create index "pet_id_index" on "mammals"."pet" ("id")',
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
+ })
+
+ if (sqlSpec === 'postgres') {
+ describe('drop index', () => {
+ beforeEach(async () => {
+ await ctx.db.schema
+ .withSchema('mammals')
+ .createIndex('pet_id_index')
+ .column('id')
+ .on('pet')
+ .execute()
+ })
+
+ it('should add schema for dropped index', async () => {
+ const query = ctx.db.schema
+ .withSchema('mammals')
+ .dropIndex('pet_id_index')
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: 'drop index "mammals"."pet_id_index"',
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: NOT_SUPPORTED,
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
+ })
+ }
+
+ describe('create view', () => {
+ afterEach(async () => {
+ await ctx.db.schema
+ .withSchema('mammals')
+ .dropView('dogs')
+ .ifExists()
.execute()
})
- it('should add schema for dropped index', async () => {
+ it('should add schema for created view', async () => {
const query = ctx.db.schema
.withSchema('mammals')
- .dropIndex('pet_id_index')
+ .createView('dogs')
+ .as(
+ ctx.db.selectFrom('pet').where('species', '=', 'dog').selectAll(),
+ )
testSql(query, dialect, {
postgres: {
- sql: 'drop index "mammals"."pet_id_index"',
+ sql: `create view "mammals"."dogs" as select * from "mammals"."pet" where "species" = 'dog'`,
parameters: [],
},
mysql: NOT_SUPPORTED,
- mssql: NOT_SUPPORTED,
+ mssql: {
+ sql: `create view "mammals"."dogs" as select * from "mammals"."pet" where "species" = 'dog'`,
+ parameters: [],
+ },
sqlite: NOT_SUPPORTED,
})
await query.execute()
})
})
- }
- describe('create view', () => {
- afterEach(async () => {
+ describe('drop view', () => {
+ beforeEach(async () => {
+ await ctx.db.schema
+ .withSchema('mammals')
+ .createView('dogs')
+ .as(
+ ctx.db.selectFrom('pet').where('species', '=', 'dog').selectAll(),
+ )
+ .execute()
+ })
+
+ it('should add schema for dropped view', async () => {
+ const query = ctx.db.schema.withSchema('mammals').dropView('dogs')
+
+ testSql(query, dialect, {
+ postgres: {
+ sql: `drop view "mammals"."dogs"`,
+ parameters: [],
+ },
+ mysql: NOT_SUPPORTED,
+ mssql: {
+ sql: `drop view "mammals"."dogs"`,
+ parameters: [],
+ },
+ sqlite: NOT_SUPPORTED,
+ })
+
+ await query.execute()
+ })
+ })
+
+  async function createTables(): Promise<void> {
await ctx.db.schema
- .withSchema('mammals')
- .dropView('dogs')
- .ifExists()
+ .createSchema('mammals')
+ .$call((qb) => (sqlSpec === 'postgres' ? qb.ifNotExists() : qb))
.execute()
- })
- it('should add schema for created view', async () => {
- const query = ctx.db.schema
- .withSchema('mammals')
- .createView('dogs')
- .as(ctx.db.selectFrom('pet').where('species', '=', 'dog').selectAll())
-
- testSql(query, dialect, {
- postgres: {
- sql: `create view "mammals"."dogs" as select * from "mammals"."pet" where "species" = 'dog'`,
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: `create view "mammals"."dogs" as select * from "mammals"."pet" where "species" = 'dog'`,
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
+ const table = createTableWithId(
+ ctx.db.schema.withSchema('mammals'),
+ dialect,
+ 'pet',
+ )
- await query.execute()
- })
- })
+ await table
+ .addColumn('name', 'varchar(50)', (col) => col.unique())
+ .addColumn('owner_id', 'integer', (col) =>
+ col
+ .references(
+ sqlSpec === 'postgres' ? 'public.person.id' : 'dbo.person.id',
+ )
+ .onDelete('cascade'),
+ )
+ .addColumn('species', 'varchar(50)')
+ .execute()
+ }
- describe('drop view', () => {
- beforeEach(async () => {
+  async function dropTables(): Promise<void> {
await ctx.db.schema
.withSchema('mammals')
- .createView('dogs')
- .as(ctx.db.selectFrom('pet').where('species', '=', 'dog').selectAll())
+ .dropTable('pet')
+ .ifExists()
.execute()
- })
- it('should add schema for dropped view', async () => {
- const query = ctx.db.schema.withSchema('mammals').dropView('dogs')
-
- testSql(query, dialect, {
- postgres: {
- sql: `drop view "mammals"."dogs"`,
- parameters: [],
- },
- mysql: NOT_SUPPORTED,
- mssql: {
- sql: `drop view "mammals"."dogs"`,
- parameters: [],
- },
- sqlite: NOT_SUPPORTED,
- })
-
- await query.execute()
- })
+ await ctx.db.schema.dropSchema('mammals').ifExists().execute()
+ }
})
-
-  async function createTables(): Promise<void> {
- await ctx.db.schema
- .createSchema('mammals')
- .$call((qb) => (dialect === 'postgres' ? qb.ifNotExists() : qb))
- .execute()
-
- const table = createTableWithId(
- ctx.db.schema.withSchema('mammals'),
- dialect,
- 'pet',
- )
-
- await table
- .addColumn('name', 'varchar(50)', (col) => col.unique())
- .addColumn('owner_id', 'integer', (col) =>
- col
- .references(
- dialect === 'postgres' ? 'public.person.id' : 'dbo.person.id',
- )
- .onDelete('cascade'),
- )
- .addColumn('species', 'varchar(50)')
- .execute()
- }
-
-  async function dropTables(): Promise<void> {
- await ctx.db.schema
- .withSchema('mammals')
- .dropTable('pet')
- .ifExists()
- .execute()
-
- await ctx.db.schema.dropSchema('mammals').ifExists().execute()
- }
- })
+ }
}
diff --git a/test/node/src/with.test.ts b/test/node/src/with.test.ts
index a45c5c64d..c106d744e 100644
--- a/test/node/src/with.test.ts
+++ b/test/node/src/with.test.ts
@@ -13,7 +13,9 @@ import {
} from './test-setup.js'
for (const dialect of DIALECTS) {
- describe(`${dialect}: with`, () => {
+ const { sqlSpec, variant } = dialect
+
+ describe(`${variant}: with`, () => {
let ctx: TestContext
before(async function () {
@@ -120,7 +122,7 @@ for (const dialect of DIALECTS) {
await query.execute()
})
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('recursive common table expressions can refer to themselves', async () => {
await ctx.db.transaction().execute(async (trx) => {
// Create a temporary table that gets dropped when the transaction ends.
@@ -246,7 +248,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres' || dialect === 'mssql' || dialect === 'sqlite') {
+ if (sqlSpec === 'postgres' || sqlSpec === 'mssql' || sqlSpec === 'sqlite') {
it('should create an insert query with common table expressions', async () => {
const query = ctx.db
.with('jennifer', (db) =>
@@ -282,7 +284,7 @@ for (const dialect of DIALECTS) {
})
}
- if (dialect === 'postgres') {
+ if (sqlSpec === 'postgres') {
it('should create a with query where CTEs are inserts updates and deletes', async () => {
const query = ctx.db
.with('deleted_arnold', (db) =>
diff --git a/test/typings/shared.d.ts b/test/typings/shared.d.ts
index 9a7bc1afd..741453df2 100644
--- a/test/typings/shared.d.ts
+++ b/test/typings/shared.d.ts
@@ -1,9 +1,4 @@
-import {
- ColumnType,
- Generated,
- GeneratedAlways,
- JSONColumnType,
-} from '../../dist/cjs'
+import { ColumnType, Generated, GeneratedAlways, Json } from '../../dist/cjs'
export interface Pet {
   id: Generated<number>
@@ -71,21 +66,21 @@ export interface Person {
export interface PersonMetadata {
   id: Generated<number>
person_id: number
- website: JSONColumnType<{ url: string }>
-  nicknames: JSONColumnType<string[]>
- profile: JSONColumnType<{
+ website: Json<{ url: string }>
+  nicknames: Json<string[]>
+ profile: Json<{
auth: {
roles: string[]
last_login?: { device: string }
}
tags: string[]
}>
- experience: JSONColumnType<
+ experience: Json<
{
establishment: string
}[]
>
- schedule: JSONColumnType<{ name: string; time: string }[][][]>
-  record: JSONColumnType<Record<string, string>>
-  array: JSONColumnType<Array<string>>
+ schedule: Json<{ name: string; time: string }[][][]>
+  record: Json<Record<string, string>>
+  array: Json<Array<string> | null>
}
diff --git a/test/typings/test-d/insert.test-d.ts b/test/typings/test-d/insert.test-d.ts
index e8f049ba1..4b0483149 100644
--- a/test/typings/test-d/insert.test-d.ts
+++ b/test/typings/test-d/insert.test-d.ts
@@ -1,5 +1,5 @@
import { expectError, expectType } from 'tsd'
-import { InsertResult, Kysely, sql } from '..'
+import { ExpressionBuilder, InsertObject, InsertResult, Kysely, sql } from '..'
import { Database } from '../shared'
 async function testInsert(db: Kysely<Database>) {
@@ -268,3 +268,126 @@ async function testOutput(db: Kysely) {
expectError(db.insertInto('person').output('deleted.age').values(person))
expectError(db.insertInto('person').outputAll('deleted').values(person))
}
+
+async function testjval(db: Kysely<Database>) {
+ const getValues = <
+    O extends Partial<InsertObject<Database, 'person_metadata'>>,
+ >(
+    { jval }: ExpressionBuilder<Database, 'person_metadata'>,
+ overrides?: O,
+ ) => ({
+ array: jval(['123']),
+ experience: jval([{ establishment: 'New York Times' }]),
+ nicknames: jval(['Jenny']),
+ person_id: 1,
+ profile: jval({
+ auth: {
+ roles: ['admin'],
+ },
+ tags: ['important'],
+ }),
+ website: jval({ url: 'http://example.com' }),
+ record: jval({ key: 'value' }),
+ schedule: jval([
+ [
+ [
+ {
+ name: 'foo',
+ time: '2024-01-01T00:00:00.000Z',
+ },
+ ],
+ ],
+ ]),
+ ...overrides,
+ })
+
+ db.insertInto('person_metadata').values(getValues).execute()
+
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ array: null,
+ }),
+ )
+
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ array: eb.jval(null),
+ }),
+ )
+
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ array: sql.jval(null),
+ }),
+ )
+
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ website: sql.jval({ url: 'http://example.com' }),
+ }),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+      array: ['123'], // expects `jval(Array<string> | null)`
+ }),
+ ),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+      array: eb.val(['123']), // expects `jval(Array<string> | null)`
+ }),
+ ),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+      array: eb.jval({}), // expects `jval(Array<string> | null)`
+ }),
+ ),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+      array: eb.jval([123]), // expects `jval(Array<string> | null)`
+ }),
+ ),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ experience: [{ establishment: 'New York Times' }], // expects `jval({ establishment: string }[])`
+ }),
+ ),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ experience: eb.jval({ establishment: 'New York Times' }), // expects `jval({ establishment: string }[])`
+ }),
+ ),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ experience: eb.jval([{}]), // expects `jval({ establishment: string }[])`
+ }),
+ ),
+ )
+
+ expectError(
+ db.insertInto('person_metadata').values((eb) =>
+ getValues(eb, {
+ experience: eb.jval([{ establishment: 2 }]), // expects `jval({ establishment: string }[])`
+ }),
+ ),
+ )
+}